repo_id stringclasses 875
values | size int64 974 38.9k | file_path stringlengths 10 308 | content stringlengths 974 38.9k |
|---|---|---|---|
apache/httpcomponents-client | 36,241 | httpclient5/src/main/java/org/apache/hc/client5/http/impl/nio/PoolingAsyncClientConnectionManager.java | /*
* ====================================================================
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
*/
package org.apache.hc.client5.http.impl.nio;
import java.nio.file.Path;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import org.apache.hc.client5.http.DnsResolver;
import org.apache.hc.client5.http.EndpointInfo;
import org.apache.hc.client5.http.HttpRoute;
import org.apache.hc.client5.http.SchemePortResolver;
import org.apache.hc.client5.http.config.ConnectionConfig;
import org.apache.hc.client5.http.config.TlsConfig;
import org.apache.hc.client5.http.impl.ConnPoolSupport;
import org.apache.hc.client5.http.impl.ConnectionHolder;
import org.apache.hc.client5.http.impl.ConnectionShutdownException;
import org.apache.hc.client5.http.impl.PrefixedIncrementingId;
import org.apache.hc.client5.http.nio.AsyncClientConnectionManager;
import org.apache.hc.client5.http.nio.AsyncClientConnectionOperator;
import org.apache.hc.client5.http.nio.AsyncConnectionEndpoint;
import org.apache.hc.client5.http.nio.ManagedAsyncClientConnection;
import org.apache.hc.client5.http.ssl.DefaultClientTlsStrategy;
import org.apache.hc.core5.annotation.Contract;
import org.apache.hc.core5.annotation.Internal;
import org.apache.hc.core5.annotation.ThreadingBehavior;
import org.apache.hc.core5.concurrent.BasicFuture;
import org.apache.hc.core5.concurrent.CallbackContribution;
import org.apache.hc.core5.concurrent.ComplexFuture;
import org.apache.hc.core5.concurrent.FutureCallback;
import org.apache.hc.core5.function.Resolver;
import org.apache.hc.core5.http.HttpConnection;
import org.apache.hc.core5.http.HttpHost;
import org.apache.hc.core5.http.HttpVersion;
import org.apache.hc.core5.http.ProtocolVersion;
import org.apache.hc.core5.http.URIScheme;
import org.apache.hc.core5.http.config.Lookup;
import org.apache.hc.core5.http.config.RegistryBuilder;
import org.apache.hc.core5.http.nio.AsyncClientExchangeHandler;
import org.apache.hc.core5.http.nio.AsyncPushConsumer;
import org.apache.hc.core5.http.nio.HandlerFactory;
import org.apache.hc.core5.http.nio.command.RequestExecutionCommand;
import org.apache.hc.core5.http.nio.command.StaleCheckCommand;
import org.apache.hc.core5.http.nio.ssl.TlsStrategy;
import org.apache.hc.core5.http.protocol.HttpContext;
import org.apache.hc.core5.http2.HttpVersionPolicy;
import org.apache.hc.core5.http2.nio.command.PingCommand;
import org.apache.hc.core5.http2.nio.support.BasicPingHandler;
import org.apache.hc.core5.http2.ssl.ApplicationProtocol;
import org.apache.hc.core5.io.CloseMode;
import org.apache.hc.core5.pool.ConnPoolControl;
import org.apache.hc.core5.pool.DefaultDisposalCallback;
import org.apache.hc.core5.pool.LaxConnPool;
import org.apache.hc.core5.pool.ManagedConnPool;
import org.apache.hc.core5.pool.PoolConcurrencyPolicy;
import org.apache.hc.core5.pool.PoolEntry;
import org.apache.hc.core5.pool.PoolReusePolicy;
import org.apache.hc.core5.pool.PoolStats;
import org.apache.hc.core5.pool.StrictConnPool;
import org.apache.hc.core5.reactor.Command;
import org.apache.hc.core5.reactor.ConnectionInitiator;
import org.apache.hc.core5.reactor.ProtocolIOSession;
import org.apache.hc.core5.reactor.ssl.TlsDetails;
import org.apache.hc.core5.util.Args;
import org.apache.hc.core5.util.Deadline;
import org.apache.hc.core5.util.Identifiable;
import org.apache.hc.core5.util.TimeValue;
import org.apache.hc.core5.util.Timeout;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* {@code PoolingAsyncClientConnectionManager} maintains a pool of non-blocking
* {@link org.apache.hc.core5.http.HttpConnection}s and is able to service
* connection requests from multiple execution threads. Connections are pooled
 * on a per route basis. A request for a route for which the manager already has
 * persistent connections available in the pool will be serviced by leasing
 * a connection from the pool rather than creating a new connection.
* <p>
 * {@code PoolingAsyncClientConnectionManager} maintains a maximum limit
 * of connections on a per route basis and in total. Connection limits
* can be adjusted using {@link ConnPoolControl} methods.
* <p>
* Total time to live (TTL) set at construction time defines maximum life span
* of persistent connections regardless of their expiration setting. No persistent
* connection will be re-used past its TTL value.
*
* @since 5.0
*/
@Contract(threading = ThreadingBehavior.SAFE_CONDITIONAL)
public class PoolingAsyncClientConnectionManager implements AsyncClientConnectionManager, ConnPoolControl<HttpRoute> {
private static final Logger LOG = LoggerFactory.getLogger(PoolingAsyncClientConnectionManager.class);
public static final int DEFAULT_MAX_TOTAL_CONNECTIONS = 25;
public static final int DEFAULT_MAX_CONNECTIONS_PER_ROUTE = 5;
private final ManagedConnPool<HttpRoute, ManagedAsyncClientConnection> pool;
private final AsyncClientConnectionOperator connectionOperator;
private final AtomicBoolean closed;
private volatile Resolver<HttpRoute, ConnectionConfig> connectionConfigResolver;
private volatile Resolver<HttpHost, TlsConfig> tlsConfigResolver;
/**
 * Creates a manager with default settings: a TLS strategy registered for the
 * {@code https} scheme via {@link DefaultClientTlsStrategy#createDefault()}.
 */
public PoolingAsyncClientConnectionManager() {
    this(RegistryBuilder.<TlsStrategy>create()
            .register(URIScheme.HTTPS.getId(), DefaultClientTlsStrategy.createDefault())
            .build());
}
/**
 * Creates a manager with the given TLS strategy lookup, {@link PoolConcurrencyPolicy#STRICT}
 * pool concurrency and a negative (effectively disabled) connection time-to-live.
 */
public PoolingAsyncClientConnectionManager(final Lookup<TlsStrategy> tlsStrategyLookup) {
    this(tlsStrategyLookup, PoolConcurrencyPolicy.STRICT, TimeValue.NEG_ONE_MILLISECOND);
}
/**
 * Creates a manager with the given TLS strategy lookup, concurrency policy and
 * time-to-live, defaulting connection re-use to {@link PoolReusePolicy#LIFO}.
 */
public PoolingAsyncClientConnectionManager(
        final Lookup<TlsStrategy> tlsStrategyLookup,
        final PoolConcurrencyPolicy poolConcurrencyPolicy,
        final TimeValue timeToLive) {
    this(tlsStrategyLookup, poolConcurrencyPolicy, PoolReusePolicy.LIFO, timeToLive);
}
/**
 * Creates a manager with the given pool settings, defaulting the scheme port
 * resolver and DNS resolver (both {@code null} selects the operator defaults).
 */
public PoolingAsyncClientConnectionManager(
        final Lookup<TlsStrategy> tlsStrategyLookup,
        final PoolConcurrencyPolicy poolConcurrencyPolicy,
        final PoolReusePolicy poolReusePolicy,
        final TimeValue timeToLive) {
    this(tlsStrategyLookup, poolConcurrencyPolicy, poolReusePolicy, timeToLive, null, null);
}
/**
 * Creates a manager backed by a {@link DefaultAsyncClientConnectionOperator}
 * built from the given strategy lookup and resolvers. HTTP/2 message
 * multiplexing is disabled for managers created through this constructor.
 */
public PoolingAsyncClientConnectionManager(
        final Lookup<TlsStrategy> tlsStrategyLookup,
        final PoolConcurrencyPolicy poolConcurrencyPolicy,
        final PoolReusePolicy poolReusePolicy,
        final TimeValue timeToLive,
        final SchemePortResolver schemePortResolver,
        final DnsResolver dnsResolver) {
    this(new DefaultAsyncClientConnectionOperator(tlsStrategyLookup, schemePortResolver, dnsResolver),
            poolConcurrencyPolicy, poolReusePolicy, timeToLive, false);
}
/**
 * Creates a manager with an explicit connection operator and pool behavior.
 * <p>
 * Depending on {@code poolConcurrencyPolicy} a {@link StrictConnPool} (hard
 * total and per-route limits) or a {@link LaxConnPool} (per-route limits only)
 * is created; both are subclassed so that {@code closeExpired()} also applies
 * the per-route {@link ConnectionConfig} TTL via {@link #closeIfExpired}.
 * When {@code messageMultiplexing} is enabled the pool is wrapped in an
 * {@link H2SharingConnPool} so HTTP/2 connections can be shared.
 *
 * @throws IllegalArgumentException if the concurrency policy value is not recognized.
 */
@Internal
public PoolingAsyncClientConnectionManager(
        final AsyncClientConnectionOperator connectionOperator,
        final PoolConcurrencyPolicy poolConcurrencyPolicy,
        final PoolReusePolicy poolReusePolicy,
        final TimeValue timeToLive,
        final boolean messageMultiplexing) {
    this.connectionOperator = Args.notNull(connectionOperator, "Connection operator");
    final ManagedConnPool<HttpRoute, ManagedAsyncClientConnection> managedConnPool;
    // null policy falls back to STRICT
    switch (poolConcurrencyPolicy != null ? poolConcurrencyPolicy : PoolConcurrencyPolicy.STRICT) {
        case STRICT:
            managedConnPool = new StrictConnPool<HttpRoute, ManagedAsyncClientConnection>(
                    DEFAULT_MAX_CONNECTIONS_PER_ROUTE,
                    DEFAULT_MAX_TOTAL_CONNECTIONS,
                    timeToLive,
                    poolReusePolicy,
                    new DefaultDisposalCallback<>(),
                    null) {

                @Override
                public void closeExpired() {
                    // Also enforce per-route ConnectionConfig TTL, not just pool expiry.
                    enumAvailable(PoolingAsyncClientConnectionManager.this::closeIfExpired);
                }

            };
            break;
        case LAX:
            managedConnPool = new LaxConnPool<HttpRoute, ManagedAsyncClientConnection>(
                    DEFAULT_MAX_CONNECTIONS_PER_ROUTE,
                    timeToLive,
                    poolReusePolicy,
                    null) {

                @Override
                public void closeExpired() {
                    // Also enforce per-route ConnectionConfig TTL, not just pool expiry.
                    enumAvailable(PoolingAsyncClientConnectionManager.this::closeIfExpired);
                }

            };
            break;
        default:
            throw new IllegalArgumentException("Unexpected PoolConcurrencyPolicy value: " + poolConcurrencyPolicy);
    }
    this.pool = messageMultiplexing ? new H2SharingConnPool<>(managedConnPool) : managedConnPool;
    this.closed = new AtomicBoolean(false);
}
/**
 * Creates a manager with a pre-built pool and connection operator. Intended
 * for framework/internal use; both arguments are required.
 */
@Internal
protected PoolingAsyncClientConnectionManager(
        final ManagedConnPool<HttpRoute, ManagedAsyncClientConnection> pool,
        final AsyncClientConnectionOperator connectionOperator) {
    this.connectionOperator = Args.notNull(connectionOperator, "Connection operator");
    this.pool = Args.notNull(pool, "Connection pool");
    this.closed = new AtomicBoolean(false);
}
/**
 * Closes the manager gracefully; equivalent to {@code close(CloseMode.GRACEFUL)}.
 */
@Override
public void close() {
    close(CloseMode.GRACEFUL);
}
/**
 * Shuts down the underlying pool with the given close mode. The shutdown is
 * performed at most once; any subsequent invocation is a no-op.
 */
@Override
public void close(final CloseMode closeMode) {
    if (!this.closed.compareAndSet(false, true)) {
        // Another caller already closed the manager.
        return;
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("Shutdown connection pool {}", closeMode);
    }
    this.pool.close(closeMode);
    LOG.debug("Connection pool shut down");
}
/**
 * Narrows an {@link AsyncConnectionEndpoint} to the internal endpoint type
 * managed by this class.
 *
 * @throws IllegalStateException if the endpoint was not created by this manager.
 */
private InternalConnectionEndpoint cast(final AsyncConnectionEndpoint endpoint) {
    if (!(endpoint instanceof InternalConnectionEndpoint)) {
        throw new IllegalStateException("Unexpected endpoint class: " + endpoint.getClass());
    }
    return (InternalConnectionEndpoint) endpoint;
}
/**
 * Resolves the {@link ConnectionConfig} for the given route, falling back to
 * {@link ConnectionConfig#DEFAULT} when no resolver is set or it yields null.
 */
private ConnectionConfig resolveConnectionConfig(final HttpRoute route) {
    // Read the volatile field once for a consistent view.
    final Resolver<HttpRoute, ConnectionConfig> configResolver = this.connectionConfigResolver;
    if (configResolver != null) {
        final ConnectionConfig config = configResolver.resolve(route);
        if (config != null) {
            return config;
        }
    }
    return ConnectionConfig.DEFAULT;
}
/**
 * Resolves the {@link TlsConfig} for the given host, falling back to
 * {@link TlsConfig#DEFAULT}. For plain {@code http} hosts a NEGOTIATE version
 * policy is rewritten to FORCE_HTTP_1, since protocol negotiation (ALPN)
 * is not available without TLS.
 */
private TlsConfig resolveTlsConfig(final HttpHost host) {
    // Read the volatile field once for a consistent view.
    final Resolver<HttpHost, TlsConfig> configResolver = this.tlsConfigResolver;
    final TlsConfig resolved = configResolver != null ? configResolver.resolve(host) : null;
    final TlsConfig tlsConfig = resolved != null ? resolved : TlsConfig.DEFAULT;
    if (URIScheme.HTTP.same(host.getSchemeName())
            && tlsConfig.getHttpVersionPolicy() == HttpVersionPolicy.NEGOTIATE) {
        // Plain HTTP cannot negotiate the protocol version; pin HTTP/1.1.
        return TlsConfig.copy(tlsConfig)
                .setVersionPolicy(HttpVersionPolicy.FORCE_HTTP_1)
                .build();
    }
    return tlsConfig;
}
/**
 * Leases an endpoint for the given route, re-using a pooled connection when
 * possible. Before a pooled connection is handed out it is discarded if it
 * has exceeded its configured time-to-live, has sat idle past the idle
 * timeout, or (when validate-after-inactivity applies) fails a liveness
 * probe: an HTTP/2 PING for h2 connections, a stale check otherwise.
 *
 * @param id             exchange identifier used for log correlation.
 * @param requestTimeout maximum time to wait for a pool entry.
 * @return a {@link Future} whose cancellation is propagated to the pending
 *         pool lease.
 */
@Override
public Future<AsyncConnectionEndpoint> lease(
        final String id,
        final HttpRoute route,
        final Object state,
        final Timeout requestTimeout,
        final FutureCallback<AsyncConnectionEndpoint> callback) {
    if (LOG.isDebugEnabled()) {
        LOG.debug("{} endpoint lease request ({}) {}", id, requestTimeout, ConnPoolSupport.formatStats(route, state, pool));
    }
    return new Future<AsyncConnectionEndpoint>() {

        // Resolved once per lease so all checks below use a consistent config.
        final ConnectionConfig connectionConfig = resolveConnectionConfig(route);

        final BasicFuture<AsyncConnectionEndpoint> resultFuture = new BasicFuture<>(callback);

        final Future<PoolEntry<HttpRoute, ManagedAsyncClientConnection>> leaseFuture = pool.lease(
                route,
                state,
                requestTimeout, new FutureCallback<PoolEntry<HttpRoute, ManagedAsyncClientConnection>>() {

                    @Override
                    public void completed(final PoolEntry<HttpRoute, ManagedAsyncClientConnection> poolEntry) {
                        // Total-lifetime (TTL) check; a zero TTL always discards.
                        if (poolEntry.hasConnection()) {
                            final TimeValue timeToLive = connectionConfig.getTimeToLive();
                            if (TimeValue.isNonNegative(timeToLive)) {
                                if (timeToLive.getDuration() == 0
                                        || Deadline.calculate(poolEntry.getCreated(), timeToLive).isExpired()) {
                                    poolEntry.discardConnection(CloseMode.GRACEFUL);
                                }
                            }
                        }
                        // Idle-timeout check based on the entry's last update time.
                        if (poolEntry.hasConnection()) {
                            final TimeValue idleTimeout = connectionConfig.getIdleTimeout();
                            if (TimeValue.isPositive(idleTimeout)) {
                                if (Deadline.calculate(poolEntry.getUpdated(), idleTimeout).isExpired()) {
                                    poolEntry.discardConnection(CloseMode.GRACEFUL);
                                }
                            }
                        }
                        // Validate-after-inactivity: probe liveness asynchronously; the
                        // probe callback completes the lease, hence the early returns.
                        if (poolEntry.hasConnection()) {
                            final ManagedAsyncClientConnection connection = poolEntry.getConnection();
                            final TimeValue timeValue = connectionConfig.getValidateAfterInactivity();
                            if (connection.isOpen() && TimeValue.isNonNegative(timeValue)) {
                                if (timeValue.getDuration() == 0
                                        || Deadline.calculate(poolEntry.getUpdated(), timeValue).isExpired()) {
                                    final ProtocolVersion protocolVersion = connection.getProtocolVersion();
                                    if (protocolVersion != null && protocolVersion.greaterEquals(HttpVersion.HTTP_2_0)) {
                                        // HTTP/2 and above: use a PING frame as the liveness probe.
                                        connection.submitCommand(new PingCommand(new BasicPingHandler(result -> {
                                            if (result == null || !result) {
                                                if (LOG.isDebugEnabled()) {
                                                    LOG.debug("{} connection {} is stale", id, ConnPoolSupport.getId(connection));
                                                }
                                                poolEntry.discardConnection(CloseMode.GRACEFUL);
                                            }
                                            leaseCompleted(poolEntry);
                                        })), Command.Priority.IMMEDIATE);
                                        return;
                                    } else {
                                        // HTTP/1.x (or unknown version): use a stale-check command.
                                        connection.submitCommand(new StaleCheckCommand(result -> {
                                            if (!Boolean.TRUE.equals(result)) {
                                                if (LOG.isDebugEnabled()) {
                                                    LOG.debug("{} connection {} is stale", id, ConnPoolSupport.getId(connection));
                                                }
                                                poolEntry.discardConnection(CloseMode.GRACEFUL);
                                            }
                                            leaseCompleted(poolEntry);
                                        }), Command.Priority.IMMEDIATE);
                                        return;
                                    }
                                }
                            }
                        }
                        leaseCompleted(poolEntry);
                    }

                    // Finalizes the lease: re-activates the connection (if any),
                    // applies the socket timeout and hands out a fresh endpoint.
                    void leaseCompleted(final PoolEntry<HttpRoute, ManagedAsyncClientConnection> poolEntry) {
                        final ManagedAsyncClientConnection connection = poolEntry.getConnection();
                        if (connection != null) {
                            connection.activate();
                            if (connectionConfig.getSocketTimeout() != null) {
                                connection.setSocketTimeout(connectionConfig.getSocketTimeout());
                            }
                        }
                        if (LOG.isDebugEnabled()) {
                            LOG.debug("{} endpoint leased {}", id, ConnPoolSupport.formatStats(route, state, pool));
                        }
                        final AsyncConnectionEndpoint endpoint = new InternalConnectionEndpoint(poolEntry);
                        if (LOG.isDebugEnabled()) {
                            LOG.debug("{} acquired {}", id, ConnPoolSupport.getId(endpoint));
                        }
                        resultFuture.completed(endpoint);
                    }

                    @Override
                    public void failed(final Exception ex) {
                        if (LOG.isDebugEnabled()) {
                            LOG.debug("{} endpoint lease failed", id);
                        }
                        resultFuture.failed(ex);
                    }

                    @Override
                    public void cancelled() {
                        if (LOG.isDebugEnabled()) {
                            LOG.debug("{} endpoint lease cancelled", id);
                        }
                        resultFuture.cancel();
                    }

                });

        @Override
        public AsyncConnectionEndpoint get() throws InterruptedException, ExecutionException {
            return resultFuture.get();
        }

        @Override
        public AsyncConnectionEndpoint get(
                final long timeout, final TimeUnit unit) throws InterruptedException, ExecutionException, TimeoutException {
            return resultFuture.get(timeout, unit);
        }

        @Override
        public boolean cancel(final boolean mayInterruptIfRunning) {
            // Cancel the pending pool lease; the result future is completed
            // through the lease callback's cancelled() method.
            return leaseFuture.cancel(mayInterruptIfRunning);
        }

        @Override
        public boolean isDone() {
            return resultFuture.isDone();
        }

        @Override
        public boolean isCancelled() {
            return resultFuture.isCancelled();
        }

    };
}
/**
 * Releases the endpoint back to the pool. The pool entry is detached from
 * the endpoint first, so a double release is a no-op. The connection is kept
 * alive for re-use only while it is still open; per the debug log below,
 * a non-positive {@code keepAlive} keeps it alive indefinitely.
 */
@Override
public void release(final AsyncConnectionEndpoint endpoint, final Object state, final TimeValue keepAlive) {
    Args.notNull(endpoint, "Managed endpoint");
    Args.notNull(keepAlive, "Keep-alive time");
    // Detach before anything else: makes release idempotent per endpoint.
    final PoolEntry<HttpRoute, ManagedAsyncClientConnection> entry = cast(endpoint).detach();
    if (entry == null) {
        return;
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("{} releasing endpoint", ConnPoolSupport.getId(endpoint));
    }
    // After shutdown the pool no longer accepts entries.
    if (this.isClosed()) {
        return;
    }
    final ManagedAsyncClientConnection connection = entry.getConnection();
    boolean reusable = connection != null && connection.isOpen();
    try {
        if (reusable) {
            entry.updateState(state);
            entry.updateExpiry(keepAlive);
            connection.passivate();
            if (LOG.isDebugEnabled()) {
                final String s;
                if (TimeValue.isPositive(keepAlive)) {
                    s = "for " + keepAlive;
                } else {
                    s = "indefinitely";
                }
                LOG.debug("{} connection {} can be kept alive {}", ConnPoolSupport.getId(endpoint), ConnPoolSupport.getId(connection), s);
            }
        }
    } catch (final RuntimeException ex) {
        // If passivation/update blows up, make sure the entry is not re-used.
        reusable = false;
        throw ex;
    } finally {
        // Always return the entry to the pool, re-usable or not.
        pool.release(entry, reusable);
        if (LOG.isDebugEnabled()) {
            LOG.debug("{} connection released {}", ConnPoolSupport.getId(endpoint), ConnPoolSupport.formatStats(entry.getRoute(), entry.getState(), pool));
        }
    }
}
/**
 * Connects the endpoint's underlying connection to the first hop of its
 * route (the proxy if present, otherwise the target). Completes immediately
 * if the endpoint is already connected. TLS parameters are resolved for the
 * target host unless the route is tunnelled, in which case TLS is deferred
 * to the later {@code upgrade} step.
 *
 * @param timeout connect timeout override; falls back to the resolved
 *        {@link ConnectionConfig#getConnectTimeout()} when {@code null}.
 */
@Override
public Future<AsyncConnectionEndpoint> connect(
        final AsyncConnectionEndpoint endpoint,
        final ConnectionInitiator connectionInitiator,
        final Timeout timeout,
        final Object attachment,
        final HttpContext context,
        final FutureCallback<AsyncConnectionEndpoint> callback) {
    Args.notNull(endpoint, "Endpoint");
    Args.notNull(connectionInitiator, "Connection initiator");
    final InternalConnectionEndpoint internalEndpoint = cast(endpoint);
    final ComplexFuture<AsyncConnectionEndpoint> resultFuture = new ComplexFuture<>(callback);
    if (internalEndpoint.isConnected()) {
        // Nothing to do; report success right away.
        resultFuture.completed(endpoint);
        return resultFuture;
    }
    final PoolEntry<HttpRoute, ManagedAsyncClientConnection> poolEntry = internalEndpoint.getPoolEntry();
    final HttpRoute route = poolEntry.getRoute();
    final Path unixDomainSocket = route.getUnixDomainSocket();
    // First hop is the proxy when the route has one, otherwise the target.
    final HttpHost firstHop = route.getProxyHost() != null ? route.getProxyHost() : route.getTargetHost();
    final ConnectionConfig connectionConfig = resolveConnectionConfig(route);
    final Timeout connectTimeout = timeout != null ? timeout : connectionConfig.getConnectTimeout();
    if (LOG.isDebugEnabled()) {
        LOG.debug("{} connecting endpoint to {} ({})", ConnPoolSupport.getId(endpoint), firstHop, connectTimeout);
    }
    final Future<ManagedAsyncClientConnection> connectFuture = connectionOperator.connect(
            connectionInitiator,
            firstHop,
            unixDomainSocket,
            route.getTargetName(),
            route.getLocalSocketAddress(),
            connectTimeout,
            // Tunnelled routes get TLS applied during the upgrade step instead.
            route.isTunnelled() ? null : resolveTlsConfig(route.getTargetHost()),
            context,
            new FutureCallback<ManagedAsyncClientConnection>() {

                @Override
                public void completed(final ManagedAsyncClientConnection connection) {
                    try {
                        if (LOG.isDebugEnabled()) {
                            LOG.debug("{} connected {}", ConnPoolSupport.getId(endpoint), ConnPoolSupport.getId(connection));
                        }
                        final Timeout socketTimeout = connectionConfig.getSocketTimeout();
                        if (socketTimeout != null) {
                            connection.setSocketTimeout(socketTimeout);
                        }
                        // Attach the live connection to the pool entry before signalling success.
                        poolEntry.assignConnection(connection);
                        resultFuture.completed(internalEndpoint);
                    } catch (final RuntimeException ex) {
                        resultFuture.failed(ex);
                    }
                }

                @Override
                public void failed(final Exception ex) {
                    resultFuture.failed(ex);
                }

                @Override
                public void cancelled() {
                    resultFuture.cancel();
                }

            });
    // Cancelling the result future cancels the in-flight connect as well.
    resultFuture.setDependency(connectFuture);
    return resultFuture;
}
/**
 * Upgrades the endpoint's connection to TLS against the route's target host.
 * If the negotiated TLS application protocol is HTTP/2, the connection is
 * additionally switched to the h2 protocol handler before the callback is
 * invoked.
 *
 * @param attachment protocol upgrade attachment; when {@code null} the
 *        resolved {@link TlsConfig} for the target host is used.
 */
@Override
public void upgrade(
        final AsyncConnectionEndpoint endpoint,
        final Object attachment,
        final HttpContext context,
        final FutureCallback<AsyncConnectionEndpoint> callback) {
    Args.notNull(endpoint, "Managed endpoint");
    final InternalConnectionEndpoint internalEndpoint = cast(endpoint);
    // Requires an endpoint with a live connection attached.
    final PoolEntry<HttpRoute, ManagedAsyncClientConnection> poolEntry = internalEndpoint.getValidatedPoolEntry();
    final HttpRoute route = poolEntry.getRoute();
    final HttpHost target = route.getTargetHost();
    connectionOperator.upgrade(
            poolEntry.getConnection(),
            target,
            route.getTargetName(),
            attachment != null ? attachment : resolveTlsConfig(target),
            context,
            new CallbackContribution<ManagedAsyncClientConnection>(callback) {

                @Override
                public void completed(final ManagedAsyncClientConnection connection) {
                    if (LOG.isDebugEnabled()) {
                        LOG.debug("{} upgraded {}", ConnPoolSupport.getId(internalEndpoint), ConnPoolSupport.getId(connection));
                    }
                    final TlsDetails tlsDetails = connection.getTlsDetails();
                    if (tlsDetails != null && ApplicationProtocol.HTTP_2.id.equals(tlsDetails.getApplicationProtocol())) {
                        // ALPN selected h2: switch the I/O session to the HTTP/2 handler.
                        connection.switchProtocol(ApplicationProtocol.HTTP_2.id, new CallbackContribution<ProtocolIOSession>(callback) {

                            @Override
                            public void completed(final ProtocolIOSession protocolIOSession) {
                                if (callback != null) {
                                    callback.completed(endpoint);
                                }
                            }

                        });
                    } else {
                        if (callback != null) {
                            callback.completed(endpoint);
                        }
                    }
                }

            });
}
/**
 * Upgrades the endpoint's connection to TLS without a completion callback.
 */
@Override
public void upgrade(final AsyncConnectionEndpoint endpoint, final Object attachment, final HttpContext context) {
    upgrade(endpoint, attachment, context, null);
}
/**
 * Returns the routes currently tracked by the underlying pool.
 */
@Override
public Set<HttpRoute> getRoutes() {
    return pool.getRoutes();
}
/**
 * Sets the maximum total number of connections across all routes.
 */
@Override
public void setMaxTotal(final int max) {
    pool.setMaxTotal(max);
}
/**
 * Returns the maximum total number of connections across all routes.
 */
@Override
public int getMaxTotal() {
    return pool.getMaxTotal();
}
/**
 * Sets the default maximum number of connections per route.
 */
@Override
public void setDefaultMaxPerRoute(final int max) {
    pool.setDefaultMaxPerRoute(max);
}
/**
 * Returns the default maximum number of connections per route.
 */
@Override
public int getDefaultMaxPerRoute() {
    return pool.getDefaultMaxPerRoute();
}
/**
 * Sets the maximum number of connections for the given route.
 */
@Override
public void setMaxPerRoute(final HttpRoute route, final int max) {
    pool.setMaxPerRoute(route, max);
}
/**
 * Returns the maximum number of connections for the given route.
 */
@Override
public int getMaxPerRoute(final HttpRoute route) {
    return pool.getMaxPerRoute(route);
}
/**
 * Closes pooled connections that have been idle longer than the given time.
 * No-op once the manager has been shut down.
 */
@Override
public void closeIdle(final TimeValue idletime) {
    if (isClosed()) {
        return;
    }
    pool.closeIdle(idletime);
}
/**
 * Closes pooled connections whose expiry deadline (or per-route TTL — see
 * {@link #closeIfExpired}) has passed. No-op once the manager has been shut down.
 */
@Override
public void closeExpired() {
    if (isClosed()) {
        return;
    }
    pool.closeExpired();
}
/**
 * Returns aggregate statistics for the whole pool.
 */
@Override
public PoolStats getTotalStats() {
    return pool.getTotalStats();
}
/**
 * Returns statistics for the given route.
 */
@Override
public PoolStats getStats(final HttpRoute route) {
    return pool.getStats(route);
}
/**
 * Sets the same {@link ConnectionConfig} for all routes. Replaces any
 * previously set per-route resolver.
 *
 * @since 5.2
 */
public void setDefaultConnectionConfig(final ConnectionConfig config) {
    this.connectionConfigResolver = route -> config;
}
/**
 * Sets {@link Resolver} of {@link ConnectionConfig} on a per route basis.
 * A {@code null} resolver (or a {@code null} resolution) falls back to
 * {@link ConnectionConfig#DEFAULT}.
 *
 * @since 5.2
 */
public void setConnectionConfigResolver(final Resolver<HttpRoute, ConnectionConfig> connectionConfigResolver) {
    this.connectionConfigResolver = connectionConfigResolver;
}
/**
 * Sets the same {@link TlsConfig} for all hosts. Replaces any previously
 * set per-host resolver.
 *
 * @since 5.2
 */
public void setDefaultTlsConfig(final TlsConfig config) {
    this.tlsConfigResolver = host -> config;
}
/**
 * Sets {@link Resolver} of {@link TlsConfig} on a per host basis.
 * A {@code null} resolver (or a {@code null} resolution) falls back to
 * {@link TlsConfig#DEFAULT}.
 *
 * @since 5.2
 */
public void setTlsConfigResolver(final Resolver<HttpHost, TlsConfig> tlsConfigResolver) {
    this.tlsConfigResolver = tlsConfigResolver;
}
/**
 * Discards the entry's connection if it has expired: either the pool entry's
 * own expiry deadline has passed, or the per-route {@link ConnectionConfig}
 * time-to-live (measured from entry creation) has been exceeded.
 */
void closeIfExpired(final PoolEntry<HttpRoute, ManagedAsyncClientConnection> entry) {
    final long now = System.currentTimeMillis();
    if (entry.getExpiryDeadline().isBefore(now)) {
        entry.discardConnection(CloseMode.GRACEFUL);
        return;
    }
    // Entry deadline still in the future: also honor the per-route TTL.
    final ConnectionConfig connectionConfig = resolveConnectionConfig(entry.getRoute());
    final TimeValue timeToLive = connectionConfig.getTimeToLive();
    if (timeToLive != null && Deadline.calculate(entry.getCreated(), timeToLive).isBefore(now)) {
        entry.discardConnection(CloseMode.GRACEFUL);
    }
}
/**
 * Returns the validate-after-inactivity period of {@link ConnectionConfig#DEFAULT};
 * it does not reflect any value set via {@link #setConnectionConfigResolver(Resolver)}.
 *
 * @deprecated Use custom {@link #setConnectionConfigResolver(Resolver)}
 */
@Deprecated
public TimeValue getValidateAfterInactivity() {
    return ConnectionConfig.DEFAULT.getValidateAfterInactivity();
}
/**
 * Defines period of inactivity after which persistent connections must
 * be re-validated prior to being {@link #lease(String, HttpRoute, Object, Timeout,
 * FutureCallback)} leased} to the consumer. Negative values passed
 * to this method disable connection validation. This check helps detect connections
 * that have become stale (half-closed) while kept inactive in the pool.
 * <p>
 * Note: this replaces the entire default {@link ConnectionConfig}, discarding
 * any other settings previously applied via {@link #setDefaultConnectionConfig}.
 *
 * @deprecated Use {@link #setConnectionConfigResolver(Resolver)}.
 */
@Deprecated
public void setValidateAfterInactivity(final TimeValue validateAfterInactivity) {
    setDefaultConnectionConfig(ConnectionConfig.custom()
            .setValidateAfterInactivity(validateAfterInactivity)
            .build());
}
private static final PrefixedIncrementingId INCREMENTING_ID = new PrefixedIncrementingId("ep-");
/**
 * Endpoint handed out to consumers by {@link #lease}. It wraps a pool entry
 * in an {@link AtomicReference}; {@link #detach()} atomically severs the link
 * so that release and close operations are idempotent. Once detached, most
 * operations throw {@link ConnectionShutdownException}.
 */
static class InternalConnectionEndpoint extends AsyncConnectionEndpoint implements ConnectionHolder, Identifiable {

    // Holder for the pool entry; set to null on detach.
    private final AtomicReference<PoolEntry<HttpRoute, ManagedAsyncClientConnection>> poolEntryRef;
    // Unique endpoint id ("ep-N") used for log correlation.
    private final String id;

    InternalConnectionEndpoint(final PoolEntry<HttpRoute, ManagedAsyncClientConnection> poolEntry) {
        this.poolEntryRef = new AtomicReference<>(poolEntry);
        this.id = INCREMENTING_ID.getNextId();
    }

    @Override
    public String getId() {
        return id;
    }

    // Returns the attached pool entry or fails if the endpoint was detached.
    PoolEntry<HttpRoute, ManagedAsyncClientConnection> getPoolEntry() {
        final PoolEntry<HttpRoute, ManagedAsyncClientConnection> poolEntry = poolEntryRef.get();
        if (poolEntry == null) {
            throw new ConnectionShutdownException();
        }
        return poolEntry;
    }

    // Like getPoolEntry() but additionally requires a connection to be attached.
    PoolEntry<HttpRoute, ManagedAsyncClientConnection> getValidatedPoolEntry() {
        final PoolEntry<HttpRoute, ManagedAsyncClientConnection> poolEntry = getPoolEntry();
        if (poolEntry.getConnection() == null) {
            throw new ConnectionShutdownException();
        }
        return poolEntry;
    }

    // Atomically severs the endpoint from its pool entry; returns null if
    // already detached. Used by release() to guarantee single release.
    PoolEntry<HttpRoute, ManagedAsyncClientConnection> detach() {
        return poolEntryRef.getAndSet(null);
    }

    @Override
    public void close(final CloseMode closeMode) {
        final PoolEntry<HttpRoute, ManagedAsyncClientConnection> poolEntry = poolEntryRef.get();
        if (poolEntry != null) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("{} close {}", id, closeMode);
            }
            poolEntry.discardConnection(closeMode);
        }
    }

    @Override
    public boolean isConnected() {
        final PoolEntry<HttpRoute, ManagedAsyncClientConnection> poolEntry = poolEntryRef.get();
        if (poolEntry == null) {
            return false;
        }
        final ManagedAsyncClientConnection connection = poolEntry.getConnection();
        if (connection == null) {
            return false;
        }
        if (!connection.isOpen()) {
            // Connection found dead: drop it so the entry is not re-used as-is.
            poolEntry.discardConnection(CloseMode.IMMEDIATE);
            return false;
        }
        return true;
    }

    @Override
    public void setSocketTimeout(final Timeout timeout) {
        getValidatedPoolEntry().getConnection().setSocketTimeout(timeout);
    }

    @Override
    public void execute(
            final String exchangeId,
            final AsyncClientExchangeHandler exchangeHandler,
            final HandlerFactory<AsyncPushConsumer> pushHandlerFactory,
            final HttpContext context) {
        final ManagedAsyncClientConnection connection = getValidatedPoolEntry().getConnection();
        if (LOG.isDebugEnabled()) {
            LOG.debug("{} executing exchange {} over {}", id, exchangeId, ConnPoolSupport.getId(connection));
        }
        context.setProtocolVersion(connection.getProtocolVersion());
        connection.submitCommand(
                new RequestExecutionCommand(exchangeHandler, pushHandlerFactory, context),
                Command.Priority.NORMAL);
    }

    @Override
    public EndpointInfo getInfo() {
        final PoolEntry<HttpRoute, ManagedAsyncClientConnection> poolEntry = poolEntryRef.get();
        if (poolEntry != null) {
            final ManagedAsyncClientConnection connection = poolEntry.getConnection();
            if (connection != null && connection.isOpen()) {
                final TlsDetails tlsDetails = connection.getTlsDetails();
                return new EndpointInfo(connection.getProtocolVersion(), tlsDetails != null ? tlsDetails.getSSLSession() : null);
            }
        }
        // No live connection attached.
        return null;
    }

    @Override
    public HttpConnection get() {
        final PoolEntry<HttpRoute, ManagedAsyncClientConnection> poolEntry = poolEntryRef.get();
        return poolEntry != null ? poolEntry.getConnection() : null;
    }

}
/**
 * Method that can be called to determine whether the connection manager has been shut down and
 * is closed or not. The flag is set exactly once by {@link #close(CloseMode)}.
 *
 * @return {@code true} if the connection manager has been shut down and is closed, otherwise
 * return {@code false}.
 * @since 5.4
 */
public boolean isClosed() {
    return this.closed.get();
}
}
|
googleapis/google-cloud-java | 36,338 | java-oracledatabase/google-cloud-oracledatabase/src/main/java/com/google/cloud/oracledatabase/v1/OracleDatabaseSettings.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.oracledatabase.v1;
import static com.google.cloud.oracledatabase.v1.OracleDatabaseClient.ListAutonomousDatabaseBackupsPagedResponse;
import static com.google.cloud.oracledatabase.v1.OracleDatabaseClient.ListAutonomousDatabaseCharacterSetsPagedResponse;
import static com.google.cloud.oracledatabase.v1.OracleDatabaseClient.ListAutonomousDatabasesPagedResponse;
import static com.google.cloud.oracledatabase.v1.OracleDatabaseClient.ListAutonomousDbVersionsPagedResponse;
import static com.google.cloud.oracledatabase.v1.OracleDatabaseClient.ListCloudExadataInfrastructuresPagedResponse;
import static com.google.cloud.oracledatabase.v1.OracleDatabaseClient.ListCloudVmClustersPagedResponse;
import static com.google.cloud.oracledatabase.v1.OracleDatabaseClient.ListDbNodesPagedResponse;
import static com.google.cloud.oracledatabase.v1.OracleDatabaseClient.ListDbServersPagedResponse;
import static com.google.cloud.oracledatabase.v1.OracleDatabaseClient.ListDbSystemShapesPagedResponse;
import static com.google.cloud.oracledatabase.v1.OracleDatabaseClient.ListEntitlementsPagedResponse;
import static com.google.cloud.oracledatabase.v1.OracleDatabaseClient.ListGiVersionsPagedResponse;
import static com.google.cloud.oracledatabase.v1.OracleDatabaseClient.ListLocationsPagedResponse;
import com.google.api.core.ApiFunction;
import com.google.api.core.BetaApi;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider;
import com.google.api.gax.httpjson.InstantiatingHttpJsonChannelProvider;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.ClientSettings;
import com.google.api.gax.rpc.OperationCallSettings;
import com.google.api.gax.rpc.PagedCallSettings;
import com.google.api.gax.rpc.TransportChannelProvider;
import com.google.api.gax.rpc.UnaryCallSettings;
import com.google.cloud.location.GetLocationRequest;
import com.google.cloud.location.ListLocationsRequest;
import com.google.cloud.location.ListLocationsResponse;
import com.google.cloud.location.Location;
import com.google.cloud.oracledatabase.v1.stub.OracleDatabaseStubSettings;
import com.google.longrunning.Operation;
import com.google.protobuf.Empty;
import java.io.IOException;
import java.util.List;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
 * Settings class to configure an instance of {@link OracleDatabaseClient}.
 *
 * <p>The default instance has everything set to sensible defaults:
 *
 * <ul>
 *   <li>The default service address (oracledatabase.googleapis.com) and default port (443) are
 *       used.
 *   <li>Credentials are acquired automatically through Application Default Credentials.
 *   <li>Retries are configured for idempotent methods but not for non-idempotent methods.
 * </ul>
 *
 * <p>The builder of this class is recursive, so contained classes are themselves builders. When
 * build() is called, the tree of builders is called to create the complete settings object.
 *
 * <p>For example, to set the
 * [RetrySettings](https://cloud.google.com/java/docs/reference/gax/latest/com.google.api.gax.retrying.RetrySettings)
 * of getCloudExadataInfrastructure:
 *
 * <pre>{@code
 * // This snippet has been automatically generated and should be regarded as a code template only.
 * // It will require modifications to work:
 * // - It may require correct/in-range values for request initialization.
 * // - It may require specifying regional endpoints when creating the service client as shown in
 * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
 * OracleDatabaseSettings.Builder oracleDatabaseSettingsBuilder =
 *     OracleDatabaseSettings.newBuilder();
 * oracleDatabaseSettingsBuilder
 *     .getCloudExadataInfrastructureSettings()
 *     .setRetrySettings(
 *         oracleDatabaseSettingsBuilder
 *             .getCloudExadataInfrastructureSettings()
 *             .getRetrySettings()
 *             .toBuilder()
 *             .setInitialRetryDelayDuration(Duration.ofSeconds(1))
 *             .setInitialRpcTimeoutDuration(Duration.ofSeconds(5))
 *             .setMaxAttempts(5)
 *             .setMaxRetryDelayDuration(Duration.ofSeconds(30))
 *             .setMaxRpcTimeoutDuration(Duration.ofSeconds(60))
 *             .setRetryDelayMultiplier(1.3)
 *             .setRpcTimeoutMultiplier(1.5)
 *             .setTotalTimeoutDuration(Duration.ofSeconds(300))
 *             .build());
 * OracleDatabaseSettings oracleDatabaseSettings = oracleDatabaseSettingsBuilder.build();
 * }</pre>
 *
 * Please refer to the [Client Side Retry
 * Guide](https://github.com/googleapis/google-cloud-java/blob/main/docs/client_retries.md) for
 * additional support in setting retries.
 *
 * <p>To configure the RetrySettings of a Long Running Operation method, create an
 * OperationTimedPollAlgorithm object and update the RPC's polling algorithm. For example, to
 * configure the RetrySettings for createCloudExadataInfrastructure:
 *
 * <pre>{@code
 * // This snippet has been automatically generated and should be regarded as a code template only.
 * // It will require modifications to work:
 * // - It may require correct/in-range values for request initialization.
 * // - It may require specifying regional endpoints when creating the service client as shown in
 * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
 * OracleDatabaseSettings.Builder oracleDatabaseSettingsBuilder =
 *     OracleDatabaseSettings.newBuilder();
 * TimedRetryAlgorithm timedRetryAlgorithm =
 *     OperationTimedPollAlgorithm.create(
 *         RetrySettings.newBuilder()
 *             .setInitialRetryDelayDuration(Duration.ofMillis(500))
 *             .setRetryDelayMultiplier(1.5)
 *             .setMaxRetryDelayDuration(Duration.ofMillis(5000))
 *             .setTotalTimeoutDuration(Duration.ofHours(24))
 *             .build());
 * oracleDatabaseSettingsBuilder
 *     .createCloudExadataInfrastructureOperationSettings()
 *     .setPollingAlgorithm(timedRetryAlgorithm)
 *     .build();
 * }</pre>
 */
@Generated("by gapic-generator-java")
public class OracleDatabaseSettings extends ClientSettings<OracleDatabaseSettings> {
  // NOTE: generated code — every accessor below delegates to the same-named method on the
  // underlying OracleDatabaseStubSettings held by the ClientSettings base class.
  /** Returns the object with the settings used for calls to listCloudExadataInfrastructures. */
  public PagedCallSettings<
          ListCloudExadataInfrastructuresRequest,
          ListCloudExadataInfrastructuresResponse,
          ListCloudExadataInfrastructuresPagedResponse>
      listCloudExadataInfrastructuresSettings() {
    return ((OracleDatabaseStubSettings) getStubSettings())
        .listCloudExadataInfrastructuresSettings();
  }
  /** Returns the object with the settings used for calls to getCloudExadataInfrastructure. */
  public UnaryCallSettings<GetCloudExadataInfrastructureRequest, CloudExadataInfrastructure>
      getCloudExadataInfrastructureSettings() {
    return ((OracleDatabaseStubSettings) getStubSettings()).getCloudExadataInfrastructureSettings();
  }
  /** Returns the object with the settings used for calls to createCloudExadataInfrastructure. */
  public UnaryCallSettings<CreateCloudExadataInfrastructureRequest, Operation>
      createCloudExadataInfrastructureSettings() {
    return ((OracleDatabaseStubSettings) getStubSettings())
        .createCloudExadataInfrastructureSettings();
  }
  /** Returns the object with the settings used for calls to createCloudExadataInfrastructure. */
  public OperationCallSettings<
          CreateCloudExadataInfrastructureRequest, CloudExadataInfrastructure, OperationMetadata>
      createCloudExadataInfrastructureOperationSettings() {
    return ((OracleDatabaseStubSettings) getStubSettings())
        .createCloudExadataInfrastructureOperationSettings();
  }
  /** Returns the object with the settings used for calls to deleteCloudExadataInfrastructure. */
  public UnaryCallSettings<DeleteCloudExadataInfrastructureRequest, Operation>
      deleteCloudExadataInfrastructureSettings() {
    return ((OracleDatabaseStubSettings) getStubSettings())
        .deleteCloudExadataInfrastructureSettings();
  }
  /** Returns the object with the settings used for calls to deleteCloudExadataInfrastructure. */
  public OperationCallSettings<DeleteCloudExadataInfrastructureRequest, Empty, OperationMetadata>
      deleteCloudExadataInfrastructureOperationSettings() {
    return ((OracleDatabaseStubSettings) getStubSettings())
        .deleteCloudExadataInfrastructureOperationSettings();
  }
  /** Returns the object with the settings used for calls to listCloudVmClusters. */
  public PagedCallSettings<
          ListCloudVmClustersRequest, ListCloudVmClustersResponse, ListCloudVmClustersPagedResponse>
      listCloudVmClustersSettings() {
    return ((OracleDatabaseStubSettings) getStubSettings()).listCloudVmClustersSettings();
  }
  /** Returns the object with the settings used for calls to getCloudVmCluster. */
  public UnaryCallSettings<GetCloudVmClusterRequest, CloudVmCluster> getCloudVmClusterSettings() {
    return ((OracleDatabaseStubSettings) getStubSettings()).getCloudVmClusterSettings();
  }
  /** Returns the object with the settings used for calls to createCloudVmCluster. */
  public UnaryCallSettings<CreateCloudVmClusterRequest, Operation> createCloudVmClusterSettings() {
    return ((OracleDatabaseStubSettings) getStubSettings()).createCloudVmClusterSettings();
  }
  /** Returns the object with the settings used for calls to createCloudVmCluster. */
  public OperationCallSettings<CreateCloudVmClusterRequest, CloudVmCluster, OperationMetadata>
      createCloudVmClusterOperationSettings() {
    return ((OracleDatabaseStubSettings) getStubSettings()).createCloudVmClusterOperationSettings();
  }
  /** Returns the object with the settings used for calls to deleteCloudVmCluster. */
  public UnaryCallSettings<DeleteCloudVmClusterRequest, Operation> deleteCloudVmClusterSettings() {
    return ((OracleDatabaseStubSettings) getStubSettings()).deleteCloudVmClusterSettings();
  }
  /** Returns the object with the settings used for calls to deleteCloudVmCluster. */
  public OperationCallSettings<DeleteCloudVmClusterRequest, Empty, OperationMetadata>
      deleteCloudVmClusterOperationSettings() {
    return ((OracleDatabaseStubSettings) getStubSettings()).deleteCloudVmClusterOperationSettings();
  }
  /** Returns the object with the settings used for calls to listEntitlements. */
  public PagedCallSettings<
          ListEntitlementsRequest, ListEntitlementsResponse, ListEntitlementsPagedResponse>
      listEntitlementsSettings() {
    return ((OracleDatabaseStubSettings) getStubSettings()).listEntitlementsSettings();
  }
  /** Returns the object with the settings used for calls to listDbServers. */
  public PagedCallSettings<ListDbServersRequest, ListDbServersResponse, ListDbServersPagedResponse>
      listDbServersSettings() {
    return ((OracleDatabaseStubSettings) getStubSettings()).listDbServersSettings();
  }
  /** Returns the object with the settings used for calls to listDbNodes. */
  public PagedCallSettings<ListDbNodesRequest, ListDbNodesResponse, ListDbNodesPagedResponse>
      listDbNodesSettings() {
    return ((OracleDatabaseStubSettings) getStubSettings()).listDbNodesSettings();
  }
  /** Returns the object with the settings used for calls to listGiVersions. */
  public PagedCallSettings<
          ListGiVersionsRequest, ListGiVersionsResponse, ListGiVersionsPagedResponse>
      listGiVersionsSettings() {
    return ((OracleDatabaseStubSettings) getStubSettings()).listGiVersionsSettings();
  }
  /** Returns the object with the settings used for calls to listDbSystemShapes. */
  public PagedCallSettings<
          ListDbSystemShapesRequest, ListDbSystemShapesResponse, ListDbSystemShapesPagedResponse>
      listDbSystemShapesSettings() {
    return ((OracleDatabaseStubSettings) getStubSettings()).listDbSystemShapesSettings();
  }
  /** Returns the object with the settings used for calls to listAutonomousDatabases. */
  public PagedCallSettings<
          ListAutonomousDatabasesRequest,
          ListAutonomousDatabasesResponse,
          ListAutonomousDatabasesPagedResponse>
      listAutonomousDatabasesSettings() {
    return ((OracleDatabaseStubSettings) getStubSettings()).listAutonomousDatabasesSettings();
  }
  /** Returns the object with the settings used for calls to getAutonomousDatabase. */
  public UnaryCallSettings<GetAutonomousDatabaseRequest, AutonomousDatabase>
      getAutonomousDatabaseSettings() {
    return ((OracleDatabaseStubSettings) getStubSettings()).getAutonomousDatabaseSettings();
  }
  /** Returns the object with the settings used for calls to createAutonomousDatabase. */
  public UnaryCallSettings<CreateAutonomousDatabaseRequest, Operation>
      createAutonomousDatabaseSettings() {
    return ((OracleDatabaseStubSettings) getStubSettings()).createAutonomousDatabaseSettings();
  }
  /** Returns the object with the settings used for calls to createAutonomousDatabase. */
  public OperationCallSettings<
          CreateAutonomousDatabaseRequest, AutonomousDatabase, OperationMetadata>
      createAutonomousDatabaseOperationSettings() {
    return ((OracleDatabaseStubSettings) getStubSettings())
        .createAutonomousDatabaseOperationSettings();
  }
  /** Returns the object with the settings used for calls to deleteAutonomousDatabase. */
  public UnaryCallSettings<DeleteAutonomousDatabaseRequest, Operation>
      deleteAutonomousDatabaseSettings() {
    return ((OracleDatabaseStubSettings) getStubSettings()).deleteAutonomousDatabaseSettings();
  }
  /** Returns the object with the settings used for calls to deleteAutonomousDatabase. */
  public OperationCallSettings<DeleteAutonomousDatabaseRequest, Empty, OperationMetadata>
      deleteAutonomousDatabaseOperationSettings() {
    return ((OracleDatabaseStubSettings) getStubSettings())
        .deleteAutonomousDatabaseOperationSettings();
  }
  /** Returns the object with the settings used for calls to restoreAutonomousDatabase. */
  public UnaryCallSettings<RestoreAutonomousDatabaseRequest, Operation>
      restoreAutonomousDatabaseSettings() {
    return ((OracleDatabaseStubSettings) getStubSettings()).restoreAutonomousDatabaseSettings();
  }
  /** Returns the object with the settings used for calls to restoreAutonomousDatabase. */
  public OperationCallSettings<
          RestoreAutonomousDatabaseRequest, AutonomousDatabase, OperationMetadata>
      restoreAutonomousDatabaseOperationSettings() {
    return ((OracleDatabaseStubSettings) getStubSettings())
        .restoreAutonomousDatabaseOperationSettings();
  }
  /** Returns the object with the settings used for calls to generateAutonomousDatabaseWallet. */
  public UnaryCallSettings<
          GenerateAutonomousDatabaseWalletRequest, GenerateAutonomousDatabaseWalletResponse>
      generateAutonomousDatabaseWalletSettings() {
    return ((OracleDatabaseStubSettings) getStubSettings())
        .generateAutonomousDatabaseWalletSettings();
  }
  /** Returns the object with the settings used for calls to listAutonomousDbVersions. */
  public PagedCallSettings<
          ListAutonomousDbVersionsRequest,
          ListAutonomousDbVersionsResponse,
          ListAutonomousDbVersionsPagedResponse>
      listAutonomousDbVersionsSettings() {
    return ((OracleDatabaseStubSettings) getStubSettings()).listAutonomousDbVersionsSettings();
  }
  /** Returns the object with the settings used for calls to listAutonomousDatabaseCharacterSets. */
  public PagedCallSettings<
          ListAutonomousDatabaseCharacterSetsRequest,
          ListAutonomousDatabaseCharacterSetsResponse,
          ListAutonomousDatabaseCharacterSetsPagedResponse>
      listAutonomousDatabaseCharacterSetsSettings() {
    return ((OracleDatabaseStubSettings) getStubSettings())
        .listAutonomousDatabaseCharacterSetsSettings();
  }
  /** Returns the object with the settings used for calls to listAutonomousDatabaseBackups. */
  public PagedCallSettings<
          ListAutonomousDatabaseBackupsRequest,
          ListAutonomousDatabaseBackupsResponse,
          ListAutonomousDatabaseBackupsPagedResponse>
      listAutonomousDatabaseBackupsSettings() {
    return ((OracleDatabaseStubSettings) getStubSettings()).listAutonomousDatabaseBackupsSettings();
  }
  /** Returns the object with the settings used for calls to stopAutonomousDatabase. */
  public UnaryCallSettings<StopAutonomousDatabaseRequest, Operation>
      stopAutonomousDatabaseSettings() {
    return ((OracleDatabaseStubSettings) getStubSettings()).stopAutonomousDatabaseSettings();
  }
  /** Returns the object with the settings used for calls to stopAutonomousDatabase. */
  public OperationCallSettings<StopAutonomousDatabaseRequest, AutonomousDatabase, OperationMetadata>
      stopAutonomousDatabaseOperationSettings() {
    return ((OracleDatabaseStubSettings) getStubSettings())
        .stopAutonomousDatabaseOperationSettings();
  }
  /** Returns the object with the settings used for calls to startAutonomousDatabase. */
  public UnaryCallSettings<StartAutonomousDatabaseRequest, Operation>
      startAutonomousDatabaseSettings() {
    return ((OracleDatabaseStubSettings) getStubSettings()).startAutonomousDatabaseSettings();
  }
  /** Returns the object with the settings used for calls to startAutonomousDatabase. */
  public OperationCallSettings<
          StartAutonomousDatabaseRequest, AutonomousDatabase, OperationMetadata>
      startAutonomousDatabaseOperationSettings() {
    return ((OracleDatabaseStubSettings) getStubSettings())
        .startAutonomousDatabaseOperationSettings();
  }
  /** Returns the object with the settings used for calls to restartAutonomousDatabase. */
  public UnaryCallSettings<RestartAutonomousDatabaseRequest, Operation>
      restartAutonomousDatabaseSettings() {
    return ((OracleDatabaseStubSettings) getStubSettings()).restartAutonomousDatabaseSettings();
  }
  /** Returns the object with the settings used for calls to restartAutonomousDatabase. */
  public OperationCallSettings<
          RestartAutonomousDatabaseRequest, AutonomousDatabase, OperationMetadata>
      restartAutonomousDatabaseOperationSettings() {
    return ((OracleDatabaseStubSettings) getStubSettings())
        .restartAutonomousDatabaseOperationSettings();
  }
  /** Returns the object with the settings used for calls to listLocations. */
  public PagedCallSettings<ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
      listLocationsSettings() {
    return ((OracleDatabaseStubSettings) getStubSettings()).listLocationsSettings();
  }
  /** Returns the object with the settings used for calls to getLocation. */
  public UnaryCallSettings<GetLocationRequest, Location> getLocationSettings() {
    return ((OracleDatabaseStubSettings) getStubSettings()).getLocationSettings();
  }
  /** Returns a new instance of this settings class backed by the given stub settings. */
  public static final OracleDatabaseSettings create(OracleDatabaseStubSettings stub)
      throws IOException {
    return new OracleDatabaseSettings.Builder(stub.toBuilder()).build();
  }
  /** Returns a builder for the default ExecutorProvider for this service. */
  public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
    return OracleDatabaseStubSettings.defaultExecutorProviderBuilder();
  }
  /** Returns the default service endpoint. */
  public static String getDefaultEndpoint() {
    return OracleDatabaseStubSettings.getDefaultEndpoint();
  }
  /** Returns the default service scopes. */
  public static List<String> getDefaultServiceScopes() {
    return OracleDatabaseStubSettings.getDefaultServiceScopes();
  }
  /** Returns a builder for the default credentials for this service. */
  public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
    return OracleDatabaseStubSettings.defaultCredentialsProviderBuilder();
  }
  /** Returns a builder for the default gRPC ChannelProvider for this service. */
  public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() {
    return OracleDatabaseStubSettings.defaultGrpcTransportProviderBuilder();
  }
  /** Returns a builder for the default REST ChannelProvider for this service. */
  @BetaApi
  public static InstantiatingHttpJsonChannelProvider.Builder
      defaultHttpJsonTransportProviderBuilder() {
    return OracleDatabaseStubSettings.defaultHttpJsonTransportProviderBuilder();
  }
  /** Returns the default TransportChannelProvider for this service. */
  public static TransportChannelProvider defaultTransportChannelProvider() {
    return OracleDatabaseStubSettings.defaultTransportChannelProvider();
  }
  /** Returns a builder for the default ApiClientHeaderProvider for this service. */
  public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
    return OracleDatabaseStubSettings.defaultApiClientHeaderProviderBuilder();
  }
  /** Returns a new gRPC builder for this class. */
  public static Builder newBuilder() {
    return Builder.createDefault();
  }
  /** Returns a new REST builder for this class. */
  public static Builder newHttpJsonBuilder() {
    return Builder.createHttpJsonDefault();
  }
  /** Returns a new builder for this class. */
  public static Builder newBuilder(ClientContext clientContext) {
    return new Builder(clientContext);
  }
  /** Returns a builder containing all the values of this settings class. */
  public Builder toBuilder() {
    return new Builder(this);
  }
  protected OracleDatabaseSettings(Builder settingsBuilder) throws IOException {
    super(settingsBuilder);
  }
  /** Builder for OracleDatabaseSettings. */
  public static class Builder extends ClientSettings.Builder<OracleDatabaseSettings, Builder> {
    protected Builder() throws IOException {
      this(((ClientContext) null));
    }
    protected Builder(ClientContext clientContext) {
      super(OracleDatabaseStubSettings.newBuilder(clientContext));
    }
    protected Builder(OracleDatabaseSettings settings) {
      super(settings.getStubSettings().toBuilder());
    }
    protected Builder(OracleDatabaseStubSettings.Builder stubSettings) {
      super(stubSettings);
    }
    private static Builder createDefault() {
      return new Builder(OracleDatabaseStubSettings.newBuilder());
    }
    private static Builder createHttpJsonDefault() {
      return new Builder(OracleDatabaseStubSettings.newHttpJsonBuilder());
    }
    public OracleDatabaseStubSettings.Builder getStubSettingsBuilder() {
      return ((OracleDatabaseStubSettings.Builder) getStubSettings());
    }
    /**
     * Applies the given settings updater function to all of the unary API methods in this service.
     *
     * <p>Note: This method does not support applying settings to streaming methods.
     */
    public Builder applyToAllUnaryMethods(
        ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) {
      super.applyToAllUnaryMethods(
          getStubSettingsBuilder().unaryMethodSettingsBuilders(), settingsUpdater);
      return this;
    }
    /** Returns the builder for the settings used for calls to listCloudExadataInfrastructures. */
    public PagedCallSettings.Builder<
            ListCloudExadataInfrastructuresRequest,
            ListCloudExadataInfrastructuresResponse,
            ListCloudExadataInfrastructuresPagedResponse>
        listCloudExadataInfrastructuresSettings() {
      return getStubSettingsBuilder().listCloudExadataInfrastructuresSettings();
    }
    /** Returns the builder for the settings used for calls to getCloudExadataInfrastructure. */
    public UnaryCallSettings.Builder<
            GetCloudExadataInfrastructureRequest, CloudExadataInfrastructure>
        getCloudExadataInfrastructureSettings() {
      return getStubSettingsBuilder().getCloudExadataInfrastructureSettings();
    }
    /** Returns the builder for the settings used for calls to createCloudExadataInfrastructure. */
    public UnaryCallSettings.Builder<CreateCloudExadataInfrastructureRequest, Operation>
        createCloudExadataInfrastructureSettings() {
      return getStubSettingsBuilder().createCloudExadataInfrastructureSettings();
    }
    /** Returns the builder for the settings used for calls to createCloudExadataInfrastructure. */
    public OperationCallSettings.Builder<
            CreateCloudExadataInfrastructureRequest, CloudExadataInfrastructure, OperationMetadata>
        createCloudExadataInfrastructureOperationSettings() {
      return getStubSettingsBuilder().createCloudExadataInfrastructureOperationSettings();
    }
    /** Returns the builder for the settings used for calls to deleteCloudExadataInfrastructure. */
    public UnaryCallSettings.Builder<DeleteCloudExadataInfrastructureRequest, Operation>
        deleteCloudExadataInfrastructureSettings() {
      return getStubSettingsBuilder().deleteCloudExadataInfrastructureSettings();
    }
    /** Returns the builder for the settings used for calls to deleteCloudExadataInfrastructure. */
    public OperationCallSettings.Builder<
            DeleteCloudExadataInfrastructureRequest, Empty, OperationMetadata>
        deleteCloudExadataInfrastructureOperationSettings() {
      return getStubSettingsBuilder().deleteCloudExadataInfrastructureOperationSettings();
    }
    /** Returns the builder for the settings used for calls to listCloudVmClusters. */
    public PagedCallSettings.Builder<
            ListCloudVmClustersRequest,
            ListCloudVmClustersResponse,
            ListCloudVmClustersPagedResponse>
        listCloudVmClustersSettings() {
      return getStubSettingsBuilder().listCloudVmClustersSettings();
    }
    /** Returns the builder for the settings used for calls to getCloudVmCluster. */
    public UnaryCallSettings.Builder<GetCloudVmClusterRequest, CloudVmCluster>
        getCloudVmClusterSettings() {
      return getStubSettingsBuilder().getCloudVmClusterSettings();
    }
    /** Returns the builder for the settings used for calls to createCloudVmCluster. */
    public UnaryCallSettings.Builder<CreateCloudVmClusterRequest, Operation>
        createCloudVmClusterSettings() {
      return getStubSettingsBuilder().createCloudVmClusterSettings();
    }
    /** Returns the builder for the settings used for calls to createCloudVmCluster. */
    public OperationCallSettings.Builder<
            CreateCloudVmClusterRequest, CloudVmCluster, OperationMetadata>
        createCloudVmClusterOperationSettings() {
      return getStubSettingsBuilder().createCloudVmClusterOperationSettings();
    }
    /** Returns the builder for the settings used for calls to deleteCloudVmCluster. */
    public UnaryCallSettings.Builder<DeleteCloudVmClusterRequest, Operation>
        deleteCloudVmClusterSettings() {
      return getStubSettingsBuilder().deleteCloudVmClusterSettings();
    }
    /** Returns the builder for the settings used for calls to deleteCloudVmCluster. */
    public OperationCallSettings.Builder<DeleteCloudVmClusterRequest, Empty, OperationMetadata>
        deleteCloudVmClusterOperationSettings() {
      return getStubSettingsBuilder().deleteCloudVmClusterOperationSettings();
    }
    /** Returns the builder for the settings used for calls to listEntitlements. */
    public PagedCallSettings.Builder<
            ListEntitlementsRequest, ListEntitlementsResponse, ListEntitlementsPagedResponse>
        listEntitlementsSettings() {
      return getStubSettingsBuilder().listEntitlementsSettings();
    }
    /** Returns the builder for the settings used for calls to listDbServers. */
    public PagedCallSettings.Builder<
            ListDbServersRequest, ListDbServersResponse, ListDbServersPagedResponse>
        listDbServersSettings() {
      return getStubSettingsBuilder().listDbServersSettings();
    }
    /** Returns the builder for the settings used for calls to listDbNodes. */
    public PagedCallSettings.Builder<
            ListDbNodesRequest, ListDbNodesResponse, ListDbNodesPagedResponse>
        listDbNodesSettings() {
      return getStubSettingsBuilder().listDbNodesSettings();
    }
    /** Returns the builder for the settings used for calls to listGiVersions. */
    public PagedCallSettings.Builder<
            ListGiVersionsRequest, ListGiVersionsResponse, ListGiVersionsPagedResponse>
        listGiVersionsSettings() {
      return getStubSettingsBuilder().listGiVersionsSettings();
    }
    /** Returns the builder for the settings used for calls to listDbSystemShapes. */
    public PagedCallSettings.Builder<
            ListDbSystemShapesRequest, ListDbSystemShapesResponse, ListDbSystemShapesPagedResponse>
        listDbSystemShapesSettings() {
      return getStubSettingsBuilder().listDbSystemShapesSettings();
    }
    /** Returns the builder for the settings used for calls to listAutonomousDatabases. */
    public PagedCallSettings.Builder<
            ListAutonomousDatabasesRequest,
            ListAutonomousDatabasesResponse,
            ListAutonomousDatabasesPagedResponse>
        listAutonomousDatabasesSettings() {
      return getStubSettingsBuilder().listAutonomousDatabasesSettings();
    }
    /** Returns the builder for the settings used for calls to getAutonomousDatabase. */
    public UnaryCallSettings.Builder<GetAutonomousDatabaseRequest, AutonomousDatabase>
        getAutonomousDatabaseSettings() {
      return getStubSettingsBuilder().getAutonomousDatabaseSettings();
    }
    /** Returns the builder for the settings used for calls to createAutonomousDatabase. */
    public UnaryCallSettings.Builder<CreateAutonomousDatabaseRequest, Operation>
        createAutonomousDatabaseSettings() {
      return getStubSettingsBuilder().createAutonomousDatabaseSettings();
    }
    /** Returns the builder for the settings used for calls to createAutonomousDatabase. */
    public OperationCallSettings.Builder<
            CreateAutonomousDatabaseRequest, AutonomousDatabase, OperationMetadata>
        createAutonomousDatabaseOperationSettings() {
      return getStubSettingsBuilder().createAutonomousDatabaseOperationSettings();
    }
    /** Returns the builder for the settings used for calls to deleteAutonomousDatabase. */
    public UnaryCallSettings.Builder<DeleteAutonomousDatabaseRequest, Operation>
        deleteAutonomousDatabaseSettings() {
      return getStubSettingsBuilder().deleteAutonomousDatabaseSettings();
    }
    /** Returns the builder for the settings used for calls to deleteAutonomousDatabase. */
    public OperationCallSettings.Builder<DeleteAutonomousDatabaseRequest, Empty, OperationMetadata>
        deleteAutonomousDatabaseOperationSettings() {
      return getStubSettingsBuilder().deleteAutonomousDatabaseOperationSettings();
    }
    /** Returns the builder for the settings used for calls to restoreAutonomousDatabase. */
    public UnaryCallSettings.Builder<RestoreAutonomousDatabaseRequest, Operation>
        restoreAutonomousDatabaseSettings() {
      return getStubSettingsBuilder().restoreAutonomousDatabaseSettings();
    }
    /** Returns the builder for the settings used for calls to restoreAutonomousDatabase. */
    public OperationCallSettings.Builder<
            RestoreAutonomousDatabaseRequest, AutonomousDatabase, OperationMetadata>
        restoreAutonomousDatabaseOperationSettings() {
      return getStubSettingsBuilder().restoreAutonomousDatabaseOperationSettings();
    }
    /** Returns the builder for the settings used for calls to generateAutonomousDatabaseWallet. */
    public UnaryCallSettings.Builder<
            GenerateAutonomousDatabaseWalletRequest, GenerateAutonomousDatabaseWalletResponse>
        generateAutonomousDatabaseWalletSettings() {
      return getStubSettingsBuilder().generateAutonomousDatabaseWalletSettings();
    }
    /** Returns the builder for the settings used for calls to listAutonomousDbVersions. */
    public PagedCallSettings.Builder<
            ListAutonomousDbVersionsRequest,
            ListAutonomousDbVersionsResponse,
            ListAutonomousDbVersionsPagedResponse>
        listAutonomousDbVersionsSettings() {
      return getStubSettingsBuilder().listAutonomousDbVersionsSettings();
    }
    /**
     * Returns the builder for the settings used for calls to listAutonomousDatabaseCharacterSets.
     */
    public PagedCallSettings.Builder<
            ListAutonomousDatabaseCharacterSetsRequest,
            ListAutonomousDatabaseCharacterSetsResponse,
            ListAutonomousDatabaseCharacterSetsPagedResponse>
        listAutonomousDatabaseCharacterSetsSettings() {
      return getStubSettingsBuilder().listAutonomousDatabaseCharacterSetsSettings();
    }
    /** Returns the builder for the settings used for calls to listAutonomousDatabaseBackups. */
    public PagedCallSettings.Builder<
            ListAutonomousDatabaseBackupsRequest,
            ListAutonomousDatabaseBackupsResponse,
            ListAutonomousDatabaseBackupsPagedResponse>
        listAutonomousDatabaseBackupsSettings() {
      return getStubSettingsBuilder().listAutonomousDatabaseBackupsSettings();
    }
    /** Returns the builder for the settings used for calls to stopAutonomousDatabase. */
    public UnaryCallSettings.Builder<StopAutonomousDatabaseRequest, Operation>
        stopAutonomousDatabaseSettings() {
      return getStubSettingsBuilder().stopAutonomousDatabaseSettings();
    }
    /** Returns the builder for the settings used for calls to stopAutonomousDatabase. */
    public OperationCallSettings.Builder<
            StopAutonomousDatabaseRequest, AutonomousDatabase, OperationMetadata>
        stopAutonomousDatabaseOperationSettings() {
      return getStubSettingsBuilder().stopAutonomousDatabaseOperationSettings();
    }
    /** Returns the builder for the settings used for calls to startAutonomousDatabase. */
    public UnaryCallSettings.Builder<StartAutonomousDatabaseRequest, Operation>
        startAutonomousDatabaseSettings() {
      return getStubSettingsBuilder().startAutonomousDatabaseSettings();
    }
    /** Returns the builder for the settings used for calls to startAutonomousDatabase. */
    public OperationCallSettings.Builder<
            StartAutonomousDatabaseRequest, AutonomousDatabase, OperationMetadata>
        startAutonomousDatabaseOperationSettings() {
      return getStubSettingsBuilder().startAutonomousDatabaseOperationSettings();
    }
    /** Returns the builder for the settings used for calls to restartAutonomousDatabase. */
    public UnaryCallSettings.Builder<RestartAutonomousDatabaseRequest, Operation>
        restartAutonomousDatabaseSettings() {
      return getStubSettingsBuilder().restartAutonomousDatabaseSettings();
    }
    /** Returns the builder for the settings used for calls to restartAutonomousDatabase. */
    public OperationCallSettings.Builder<
            RestartAutonomousDatabaseRequest, AutonomousDatabase, OperationMetadata>
        restartAutonomousDatabaseOperationSettings() {
      return getStubSettingsBuilder().restartAutonomousDatabaseOperationSettings();
    }
    /** Returns the builder for the settings used for calls to listLocations. */
    public PagedCallSettings.Builder<
            ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
        listLocationsSettings() {
      return getStubSettingsBuilder().listLocationsSettings();
    }
    /** Returns the builder for the settings used for calls to getLocation. */
    public UnaryCallSettings.Builder<GetLocationRequest, Location> getLocationSettings() {
      return getStubSettingsBuilder().getLocationSettings();
    }
    @Override
    public OracleDatabaseSettings build() throws IOException {
      return new OracleDatabaseSettings(this);
    }
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.stanbol.enhancer.jersey.resource;
import static javax.ws.rs.core.MediaType.TEXT_HTML;
import static org.apache.stanbol.enhancer.servicesapi.helper.EnhancementEngineHelper.getReference;
import static org.apache.stanbol.enhancer.servicesapi.helper.EnhancementEngineHelper.getReferences;
import static org.apache.stanbol.enhancer.servicesapi.helper.EnhancementEngineHelper.getString;
import static org.apache.stanbol.enhancer.servicesapi.rdf.OntologicalClasses.DBPEDIA_ORGANISATION;
import static org.apache.stanbol.enhancer.servicesapi.rdf.OntologicalClasses.DBPEDIA_PERSON;
import static org.apache.stanbol.enhancer.servicesapi.rdf.OntologicalClasses.DBPEDIA_PLACE;
import static org.apache.stanbol.enhancer.servicesapi.rdf.OntologicalClasses.DC_LINGUISTIC_SYSTEM;
import static org.apache.stanbol.enhancer.servicesapi.rdf.OntologicalClasses.SKOS_CONCEPT;
import static org.apache.stanbol.enhancer.servicesapi.rdf.Properties.DC_LANGUAGE;
import static org.apache.stanbol.enhancer.servicesapi.rdf.Properties.DC_RELATION;
import static org.apache.stanbol.enhancer.servicesapi.rdf.Properties.DC_TYPE;
import static org.apache.stanbol.enhancer.servicesapi.rdf.Properties.ENHANCER_CONFIDENCE;
import static org.apache.stanbol.enhancer.servicesapi.rdf.Properties.ENHANCER_END;
import static org.apache.stanbol.enhancer.servicesapi.rdf.Properties.ENHANCER_ENTITY_LABEL;
import static org.apache.stanbol.enhancer.servicesapi.rdf.Properties.ENHANCER_ENTITY_REFERENCE;
import static org.apache.stanbol.enhancer.servicesapi.rdf.Properties.ENHANCER_START;
import static org.apache.stanbol.enhancer.servicesapi.rdf.Properties.GEO_LAT;
import static org.apache.stanbol.enhancer.servicesapi.rdf.Properties.GEO_LONG;
import static org.apache.stanbol.enhancer.servicesapi.rdf.TechnicalClasses.ENHANCER_ENTITYANNOTATION;
import static org.apache.stanbol.enhancer.servicesapi.rdf.TechnicalClasses.ENHANCER_TEXTANNOTATION;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.io.UnsupportedEncodingException;
import java.net.URI;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.TreeMap;
import javax.ws.rs.GET;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.ResponseBuilder;
import javax.ws.rs.core.UriInfo;
import org.apache.clerezza.commons.rdf.Language;
import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
import org.apache.clerezza.commons.rdf.RDFTerm;
import org.apache.clerezza.commons.rdf.Triple;
import org.apache.clerezza.commons.rdf.Graph;
import org.apache.clerezza.commons.rdf.IRI;
import org.apache.clerezza.commons.rdf.Literal;
import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
import org.apache.clerezza.rdf.core.LiteralFactory;
import org.apache.clerezza.rdf.core.serializedform.Serializer;
import org.apache.clerezza.rdf.core.serializedform.SupportedFormat;
import org.apache.clerezza.rdf.core.sparql.ParseException;
import org.apache.clerezza.rdf.ontologies.RDF;
import org.apache.commons.lang.StringUtils;
import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
import org.apache.stanbol.commons.viewable.Viewable;
import org.apache.stanbol.commons.web.base.resource.BaseStanbolResource;
import org.apache.stanbol.commons.web.base.resource.LayoutConfiguration;
import org.apache.stanbol.commons.web.base.resource.TemplateLayoutConfiguration;
import org.apache.stanbol.enhancer.servicesapi.Blob;
import org.apache.stanbol.enhancer.servicesapi.ContentItem;
import org.apache.stanbol.enhancer.servicesapi.EnhancementException;
import org.apache.stanbol.enhancer.servicesapi.NoSuchPartException;
import org.apache.stanbol.enhancer.servicesapi.helper.ContentItemHelper;
import org.apache.stanbol.enhancer.servicesapi.helper.EnhancementEngineHelper;
import org.apache.stanbol.enhancer.servicesapi.helper.ExecutionMetadataHelper;
import org.apache.stanbol.enhancer.servicesapi.helper.execution.ChainExecution;
import org.apache.stanbol.enhancer.servicesapi.helper.execution.Execution;
import org.apache.stanbol.enhancer.servicesapi.rdf.ExecutionMetadata;
import org.apache.stanbol.enhancer.servicesapi.rdf.OntologicalClasses;
import org.apache.stanbol.enhancer.servicesapi.rdf.Properties;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class ContentItemResource extends TemplateLayoutConfiguration {
private final Logger log = LoggerFactory.getLogger(getClass());
// TODO make this configurable trough a property
public static final IRI SUMMARY = new IRI("http://www.w3.org/2000/01/rdf-schema#comment");
// TODO make this configurable trough a property
public static final IRI THUMBNAIL = new IRI("http://dbpedia.org/ontology/thumbnail");
public static final IRI DEPICTION = new IRI("http://xmlns.com/foaf/0.1/depiction");
public final Map<IRI,String> defaultThumbnails = new HashMap<IRI,String>();
protected ContentItem contentItem;
protected String localId;
protected String textContent;
protected URI imageSrc;
protected URI downloadHref;
protected URI metadataHref;
protected final Serializer serializer;
protected String serializationFormat = SupportedFormat.RDF_XML;
/**
* Used to format dates on the UI
*/
protected DateFormat format = new SimpleDateFormat("HH-mm-ss.SSS");
/**
* Map holding the extraction mapped by {@link Properties#DC_TYPE} and the
* {@link Properties#ENHANCER_SELECTED_TEXT}.
* This map is initialised by {@link #initOccurrences()}.
*/
protected Map<IRI,Map<EntityExtractionSummary,EntityExtractionSummary>> extractionsByTypeMap =
new HashMap<IRI,Map<EntityExtractionSummary,EntityExtractionSummary>>();
private Graph executionMetadata;
private ChainExecution chainExecution;
private List<org.apache.stanbol.enhancer.servicesapi.helper.execution.Execution> engineExecutions;
private EnhancementException enhancementException;
private LayoutConfiguration layoutConfiguration;
private UriInfo uriInfo;
public ContentItemResource(String localId,
ContentItem ci,
UriInfo uriInfo,
String storePath,
Serializer serializer,
LayoutConfiguration layoutConfiguration,
EnhancementException enhancementException) throws IOException {
this.contentItem = ci;
this.localId = localId;
this.uriInfo = uriInfo;
this.serializer = serializer;
this.layoutConfiguration = layoutConfiguration;
//this.servletContext = servletContext;
this.enhancementException = enhancementException;
if (localId != null) {
URI rawURI = uriInfo.getBaseUriBuilder().path(storePath).path("raw").path(localId).build();
Entry<IRI,Blob> plainTextContentPart = ContentItemHelper.getBlob(contentItem, Collections.singleton("text/plain"));
if (plainTextContentPart != null) {
this.textContent = ContentItemHelper.getText(plainTextContentPart.getValue());
}
if (ci.getBlob().getMimeType().startsWith("image/")) {
this.imageSrc = rawURI;
}
this.downloadHref = rawURI;
this.metadataHref = uriInfo.getBaseUriBuilder().path(storePath).path("metadata").path(localId).build();
}
defaultThumbnails.put(DBPEDIA_PERSON, getStaticRootUrl() + "/home/images/user_48.png");
defaultThumbnails.put(DBPEDIA_ORGANISATION, getStaticRootUrl() + "/home/images/organization_48.png");
defaultThumbnails.put(DBPEDIA_PLACE, getStaticRootUrl() + "/home/images/compass_48.png");
defaultThumbnails.put(SKOS_CONCEPT, getStaticRootUrl() + "/home/images/black_gear_48.png");
defaultThumbnails.put(DC_LINGUISTIC_SYSTEM, getStaticRootUrl()+"/home/images/language_48.png");
defaultThumbnails.put(null, getStaticRootUrl() + "/home/images/unknown_48.png");
long start = System.currentTimeMillis();
if(enhancementException == null){
initOccurrences();
}
//init ExecutionMetadata
try {
executionMetadata = ci.getPart(ExecutionMetadata.CHAIN_EXECUTION, Graph.class);
} catch(NoSuchPartException e){
executionMetadata = null;
}
if(executionMetadata != null){
BlankNodeOrIRI ce = ExecutionMetadataHelper.getChainExecution(executionMetadata, ci.getUri());
if(ce != null){
chainExecution = new ChainExecution(executionMetadata, ce);
engineExecutions = new ArrayList<Execution>();
for(BlankNodeOrIRI ex : ExecutionMetadataHelper.getExecutions(executionMetadata, ce)){
engineExecutions.add(new Execution(chainExecution,executionMetadata, ex));
}
Collections.sort(engineExecutions);
} else {
chainExecution = null;
engineExecutions = null;
}
}
log.info(" ... {}ms fro parsing Enhancement Reuslts",System.currentTimeMillis()-start);
}
    /** @return the layout configuration parsed in the constructor */
    @Override
    protected LayoutConfiguration getLayoutConfiguration() {
        return layoutConfiguration;
    }
    /** @return the request URI info parsed in the constructor */
    @Override
    protected UriInfo getUriInfo() {
        return uriInfo;
    }
public String getRdfMetadata(String mediatype) throws UnsupportedEncodingException {
if(enhancementException == null){
ByteArrayOutputStream out = new ByteArrayOutputStream();
serializer.serialize(out, contentItem.getMetadata(), mediatype);
return out.toString("utf-8");
} else {//in case of an exception print the stacktrace
StringWriter writer = new StringWriter();
enhancementException.printStackTrace(new PrintWriter(writer));
return writer.toString();
}
}
    /** Serializes the enhancement results using the configured default format. */
    public String getRdfMetadata() throws UnsupportedEncodingException {
        return getRdfMetadata(serializationFormat);
    }
    /** @return the presented content item */
    public ContentItem getContentItem() {
        return contentItem;
    }
    /** @return the local id within the store, or <code>null</code> if not stored */
    public String getLocalId() {
        return localId;
    }
    /** @return the "text/plain" content of the item, or <code>null</code> if none */
    public String getTextContent() {
        return textContent;
    }
    /** @return the raw content URI for image items, or <code>null</code> otherwise */
    public URI getImageSrc() {
        return imageSrc;
    }
    /** @return the URI to download the raw content, or <code>null</code> if not stored */
    public URI getDownloadHref() {
        return downloadHref;
    }
    /** @return the URI to retrieve the enhancement metadata, or <code>null</code> if not stored */
    public URI getMetadataHref() {
        return metadataHref;
    }
/**
* Checks if there are Occurrences
*/
public boolean hasOccurrences(){
for(Map<EntityExtractionSummary,EntityExtractionSummary> occ : extractionsByTypeMap.values()){
if(!occ.isEmpty()){
return true;
}
}
return false;
}
/**
* Used to print occurrences with other types than the natively supported
*/
public Collection<IRI> getOtherOccurrencyTypes(){
Set<IRI> types = new HashSet<IRI>(extractionsByTypeMap.keySet());
types.remove(DBPEDIA_PERSON);
types.remove(DBPEDIA_ORGANISATION);
types.remove(DBPEDIA_PLACE);
types.remove(SKOS_CONCEPT);
types.remove(DC_LINGUISTIC_SYSTEM);
types.remove(null); //other
return types;
}
public static String extractLabel(IRI uri){
String fullUri = uri.getUnicodeString();
int index = Math.max(fullUri.lastIndexOf('#'),fullUri.lastIndexOf('/'));
index = Math.max(index, fullUri.lastIndexOf(':'));
//do not convert if the parsed uri does not contain a local name
if(index > 0 && index+1 < fullUri.length()){
return StringUtils.capitalize(fullUri.substring(index+1).replaceAll("[\\-_]", " "));
} else {
return uri.getUnicodeString();
}
}
public Collection<EntityExtractionSummary> getOccurrences(IRI type){
Map<EntityExtractionSummary,EntityExtractionSummary> typeMap = extractionsByTypeMap.get(type);
Collection<EntityExtractionSummary> typeOccurrences;
if(typeMap != null){
typeOccurrences = typeMap.values();
} else {
typeOccurrences = Collections.emptyList();
}
return typeOccurrences;
}
    /** @return the occurrences typed as dbpedia-ont:Person */
    public Collection<EntityExtractionSummary> getPersonOccurrences() throws ParseException {
        return getOccurrences(DBPEDIA_PERSON);
    }
    /** @return the occurrences without any dc:type */
    public Collection<EntityExtractionSummary> getOtherOccurrences() throws ParseException {
        return getOccurrences(null);
    }
    /** @return the occurrences typed as dbpedia-ont:Organisation */
    public Collection<EntityExtractionSummary> getOrganizationOccurrences() throws ParseException {
        return getOccurrences(DBPEDIA_ORGANISATION);
    }
    /** @return the occurrences typed as dbpedia-ont:Place */
    public Collection<EntityExtractionSummary> getPlaceOccurrences() throws ParseException {
        return getOccurrences(DBPEDIA_PLACE);
    }
    /** @return the occurrences typed as skos:Concept */
    public Collection<EntityExtractionSummary> getConceptOccurrences() throws ParseException {
        return getOccurrences(SKOS_CONCEPT);
    }
    /**
     * Returns the Language Annotations (dc:LinguisticSystem typed text annotations)
     * @since 0.11.0
     * @return the detected languages of the content
     * @throws ParseException
     */
    public Collection<EntityExtractionSummary> getLanguageOccurrences() throws ParseException {
        return getOccurrences(OntologicalClasses.DC_LINGUISTIC_SYSTEM);
    }
enum EAProps {
label,
entity,
confidence
}
    /**
     * Parses the enhancement metadata of the content item and fills
     * {@link #extractionsByTypeMap}. Works in two passes: (1) collects all
     * fise:EntityAnnotations (suggestions) and indexes them by the
     * fise:TextAnnotations they relate to (via dc:relation); (2) iterates over
     * the fise:TextAnnotations and creates one {@link EntityExtractionSummary}
     * per (selected-text, dc:type) combination, attaching the collected
     * suggestions and recording additional mentions of the same summary.
     */
    private void initOccurrences() {
        Graph graph = contentItem.getMetadata();
        LiteralFactory lf = LiteralFactory.getInstance();
        //maps a TextAnnotation to the EntityAnnotations suggested for it
        Map<IRI,Collection<BlankNodeOrIRI>> suggestionMap = new HashMap<IRI,Collection<BlankNodeOrIRI>>();
        // 1) get Entity Annotations
        Map<BlankNodeOrIRI,Map<EAProps,Object>> entitySuggestionMap = new HashMap<BlankNodeOrIRI,Map<EAProps,Object>>();
        Iterator<Triple> entityAnnotations = graph.filter(null, RDF.type, ENHANCER_ENTITYANNOTATION);
        while(entityAnnotations.hasNext()){
            BlankNodeOrIRI entityAnnotation = entityAnnotations.next().getSubject();
            //to avoid multiple lookups (e.g. if one entityAnnotation links to+
            //several TextAnnotations) we cache the data in an intermediate Map
            Map<EAProps,Object> eaData = new EnumMap<EAProps,Object>(EAProps.class);
            eaData.put(EAProps.entity, getReference(graph, entityAnnotation, ENHANCER_ENTITY_REFERENCE));
            eaData.put(EAProps.label, getString(graph, entityAnnotation, ENHANCER_ENTITY_LABEL));
            eaData.put(EAProps.confidence, EnhancementEngineHelper.get(
                graph, entityAnnotation, ENHANCER_CONFIDENCE, Double.class, lf));
            entitySuggestionMap.put(entityAnnotation, eaData);
            //register this suggestion for all TextAnnotations it relates to
            Iterator<IRI> textAnnotations = getReferences(graph, entityAnnotation, DC_RELATION);
            while(textAnnotations.hasNext()){
                IRI textAnnotation = textAnnotations.next();
                Collection<BlankNodeOrIRI> suggestions = suggestionMap.get(textAnnotation);
                if(suggestions == null){
                    suggestions = new ArrayList<BlankNodeOrIRI>();
                    suggestionMap.put(textAnnotation, suggestions);
                }
                suggestions.add(entityAnnotation);
            }
        }
        // 2) get the TextAnnotations
        Iterator<Triple> textAnnotations = graph.filter(null, RDF.type, ENHANCER_TEXTANNOTATION);
        while(textAnnotations.hasNext()){
            BlankNodeOrIRI textAnnotation = textAnnotations.next().getSubject();
            //we need to process those to show multiple mentions
            //            if (graph.filter(textAnnotation, DC_RELATION, null).hasNext()) {
            //                // this is not the most specific occurrence of this name: skip
            //                continue;
            //            }
            String text = getString(graph, textAnnotation, Properties.ENHANCER_SELECTED_TEXT);
            //TextAnnotations without fise:selected-text are no longer ignored
            //            if(text == null){
            //                //ignore text annotations without text
            //                continue;
            //            }
            Integer start = EnhancementEngineHelper.get(graph,textAnnotation,
                ENHANCER_START,Integer.class,lf);
            Integer end = EnhancementEngineHelper.get(graph,textAnnotation,
                ENHANCER_END,Integer.class,lf);
            Double confidence = EnhancementEngineHelper.get(graph, textAnnotation,
                ENHANCER_CONFIDENCE, Double.class, lf);
            Iterator<IRI> types = getReferences(graph, textAnnotation, DC_TYPE);
            if(!types.hasNext()){ //create an iterator over null in case no types are present
                types = Collections.singleton((IRI)null).iterator();
            }
            //one summary per (text, type) combination
            while(types.hasNext()){
                IRI type = types.next();
                Map<EntityExtractionSummary,EntityExtractionSummary> occurrenceMap = extractionsByTypeMap.get(type);
                if(occurrenceMap == null){
                    //TreeMap: keeps summaries sorted by their natural order
                    occurrenceMap = new TreeMap<EntityExtractionSummary,EntityExtractionSummary>();
                    extractionsByTypeMap.put(type, occurrenceMap);
                }
                //in case of a language annotation use the detected language as label
                if(DC_LINGUISTIC_SYSTEM.equals(type)){
                    text = EnhancementEngineHelper.getString(graph, textAnnotation,
                        DC_LANGUAGE);
                }
                EntityExtractionSummary entity = new EntityExtractionSummary(text, type, start,end,confidence,defaultThumbnails);
                Collection<BlankNodeOrIRI> suggestions = suggestionMap.get(textAnnotation);
                if(suggestions != null){
                    //attach the cached suggestion data collected in pass (1)
                    for(BlankNodeOrIRI entityAnnotation : suggestions){
                        Map<EAProps,Object> eaData = entitySuggestionMap.get(entityAnnotation);
                        entity.addSuggestion(
                            (IRI)eaData.get(EAProps.entity),
                            (String)eaData.get(EAProps.label),
                            (Double)eaData.get(EAProps.confidence),
                            graph);
                    }
                }
                EntityExtractionSummary existingSummary = occurrenceMap.get(entity);
                if(existingSummary == null){//new extraction summary
                    occurrenceMap.put(entity, entity);
                } else {
                    //extraction summary with this text and suggestions already
                    //present ... only add a mention to the existing
                    existingSummary.addMention(new Mention(text, start, end, confidence));
                }
            }
        }
    }
    /**
     * Mentions of {@link EntityExtractionSummary EntityExtractionSummaries}.
     * A mention is a (name, start, end, confidence) tuple; start/end/confidence
     * may be <code>null</code> (e.g. for document-wide annotations).
     * @author Rupert Westenthaler
     *
     */
    public static class Mention implements Comparable<Mention>{
        private String name;
        private Integer start;
        private Integer end;
        private Double conf;
        /**
         * Creates a mention. A <code>null</code> name is replaced by
         * "[global]" (no occurrence at all) or "[section]" (start/end known).
         */
        Mention(String name,Integer start, Integer end, Double confidence){
            if(name == null && start == null && end == null){
                this.name = "[global]";
                //throw new IllegalStateException("The name for a Mention MUST NOT be NULL!");
            } else if(name == null) {
                this.name = "[section]";
            } else {
                this.name = name;
            }
            this.start = start;
            this.end = end;
            this.conf = confidence;
        }
        /** @return the mentioned text (never <code>null</code>) */
        public String getName() {
            return name;
        }
        /** @return the start char offset, or <code>null</code> if unknown */
        public Integer getStart() {
            return start;
        }
        /** @return the end char offset, or <code>null</code> if unknown */
        public Integer getEnd() {
            return end;
        }
        /** @return the confidence, or <code>null</code> if unknown */
        public Double getConfidence() {
            return conf;
        }
        /** @return <code>true</code> if both start and end offsets are present */
        public boolean hasOccurrence() {
            return start != null && end != null;
        }
        /** @return <code>true</code> if a confidence value is present */
        public boolean hasConfidence(){
            return conf != null;
        }
        @Override
        public int hashCode() {
            return name.hashCode() +
                    (start != null ? start.hashCode() : 0) +
                    (end != null ? end.hashCode() : 0);
        }
        @Override
        public boolean equals(Object obj) {
            //equal if name, start and end match (confidence is ignored)
            if(obj instanceof Mention){
                Mention o = (Mention)obj;
                if(o.name.equals(name)){
                    if((o.start != null && o.start.equals(start)) ||
                            (o.start == null && start == null)){
                        if(o.end != null && o.end.equals(end)){
                            return true;
                        } else {
                            return o.end == null && end == null;
                        }
                    }
                }
            }
            return false;
        }
        @Override
        public int compareTo(Mention o) {
            //NOTE(review): names are compared with the arguments swapped
            //(descending order), while start offsets sort ascending; also the
            //null handling of 'end' is the mirror image of 'start'. This looks
            //inconsistent but may be intentional UI ordering - confirm before
            //changing.
            int c = String.CASE_INSENSITIVE_ORDER.compare(o.name, this.name);
            if(c == 0){
                if(start != null && o.start != null){
                    c = start.compareTo(o.start);
                } else if(o.start != null){
                    c = 1;
                } else if(start != null){
                    c = -1;
                }
                if(c == 0){
                    if(o.end != null && end != null){
                        c = end.compareTo(o.end);
                    } else if(o.end != null){
                        c = -1;
                    } else if(end != null){
                        c = 1;
                    }
                }
            }
            return c;
        }
    }
    /** @return the chain execution metadata, or <code>null</code> if not available */
    public ChainExecution getChainExecution(){
        return chainExecution;
    }
    /** @return the sorted engine executions, or <code>null</code> if not available */
    public Collection<Execution> getEngineExecutions(){
        return engineExecutions;
    }
public String getExecutionOffsetText(Execution ex){
if(ex.getChain() == null || ex.getChain().getStarted() == null || ex.getStarted() == null){
return null;
} else {
return String.format("%6dms",ex.getStarted().getTime() - ex.getChain().getStarted().getTime());
}
}
    /**
     * Formats the duration of an execution: milliseconds below ~1s, otherwise
     * seconds with two decimals.
     */
    public String getExecutionDurationText(Execution ex){
        if(ex.getDuration() == null){
            return "[duration not available]";
        } else if(ex.getDuration() < 1025){ //NOTE(review): 1025 looks like it was meant to be 1000 (or 1024) - confirm
            return ex.getDuration()+"ms";
        } else {
            return String.format("%.2fsec",(ex.getDuration().floatValue()/1000));
        }
    }
    /**
     * Formats the start time of an execution using the shared {@link #format}.
     * NOTE(review): SimpleDateFormat is not thread safe; assumes this resource
     * instance is used by a single request thread - confirm.
     */
    public String getExecutionStartTime(Execution ex){
        if(ex.getStarted() != null){
            return format.format(ex.getStarted());
        } else {
            return "unknown";
        }
    }
    /** Formats the completion time of an execution, or "unknown" if missing. */
    public String getExecutionCompletionTime(Execution ex){
        if(ex.getCompleted() != null){
            return format.format(ex.getCompleted());
        } else {
            return "unknown";
        }
    }
    /** Maps the em:status of an execution to a short human readable string. */
    public String getExecutionStatusText(Execution ex){
        if(ExecutionMetadata.STATUS_COMPLETED.equals(ex.getStatus())){
            return "completed";
        } else if(ExecutionMetadata.STATUS_FAILED.equals(ex.getStatus())){
            return "failed";
        } else if(ExecutionMetadata.STATUS_IN_PROGRESS.equals(ex.getStatus())){
            return "in-progress";
        } else if(ExecutionMetadata.STATUS_SCHEDULED.equals(ex.getStatus())){
            return "scheduled";
        } else if(ExecutionMetadata.STATUS_SKIPPED.equals(ex.getStatus())){
            return "skipped";
        } else {
            return "unknown";
        }
    }
public static class EntityExtractionSummary implements Comparable<EntityExtractionSummary> {
protected final String name;
protected final IRI type;
protected List<EntitySuggestion> suggestions = new ArrayList<EntitySuggestion>();
protected Set<IRI> suggestionSet = new HashSet<IRI>();
protected List<Mention> mentions = new ArrayList<Mention>();
public final Map<IRI,String> defaultThumbnails;
private Integer start;
private Integer end;
private Double confidence;
        /**
         * Creates a summary for an extracted occurrence.
         * @param name the selected text; if <code>null</code> a label extracted
         * from the type URI is used instead
         * @param type the dc:type of the annotation (may be <code>null</code>)
         * @param start the start offset (may be <code>null</code>)
         * @param end the end offset (may be <code>null</code>)
         * @param confidence the confidence (may be <code>null</code>)
         * @param defaultThumbnails fallback thumbnails by type
         */
        public EntityExtractionSummary(String name, IRI type, Integer start, Integer end, Double confidence, Map<IRI,String> defaultThumbnails) {
            if(name == null){
                this.name = extractLabel(type);
            } else {
                this.name = name;
            }
            this.type = type;
            //NOTE: the Mention is created with the raw (possibly null) name so
            //it can fall back to its "[global]"/"[section]" placeholders
            mentions.add(new Mention(name, start, end, confidence));
            this.defaultThumbnails = defaultThumbnails;
            this.start = start;
            this.end = end;
            this.confidence = confidence;
        }
        /**
         * Adds an entity suggestion for this occurrence, keeping the list
         * sorted by descending confidence and free of duplicates.
         */
        public void addSuggestion(IRI uri, String label, Double confidence, Graph properties) {
            EntitySuggestion suggestion = new EntitySuggestion(uri, type, label, confidence, properties,
                    defaultThumbnails);
            suggestionSet.add(uri);
            if (!suggestions.contains(suggestion)) {
                suggestions.add(suggestion);
                Collections.sort(suggestions);
            }
        }
public void addMention(Mention mention){
if(!mentions.contains(mention)){
mentions.add(mention);
Collections.sort(mentions);
}
}
public String getName() {
EntitySuggestion bestGuess = getBestGuess();
if (bestGuess != null) {
return bestGuess.getLabel();
}
return name;
}
        /** @return the text selected by the annotation (never the suggestion label) */
        public String getSelected(){
            return name;
        }
public String getUri() {
EntitySuggestion bestGuess = getBestGuess();
if (bestGuess != null) {
return bestGuess.getUri();
}
return null;
}
public Double getConfidence(){
EntitySuggestion bestGuess = getBestGuess();
if (bestGuess != null) {
return bestGuess.getConfidence();
}
return confidence;
}
public String getSummary() {
if (suggestions.isEmpty()) {
return "";
}
return suggestions.get(0).getSummary();
}
        /** @return the start char offset, or <code>null</code> if unknown */
        public Integer getStart() {
            return start;
        }
        /** @return the end char offset, or <code>null</code> if unknown */
        public Integer getEnd() {
            return end;
        }
        /** @return <code>true</code> if both start and end offsets are known */
        public boolean hasOccurrence(){
            return start != null && end != null;
        }
public String getThumbnailSrc() {
if (suggestions.isEmpty()) {
return getMissingThumbnailSrc();
}
return suggestions.get(0).getThumbnailSrc();
}
public String getMissingThumbnailSrc() {
String source = defaultThumbnails.get(type);
if(source == null){
source = defaultThumbnails.get(null);//default
}
return source;
}
public EntitySuggestion getBestGuess() {
if (suggestions.isEmpty()) {
return null;
}
return suggestions.get(0);
}
        /** @return the suggestions sorted by descending confidence (live list) */
        public List<EntitySuggestion> getSuggestions() {
            return suggestions;
        }
        /** @return the sorted mentions of this occurrence (live list) */
        public List<Mention> getMentions() {
            return mentions;
        }
        /**
         * Orders summaries by (case insensitive) name; summaries with the same
         * name and suggestion set are considered equal; otherwise ties are
         * broken by the start/end offsets of the first mention.
         * NOTE(review): the null-handling of 'end' mirrors 'start' with
         * inverted signs - looks inconsistent but may be intended; confirm
         * before changing (used as TreeMap key in initOccurrences()).
         */
        @Override
        public int compareTo(EntityExtractionSummary o) {
            int c = String.CASE_INSENSITIVE_ORDER.compare(getName(),o.getName());
            if(c == 0){
                if(suggestionSet.equals(o.suggestionSet)){
                    return 0; //assume as equals if name and suggestionSet is the same
                } else { //sort by mention
                    if(start != null && o.start != null){
                        c = start.compareTo(o.start);
                    } else if(o.start != null){
                        c = 1;
                    } else if(start != null){
                        c = -1;
                    }
                    if(c == 0){
                        if(o.end != null && end != null){
                            c = end.compareTo(o.end);
                        } else if(o.end != null){
                            c = -1;
                        } else if(end != null){
                            c = 1;
                        }
                    }
                }
            }
            return c;
        }
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
EntityExtractionSummary that = (EntityExtractionSummary) o;
//if name and suggestions are the same ... consider as equals
if(getName().equalsIgnoreCase(getName())){
return suggestionSet.equals(that.suggestionSet);
} else {
return false;
}
//return !(name != null ? !name.equals(that.name) : that.name != null);
}
        /** Hash based on the selected text only (consistent with name-based equality). */
        @Override
        public int hashCode() {
            return name != null ? name.hashCode() : 0;
        }
}
public static class EntitySuggestion implements Comparable<EntitySuggestion> {
protected final IRI uri;
protected final IRI type;
protected final String label;
protected final Double confidence;
protected Graph entityProperties;
protected final Map<IRI,String> defaultThumbnails;
        /**
         * Creates a suggestion for an extracted occurrence.
         * @param uri the URI of the suggested entity
         * @param type the dc:type of the occurrence (used for default thumbnails)
         * @param label the label; if <code>null</code> one is extracted from the URI
         * @param confidence the confidence; <code>null</code> is mapped to 0.0
         * @param entityProperties graph holding additional entity data (thumbnail, summary)
         * @param defaultThumbnails fallback thumbnails by type
         */
        public EntitySuggestion(IRI uri,
                                IRI type,
                                String label,
                                Double confidence,
                                Graph entityProperties,
                                Map<IRI,String> defaultThumbnails) {
            this.uri = uri;
            if(label == null){
                this.label = extractLabel(uri);
            } else {
                this.label = label;
            }
            this.type = type;
            //normalise null to 0.0 so compareTo never NPEs
            this.confidence = confidence != null ? confidence : 0.0;
            this.entityProperties = entityProperties;
            this.defaultThumbnails = defaultThumbnails;
        }
        /** Orders suggestions by decreasing confidence (confidence is never null, see ctor). */
        @Override
        public int compareTo(EntitySuggestion o) {
            // order suggestions by decreasing confidence
            return -confidence.compareTo(o.confidence);
        }
        /** @return the entity URI as string */
        public String getUri() {
            return uri.getUnicodeString();
        }
        /** @return the confidence (0.0 if none was parsed) */
        public Double getConfidence() {
            return confidence;
        }
        /** @return the label (never <code>null</code>) */
        public String getLabel() {
            return label;
        }
public String getThumbnailSrc() {
Iterator<Triple> thumbnails = entityProperties.filter(uri, THUMBNAIL, null);
while (thumbnails.hasNext()) {
RDFTerm object = thumbnails.next().getObject();
if (object instanceof IRI) {
return ((IRI) object).getUnicodeString();
}
}
//if no dbpedia ontology thumbnail was found. try the same with foaf:depiction
thumbnails = entityProperties.filter(uri, DEPICTION, null);
while (thumbnails.hasNext()) {
RDFTerm object = thumbnails.next().getObject();
if (object instanceof IRI) {
return ((IRI) object).getUnicodeString();
}
}
return getMissingThumbnailSrc();
}
public String getMissingThumbnailSrc() {
String source = defaultThumbnails.get(type);
if(source == null){
source = defaultThumbnails.get(null);
}
return source;
}
public String getSummary() {
Iterator<Triple> abstracts = entityProperties.filter(uri, SUMMARY, null);
while (abstracts.hasNext()) {
RDFTerm object = abstracts.next().getObject();
if (object instanceof Literal) {
Literal abstract_ = (Literal) object;
if (new Language("en").equals(abstract_.getLanguage())) {
return abstract_.getLexicalForm();
}
}
}
return "";
}
        // consider entities with same URI as equal even if we have alternate
        // label values
        @Override
        public int hashCode() {
            final int prime = 31;
            int result = 1;
            result = prime * result + ((uri == null) ? 0 : uri.hashCode());
            return result;
        }
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (obj == null) return false;
if (getClass() != obj.getClass()) return false;
EntitySuggestion other = (EntitySuggestion) obj;
if (uri == null) {
if (other.uri != null) return false;
} else if (!uri.equals(other.uri)) return false;
return true;
}
}
    /** Sets the RDF format used by {@link #getRdfMetadata()} (default: RDF/XML). */
    public void setRdfSerializationFormat(String format) {
        serializationFormat = format;
    }
    /**
     * @return an RDF/JSON descriptions of places for the word map widget
     */
    public String getPlacesAsJSON() throws ParseException, UnsupportedEncodingException {
        Graph g = new IndexedGraph();
        LiteralFactory lf = LiteralFactory.getInstance();
        Graph metadata = contentItem.getMetadata();
        for (EntityExtractionSummary p : getPlaceOccurrences()) {
            EntitySuggestion bestGuess = p.getBestGuess();
            if (bestGuess == null) {
                continue; //no suggested entity for this place
            }
            IRI uri = new IRI(bestGuess.getUri());
            Iterator<Triple> latitudes = metadata.filter(uri, GEO_LAT, null);
            if (latitudes.hasNext()) {
                g.add(latitudes.next());
            }
            Iterator<Triple> longitutes = metadata.filter(uri, GEO_LONG, null);
            if (longitutes.hasNext()) {
                g.add(longitutes.next());
                //NOTE(review): the label is only added when a longitude is
                //present (a place with only a latitude gets no label) -
                //presumably intentional since both are needed to plot; confirm
                g.add(new TripleImpl(uri, Properties.RDFS_LABEL, lf.createTypedLiteral(bestGuess.getLabel())));
            }
        }
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        serializer.serialize(out, g, SupportedFormat.RDF_JSON);
        String rdfString = out.toString("utf-8");
        return rdfString;
    }
    /** Renders the HTML view of this content item (template "index", UTF-8). */
    @GET
    @Produces(TEXT_HTML)
    public Response get(@Context HttpHeaders headers) {
        ResponseBuilder rb = Response.ok(new Viewable("index", this));
        rb.header(HttpHeaders.CONTENT_TYPE, TEXT_HTML+"; charset=utf-8");
        //        addCORSOrigin(servletContext,rb, headers);
        return rb.build();
    }
}
|
google-ar/WebARonTango | 36,032 | chromium/src/android_webview/test/shell/src/org/chromium/android_webview/shell/AwShellActivity.java | // Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.android_webview.shell;
import android.Manifest;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.SharedPreferences.Editor;
import android.content.ServiceConnection;
import android.content.ComponentName;
import android.content.DialogInterface;
import android.content.pm.PackageManager;
import android.content.res.Configuration;
import android.os.IBinder;
import android.os.Bundle;
import android.text.TextUtils;
import android.view.Gravity;
import android.view.KeyEvent;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.View.OnFocusChangeListener;
import android.view.ViewGroup;
import android.view.ViewGroup.LayoutParams;
import android.view.WindowManager;
import android.view.Display;
import android.view.inputmethod.EditorInfo;
import android.view.inputmethod.InputMethodManager;
import android.webkit.GeolocationPermissions;
import android.webkit.WebChromeClient;
import android.widget.EditText;
import android.widget.FrameLayout;
import android.widget.ImageButton;
import android.widget.LinearLayout;
import android.widget.TextView;
import android.widget.TextView.OnEditorActionListener;
import android.support.v4.content.ContextCompat;
import android.support.v4.app.ActivityCompat;
import android.support.v4.app.ActivityCompat.OnRequestPermissionsResultCallback;
import android.hardware.Camera;
import org.chromium.android_webview.AwBrowserContext;
import org.chromium.android_webview.AwBrowserProcess;
import org.chromium.android_webview.AwContents;
import org.chromium.android_webview.AwContentsClient;
import org.chromium.android_webview.AwDevToolsServer;
import org.chromium.android_webview.AwSettings;
import org.chromium.android_webview.test.AwTestContainerView;
import org.chromium.android_webview.test.NullContentsClient;
import org.chromium.android_webview.JsResultReceiver;
import org.chromium.android_webview.JsPromptResultReceiver;
import org.chromium.android_webview.AwWebResourceResponse;
import org.chromium.android_webview.AwContentsClient.AwWebResourceRequest;
import org.chromium.android_webview.permission.AwPermissionRequest;
import org.chromium.base.BaseSwitches;
import org.chromium.base.CommandLine;
import org.chromium.base.ContextUtils;
import org.chromium.base.Log;
import org.chromium.base.TraceEvent;
import org.chromium.content.app.ContentApplication;
import org.chromium.content_public.browser.NavigationController;
import org.chromium.content_public.browser.WebContents;
import java.net.URI;
import java.net.URL;
import java.net.URISyntaxException;
import java.net.MalformedURLException;
import java.util.Locale;
import com.google.zxing.integration.android.IntentIntegrator;
import com.google.zxing.integration.android.IntentResult;
import java.util.ArrayList;
import org.json.JSONException;
import org.json.JSONObject;
import org.json.JSONArray;
import android.speech.SpeechRecognizer;
import android.speech.RecognizerIntent;
import android.speech.RecognitionListener;
import android.webkit.JavascriptInterface;
/**
* This is a lightweight activity for tests that only require WebView functionality.
*/
public class AwShellActivity extends Activity implements OnRequestPermissionsResultCallback {
private static final String TAG = "cr.AwShellActivity";
private static final String PREFERENCES_NAME = "AwShellPrefs";
private static final String INITIAL_URL = "about:blank";
private static final String LAST_USED_URL_PREFERENCE_NAME = "url";
private static final int ADF_PERMISSION_ID = 2;
private static final int CAMERA_ID = 0;
private static final int MULTIPLE_PERMISSIONS_REQUEST_CODE = 12345;
private static final String[] PERMISSIONS = new String[] {
Manifest.permission.CAMERA
, Manifest.permission.READ_EXTERNAL_STORAGE
, Manifest.permission.RECORD_AUDIO
, Manifest.permission.MODIFY_AUDIO_SETTINGS
, Manifest.permission.ACCESS_COARSE_LOCATION
}; private AwBrowserContext mBrowserContext;
private AwDevToolsServer mDevToolsServer;
private AwTestContainerView mAwTestContainerView;
private WebContents mWebContents;
private NavigationController mNavigationController;
private EditText mUrlTextView;
private ImageButton mPrevButton;
private ImageButton mNextButton;
private ImageButton mQRCodeButton;
private boolean mInitialized = false;
private boolean mAllPermissionsGranted = false;
private boolean mResumed = false;
private String mStartupUrl;
private class SpeechRecognition implements RecognitionListener
{
public static final String JS_INTERFACE_INSTANCE_NAME = "webarSpeechRecognitionInstance";
private SpeechRecognizer speechRecognizer = null;
private Intent speechRecognizerIntent = null;
        // Runnables wrapping start/cancel of the recognizer; presumably posted
        // to the UI thread since SpeechRecognizer must be called on the main
        // thread - confirm at the call sites.
        private Runnable startRunnable = new Runnable()
        {
            @Override
            public void run()
            {
                speechRecognizer.startListening(speechRecognizerIntent);
            }
        };
        private Runnable stopRunnable = new Runnable()
        {
            @Override
            public void run()
            {
                speechRecognizer.cancel();
            }
        };
        /**
         * Creates the Android SpeechRecognizer with a free-form language model
         * intent and registers this instance as its listener.
         */
        public SpeechRecognition()
        {
            speechRecognizer = SpeechRecognizer.createSpeechRecognizer(AwShellActivity.this);
            speechRecognizerIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
            speechRecognizerIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
            speechRecognizerIntent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, AwShellActivity.this.getPackageName());
            speechRecognizer.setRecognitionListener(this);
        }
void destroy()
{
speechRecognizer.destroy();
}
@Override
public void onBeginningOfSpeech()
{
// System.out.println("SpeechRecognitionListener.onBeginningOfSpeech");
dispatchEventToJSInterfaceInstance("speechstart", "{}");
}
@Override
public void onBufferReceived(byte[] buffer)
{
// System.out.println("SpeechRecognitionListener.onBufferReceived");
}
@Override
public void onEndOfSpeech()
{
// System.out.println("SpeechRecognitionListener.onEndOfSpeech");
dispatchEventToJSInterfaceInstance("speechend", "{}");
}
@Override
public void onError(int error)
{
// System.out.println("SpeechRecognitionListener.onError: " + error);
String errorString = "Unknown error.";
switch(error)
{
case SpeechRecognizer.ERROR_AUDIO:
errorString = "Audio recording error.";
break;
case SpeechRecognizer.ERROR_CLIENT:
errorString = "Other client side errors.";
break;
case SpeechRecognizer.ERROR_INSUFFICIENT_PERMISSIONS:
errorString = "Insufficient permissions";
break;
case SpeechRecognizer.ERROR_NETWORK:
errorString = "Other network related errors.";
break;
case SpeechRecognizer.ERROR_NETWORK_TIMEOUT:
errorString = "Network operation timed out.";
break;
case SpeechRecognizer.ERROR_NO_MATCH:
errorString = "No recognition result matched.";
break;
case SpeechRecognizer.ERROR_RECOGNIZER_BUSY:
errorString = "RecognitionService busy.";
break;
case SpeechRecognizer.ERROR_SERVER:
errorString = "Server sends error status.";
break;
case SpeechRecognizer.ERROR_SPEECH_TIMEOUT:
errorString = "No speech input or timeout.";
break;
}
dispatchEventToJSInterfaceInstance("error", "{ error: '" + errorString + "'}");
}
@Override
public void onEvent(int eventType, Bundle params)
{
// System.out.println("SpeechRecognitionListener.onEvent");
}
@Override
public void onPartialResults(Bundle partialResults)
{
// System.out.println("SpeechRecognitionListener.onPartialResults");
}
@Override
public void onReadyForSpeech(Bundle params)
{
// System.out.println("SpeechRecognitionListener.onReadyForSpeech");
dispatchEventToJSInterfaceInstance("start",
"{}");
}
@Override
public void onResults(Bundle results)
{
// System.out.println("SpeechRecognitionListener.onResults");
ArrayList<String> matches = results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
float[] confidences = results.getFloatArray(SpeechRecognizer.CONFIDENCE_SCORES);
if (!matches.isEmpty())
{
try
{
JSONObject resultJSONObject = new JSONObject();
JSONArray resultsJSONArray = new JSONArray();
for (int i = 0; i < matches.size(); i++)
{
JSONObject matchJSONObject = new JSONObject();
matchJSONObject.put("isFinal", true);
matchJSONObject.put("length", 1);
JSONObject transcriptJSONObject = new JSONObject();
transcriptJSONObject.put("transcript", matches.get(i));
transcriptJSONObject.put("confidence", confidences[i]);
matchJSONObject.put("0", transcriptJSONObject);
resultsJSONArray.put(matchJSONObject);
}
resultJSONObject.put("results", resultsJSONArray);
String jsonString = resultJSONObject.toString();
// String jsonString = "{ results: [";
// for (int i = 0; i < matches.size(); i++)
// {
// jsonString += "{ isFinal: true, length: 1, 0: { transcript: '" + matches.get(i) + "', confidence: " + confidences[i] + " } }" + (i < matches.size() - 1 ? ", " : "");
// }
// jsonString += "] }";
dispatchEventToJSInterfaceInstance("result", jsonString);
dispatchEventToJSInterfaceInstance("end", jsonString);
}
catch(JSONException e)
{
dispatchEventToJSInterfaceInstance("error", "{ error: 'JSON exception while creating the speech recognition results.'}");
e.printStackTrace();
}
}
}
@Override
public void onRmsChanged(float rmsdB)
{
// System.out.println("SpeechRecognitionListener.onRmsChanged");
}
@JavascriptInterface
public void start()
{
runOnUiThread(startRunnable);
}
@JavascriptInterface
public void stop()
{
runOnUiThread(stopRunnable);
}
private void dispatchEventToJSInterfaceInstance(String eventType, String eventJSONString)
{
String jsCode = "window." + JS_INTERFACE_INSTANCE_NAME + ".callEventListeners('" + eventType + "', " + eventJSONString +");";
mAwTestContainerView.getAwContents().evaluateJavaScript(jsCode, null);
}
}
private static AlertDialog createAlertDialog(Context context, String title,
String message, DialogInterface.OnClickListener onClickListener,
int numberOfButtons, String yesButtonText, String noButtonText,
String cancelButtonText)
{
AlertDialog alertDialog = new AlertDialog.Builder(context).create();
alertDialog.setTitle(title);
alertDialog.setMessage(message);
alertDialog.setButton(AlertDialog.BUTTON_POSITIVE, yesButtonText,
onClickListener);
if (numberOfButtons > 1)
{
alertDialog.setButton(AlertDialog.BUTTON_NEGATIVE, noButtonText,
onClickListener);
}
if (numberOfButtons > 2)
{
alertDialog.setButton(AlertDialog.BUTTON_NEUTRAL, cancelButtonText,
onClickListener);
}
return alertDialog;
}
private static AlertDialog createPromptDialog(Context context, EditText editText, String title,
String message, DialogInterface.OnClickListener onClickListener,
int numberOfButtons, String yesButtonText, String noButtonText,
String cancelButtonText)
{
AlertDialog.Builder builder = new AlertDialog.Builder(context);
builder.setView(editText);
AlertDialog alertDialog = builder.create();
alertDialog.setTitle(title);
alertDialog.setMessage(message);
alertDialog.setButton(AlertDialog.BUTTON_POSITIVE, yesButtonText,
onClickListener);
if (numberOfButtons > 1)
{
alertDialog.setButton(AlertDialog.BUTTON_NEGATIVE, noButtonText,
onClickListener);
}
if (numberOfButtons > 2)
{
alertDialog.setButton(AlertDialog.BUTTON_NEUTRAL, cancelButtonText,
onClickListener);
}
return alertDialog;
}
    // Tango Service connection. The binder received here is handed straight to the
    // native layer; binding happens in tryToConnectWithTango(), unbinding in onPause().
    ServiceConnection mTangoServiceConnection = new ServiceConnection()
    {
        public void onServiceConnected(ComponentName name, IBinder service)
        {
            TangoJniNative.onTangoServiceConnected(service);
        }
        public void onServiceDisconnected(ComponentName name)
        {
            // Handle this if you need to gracefully shutdown/retry
            // in the event that Tango itself crashes/gets upgraded while
            // running.
        }
    };
private void saveStringToPreferences(String name, String value)
{
Editor editor = getPreferences(Activity.MODE_PRIVATE).edit();
editor.putString(name, value);
if (!editor.commit())
{
System.err.println("ERROR: Could not save the preference: " + name + " - " + value);
}
}
private void requestPermissions() {
String permissionsString = "";
for (int i = 0; i < PERMISSIONS.length; i++)
{
permissionsString += PERMISSIONS[i];
}
if (ContextCompat.checkSelfPermission(this,
permissionsString)
!= PackageManager.PERMISSION_GRANTED) {
// if (ActivityCompat.shouldShowRequestPermissionRationale
// (this, Manifest.permission.READ_EXTERNAL_STORAGE) ||
// ActivityCompat.shouldShowRequestPermissionRationale
// (this, Manifest.permission.CAMERA))
// {
// }
// else
{
requestPermissions(PERMISSIONS, MULTIPLE_PERMISSIONS_REQUEST_CODE);
}
}
else
{
mAllPermissionsGranted = true;
tryToConnectWithTango();
}
}
private void requestADFPermission()
{
final String EXTRA_KEY_PERMISSIONTYPE = "PERMISSIONTYPE";
final String EXTRA_VALUE_ADF = "ADF_LOAD_SAVE_PERMISSION";
Intent intent = new Intent();
intent.setAction("android.intent.action.REQUEST_TANGO_PERMISSION");
intent.putExtra(EXTRA_KEY_PERMISSIONTYPE, EXTRA_VALUE_ADF);
startActivityForResult(intent, ADF_PERMISSION_ID);
}
@Override
public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults)
{
System.out.println("onRequestPermissionsResult: " + requestCode + ", " + grantResults.length);
if (requestCode == MULTIPLE_PERMISSIONS_REQUEST_CODE)
{
boolean allPermissionsGranted = true;
for (int i = 0; allPermissionsGranted && i < grantResults.length; i++)
{
allPermissionsGranted &= grantResults[i] == PackageManager.PERMISSION_GRANTED;
}
mAllPermissionsGranted = allPermissionsGranted;
tryToConnectWithTango();
}
}
private void tryToConnectWithTango()
{
if (mInitialized && mResumed && mAllPermissionsGranted)
{
TangoInitializationHelper.bindTangoService(this, mTangoServiceConnection);
// Now we can finally load the URL
mAwTestContainerView.getAwContents().loadUrl(mStartupUrl);
AwContents.setShouldDownloadFavicons();
mUrlTextView.setText(mStartupUrl);
}
}
    /**
     * One-shot activity setup: verifies Tango compatibility, kicks off the
     * permission requests, initializes the native layer and the Chromium browser
     * process, builds the content view, and records the startup URL. The actual
     * page load is deferred to tryToConnectWithTango(). Note the order below is
     * significant (CommandLine before AwBrowserProcess, view before URL field).
     */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Bail out early (with an explanatory dialog that finishes the activity)
        // on non-Tango devices; mInitialized stays false so the other lifecycle
        // callbacks become no-ops.
        if (!TangoJniNative.initialize())
        {
            createAlertDialog(this, "Error Initializing Chromium WebAR", "This device is not currently Tango compatible. Sorry, this Chromium WebAR prototype only runs on Tango devices for now.", new DialogInterface.OnClickListener()
            {
                @Override
                public void onClick(DialogInterface dialog, int which)
                {
                    finish();
                }
            }, 1, "Ok", null, null).show();
            return;
        }
        requestPermissions();
        requestADFPermission();
        // Pass the current display rotation and camera sensor orientation to the
        // native layer so it can align the camera feed.
        WindowManager windowManager = (WindowManager) getSystemService(WINDOW_SERVICE);
        Display display = windowManager.getDefaultDisplay();
        Camera.CameraInfo info = new Camera.CameraInfo();
        Camera.getCameraInfo(CAMERA_ID, info);
        TangoJniNative.onCreate(this, display.getRotation(), info.orientation);
        // Chromium command line: enable WebVR and scripted speech, ignore the GPU
        // blacklist. Must happen before the browser process starts.
        CommandLine.init(new String[] { "chrome", "--ignore-gpu-blacklist", "--enable-webvr", "--enable-blink-features=ScriptedSpeech" });
        AwShellResourceProvider.registerResources(this);
        ContentApplication.initCommandLine(this);
        waitForDebuggerIfNeeded();
        ContextUtils.initApplicationContext(getApplicationContext());
        AwBrowserProcess.loadLibrary();
        if (CommandLine.getInstance().hasSwitch(AwShellSwitches.ENABLE_ATRACE)) {
            Log.e(TAG, "Enabling Android trace.");
            TraceEvent.setATraceEnabled(true);
        }
        setContentView(R.layout.testshell_activity);
        mAwTestContainerView = createAwTestContainerView();
        mWebContents = mAwTestContainerView.getContentViewCore().getWebContents();
        mNavigationController = mWebContents.getNavigationController();
        LinearLayout contentContainer = (LinearLayout) findViewById(R.id.content_container);
        mAwTestContainerView.setLayoutParams(new LinearLayout.LayoutParams(
                LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT, 1f));
        contentContainer.addView(mAwTestContainerView);
        mAwTestContainerView.requestFocus();
        initializeUrlField();
        initializeNavigationButtons();
        mAwTestContainerView.getAwContents().clearCache(true);
        // Expose the speech-recognition bridge to page JavaScript.
        mAwTestContainerView.getAwContents().addJavascriptInterface(new SpeechRecognition(), SpeechRecognition.JS_INTERFACE_INSTANCE_NAME);
        // Startup URL: intent data if present, otherwise the last visited URL,
        // otherwise about:blank. Loading happens later in tryToConnectWithTango().
        mStartupUrl = getUrlFromIntent(getIntent());
        if (TextUtils.isEmpty(mStartupUrl)) {
            mStartupUrl = getPreferences(Activity.MODE_PRIVATE).getString(LAST_USED_URL_PREFERENCE_NAME, INITIAL_URL);
        }
        mInitialized = true;
    }
@Override
protected void onResume()
{
super.onResume();
if (!mInitialized) return;
mResumed = true;
tryToConnectWithTango();
}
@Override
protected void onPause()
{
super.onPause();
if (!mInitialized) return;
mResumed = false;
// Disconnect from Tango Service, release all the resources that the app is holding from Tango Service.
if (mInitialized && mAllPermissionsGranted)
{
TangoJniNative.onPause();
unbindService(mTangoServiceConnection);
}
}
@Override
public void onDestroy() {
if (!mInitialized)
{
super.onDestroy();
return;
}
TangoJniNative.onDestroy();
if (mDevToolsServer != null) {
mDevToolsServer.destroy();
mDevToolsServer = null;
}
super.onDestroy();
}
    /**
     * Starts the browser process and builds the AwTestContainerView hosting the
     * page, wiring up an AwContentsClient that handles JS dialogs, load errors,
     * fullscreen custom views, and permission prompts, plus permissive AwSettings
     * and a remote-debugging devtools server.
     */
    private AwTestContainerView createAwTestContainerView() {
        AwBrowserProcess.start();
        AwTestContainerView testContainerView = new AwTestContainerView(this, true);
        // Client receiving page-driven callbacks (dialogs, errors, permissions...).
        AwContentsClient awContentsClient = new NullContentsClient() {
            // View supplied by onShowCustomView (e.g. fullscreen video).
            private View mCustomView;
            @Override
            public void handleJsAlert(String url, String message, final JsResultReceiver receiver) {
                createAlertDialog(AwShellActivity.this, url, message, new DialogInterface.OnClickListener()
                {
                    @Override
                    public void onClick(DialogInterface dialog, int which)
                    {
                        receiver.confirm();
                    }
                }, 1, "Ok", null, null).show();
            }
            @Override
            public void handleJsPrompt(String url, String message, String defaultValue, final JsPromptResultReceiver receiver) {
                final EditText editText = new EditText(AwShellActivity.this);
                createPromptDialog(AwShellActivity.this, editText, url, message, new DialogInterface.OnClickListener()
                {
                    @Override
                    public void onClick(DialogInterface dialog, int which)
                    {
                        if (which == DialogInterface.BUTTON_POSITIVE)
                        {
                            receiver.confirm(editText.getText().toString());
                        }
                        else
                        {
                            receiver.cancel();
                        }
                    }
                }, 2, "Ok", "Cancel", null).show();
            }
            @Override
            public void handleJsConfirm(String url, String message, final JsResultReceiver receiver) {
                createAlertDialog(AwShellActivity.this, url, message, new DialogInterface.OnClickListener()
                {
                    @Override
                    public void onClick(DialogInterface dialog, int which)
                    {
                        if (which == DialogInterface.BUTTON_POSITIVE)
                        {
                            receiver.confirm();
                        }
                        else
                        {
                            receiver.cancel();
                        }
                    }
                }, 2, "Yes", "No", null).show();
            }
            @Override
            public void onReceivedError(int errorCode, String description, String failingUrl) {
                createAlertDialog(AwShellActivity.this, "ERROR: " + errorCode, failingUrl + ": " + description, null, 1, "Ok", null, null).show();
            }
            // @Override
            // public void onReceivedError2(AwWebResourceRequest request, AwWebResourceError error) {
            //     String failingUrl = request.url;
            //     int errorCode = error.errorCode;
            //     String description = error.description;
            //     createAlertDialog(AwShellActivity.this, "ERROR: " + errorCode, failingUrl + ": " + description, null, 1, "Ok", null, null).show();
            // }
            @Override
            public void onReceivedHttpError(AwWebResourceRequest request, AwWebResourceResponse response) {
                String failingUrl = request.url;
                // HACK! Do not show the icon loading error.
                if (failingUrl.toLowerCase(Locale.getDefault()).contains("favicon.ico")) return;
                int errorCode = response.getStatusCode();
                String description = response.getReasonPhrase();
                createAlertDialog(AwShellActivity.this, "HTTP ERROR: " + errorCode, failingUrl + ": " + description, null, 1, "Ok", null, null).show();
            }
            @Override
            public void onPageStarted(String url) {
                // Mirror the navigation into the URL bar, persist it for the next
                // launch, and reset the Tango pose for the new page.
                if (mUrlTextView != null) {
                    mUrlTextView.setText(url);
                    saveStringToPreferences(LAST_USED_URL_PREFERENCE_NAME, url);
                }
                TangoJniNative.resetPose();
            }
            @Override
            public void onShowCustomView(View view, WebChromeClient.CustomViewCallback callback) {
                System.out.println("WebAR: onShowCustomView");
                // Go fullscreen and overlay the custom view on the window.
                getWindow().setFlags(
                        WindowManager.LayoutParams.FLAG_FULLSCREEN,
                        WindowManager.LayoutParams.FLAG_FULLSCREEN);
                getWindow().addContentView(view,
                        new FrameLayout.LayoutParams(
                                ViewGroup.LayoutParams.MATCH_PARENT,
                                ViewGroup.LayoutParams.MATCH_PARENT,
                                Gravity.CENTER));
                mCustomView = view;
            }
            @Override
            public void onHideCustomView() {
                // Undo onShowCustomView: drop fullscreen and remove the overlay.
                getWindow().clearFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
                FrameLayout decorView = (FrameLayout) getWindow().getDecorView();
                decorView.removeView(mCustomView);
                mCustomView = null;
            }
            @Override
            public boolean shouldOverrideKeyEvent(KeyEvent event) {
                // Let the activity's onKeyUp handle Back (history navigation).
                if (event.getKeyCode() == KeyEvent.KEYCODE_BACK) {
                    return true;
                }
                return false;
            }
            @Override
            public void onGeolocationPermissionsShowPrompt(String origin,
                    GeolocationPermissions.Callback callback) {
                System.out.println("WebAR: onGeolocationPermissionsShowPrompt");
                // Auto-grant geolocation (and remember it) without prompting.
                callback.invoke(origin, true, true);
                // callback.invoke(origin, false, false);
            }
            @Override
            public void onGeolocationPermissionsHidePrompt() {
                System.out.println("WebAR: onGeolocationPermissionsHidePrompt");
            }
            @Override
            public void onPermissionRequest(AwPermissionRequest awPermissionRequest) {
                System.out.println("WebAR: onPermissionRequest");
                // Auto-grant page permission requests (camera, mic, ...) without
                // prompting the user.
                // awPermissionRequest.deny();
                awPermissionRequest.grant();
            }
            @Override
            public void onPermissionRequestCanceled(AwPermissionRequest awPermissionRequest) {
                System.out.println("WebAR: onPermissionRequestCanceled");
            }
        };
        SharedPreferences sharedPreferences =
                getSharedPreferences(PREFERENCES_NAME, Context.MODE_PRIVATE);
        if (mBrowserContext == null) {
            mBrowserContext = new AwBrowserContext(sharedPreferences, getApplicationContext());
        }
        final AwSettings awSettings = new AwSettings(this /* context */,
                false /* isAccessFromFileURLsGrantedByDefault */, false /* supportsLegacyQuirks */,
                false /* allowEmptyDocumentPersistence */,
                true /* allowGeolocationOnInsecureOrigins */);
        // Required for WebGL conformance tests.
        awSettings.setMediaPlaybackRequiresUserGesture(false);
        // Allow zoom and fit contents to screen
        awSettings.setBuiltInZoomControls(false);
        awSettings.setDisplayZoomControls(false);
        awSettings.setUseWideViewPort(true);
        awSettings.setLoadWithOverviewMode(true);
        awSettings.setAllowFileAccessFromFileURLs(true);
        awSettings.setAllowUniversalAccessFromFileURLs(true);
        awSettings.setDomStorageEnabled(true);
        awSettings.setLayoutAlgorithm(android.webkit.WebSettings.LayoutAlgorithm.TEXT_AUTOSIZING);
        testContainerView.initialize(new AwContents(mBrowserContext, testContainerView,
                testContainerView.getContext(), testContainerView.getInternalAccessDelegate(),
                testContainerView.getNativeDrawGLFunctorFactory(), awContentsClient, awSettings));
        testContainerView.getAwContents().getSettings().setJavaScriptEnabled(true);
        // Lazily start the devtools server so chrome://inspect can attach.
        if (mDevToolsServer == null) {
            mDevToolsServer = new AwDevToolsServer();
            mDevToolsServer.setRemoteDebuggingEnabled(true);
        }
        return testContainerView;
    }
private static String getUrlFromIntent(Intent intent) {
return intent != null ? intent.getDataString() : null;
}
private void setKeyboardVisibilityForUrl(boolean visible) {
InputMethodManager imm = (InputMethodManager) getSystemService(
Context.INPUT_METHOD_SERVICE);
if (visible) {
imm.showSoftInput(mUrlTextView, InputMethodManager.SHOW_IMPLICIT);
} else {
imm.hideSoftInputFromWindow(mUrlTextView.getWindowToken(), 0);
}
}
    /**
     * Wires up the URL bar: "Go"/Enter normalizes the typed text (prefixing
     * http:// when no scheme is present) and loads it; losing focus hides the
     * keyboard, restores the nav buttons, and resyncs the bar with the page URL.
     */
    private void initializeUrlField() {
        mUrlTextView = (EditText) findViewById(R.id.url);
        mUrlTextView.setOnEditorActionListener(new OnEditorActionListener() {
            @Override
            public boolean onEditorAction(TextView v, int actionId, KeyEvent event) {
                // Only act on the IME "Go" action or a hardware Enter key-down.
                if ((actionId != EditorInfo.IME_ACTION_GO) && (event == null
                        || event.getKeyCode() != KeyEvent.KEYCODE_ENTER
                        || event.getAction() != KeyEvent.ACTION_DOWN)) {
                    return false;
                }
                String url = mUrlTextView.getText().toString();
                try {
                    // Default to http:// when the user typed no scheme.
                    URI uri = new URI(url);
                    if (uri.getScheme() == null) {
                        url = "http://" + uri.toString();
                    } else {
                        url = uri.toString();
                    }
                } catch (URISyntaxException e) {
                    // Ignore syntax errors.
                }
                mAwTestContainerView.getAwContents().loadUrl(url);
                mUrlTextView.clearFocus();
                setKeyboardVisibilityForUrl(false);
                mAwTestContainerView.requestFocus();
                return true;
            }
        });
        mUrlTextView.setOnFocusChangeListener(new OnFocusChangeListener() {
            @Override
            public void onFocusChange(View v, boolean hasFocus) {
                // While editing, hide the nav buttons to give the bar more room.
                setKeyboardVisibilityForUrl(hasFocus);
                mNextButton.setVisibility(hasFocus ? View.GONE : View.VISIBLE);
                mPrevButton.setVisibility(hasFocus ? View.GONE : View.VISIBLE);
                if (!hasFocus) {
                    mUrlTextView.setText(mWebContents.getUrl());
                }
            }
        });
    }
private void initializeNavigationButtons() {
mPrevButton = (ImageButton) findViewById(R.id.prev);
mPrevButton.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
if (mNavigationController.canGoBack()) {
mNavigationController.goBack();
}
}
});
mNextButton = (ImageButton) findViewById(R.id.next);
mNextButton.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
if (mNavigationController.canGoForward()) {
mNavigationController.goForward();
}
}
});
mQRCodeButton = (ImageButton) findViewById(R.id.qrcodeImageButton);
mQRCodeButton.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
IntentIntegrator intentIntegrator = new IntentIntegrator(AwShellActivity.this);
intentIntegrator.initiateScan();
}
});
}
@Override
public boolean onKeyUp(int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_BACK) {
if (mNavigationController.canGoBack()) {
mNavigationController.goBack();
return true;
}
}
return super.onKeyUp(keyCode, event);
}
@Override
public void onConfigurationChanged(Configuration newConfig) {
super.onConfigurationChanged(newConfig);
WindowManager windowManager = (WindowManager) getSystemService(WINDOW_SERVICE);
Display display = windowManager.getDefaultDisplay();
Camera.CameraInfo info = new Camera.CameraInfo();
Camera.getCameraInfo(CAMERA_ID, info);
TangoJniNative.onConfigurationChanged(display.getRotation(), info.orientation);
}
@Override
public void onActivityResult(int requestCode, int resultCode, Intent intent)
{
IntentResult scanResult = IntentIntegrator.parseActivityResult(requestCode, resultCode, intent);
if (scanResult != null && scanResult.getContents() != null)
{
String url = scanResult.getContents();
try
{
new URL(url);
mUrlTextView.setText(url);
mAwTestContainerView.getAwContents().loadUrl(url);
mUrlTextView.clearFocus();
setKeyboardVisibilityForUrl(false);
mAwTestContainerView.requestFocus();
}
catch(MalformedURLException e)
{
createAlertDialog(this, "Not an URL", "The read QRCode does not represent a valid URL.", null, 1, "Ok", null, null).show();
}
}
}
/*
boolean firstTouchDone = false;
@Override
public boolean dispatchTouchEvent( android.view.MotionEvent event )
{
android.view.Display display = getWindowManager().getDefaultDisplay();
int width = display.getWidth(); // deprecated
int height = display.getHeight(); // deprecated
int action = event.getAction();
float x = event.getRawX();
float y = event.getRawY();
System.out.println("x = " + x + ", width / 2 = " + width / 2);
if (!firstTouchDone && x > (width >> 1))
{
// mAwTestContainerView.getAwContents().loadUrl("http://judax.github.io/tango/");
mAwTestContainerView.getAwContents().loadUrl("http://192.168.0.104/judax/tango/video3js/");
mUrlTextView.clearFocus();
setKeyboardVisibilityForUrl(false);
mAwTestContainerView.requestFocus();
firstTouchDone = true;
return true;
}
else
{
return super.dispatchTouchEvent(event);
}
}
*/
private void waitForDebuggerIfNeeded() {
if (CommandLine.getInstance().hasSwitch(BaseSwitches.WAIT_FOR_JAVA_DEBUGGER)) {
Log.e(TAG, "Waiting for Java debugger to connect...");
android.os.Debug.waitForDebugger();
Log.e(TAG, "Java debugger connected. Resuming execution.");
}
}
}
|
googleapis/google-cloud-java | 36,261 | java-appengine-admin/google-cloud-appengine-admin/src/main/java/com/google/appengine/v1/DomainMappingsClient.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.appengine.v1;
import com.google.api.core.ApiFuture;
import com.google.api.core.ApiFutures;
import com.google.api.core.BetaApi;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.httpjson.longrunning.OperationsClient;
import com.google.api.gax.longrunning.OperationFuture;
import com.google.api.gax.paging.AbstractFixedSizeCollection;
import com.google.api.gax.paging.AbstractPage;
import com.google.api.gax.paging.AbstractPagedListResponse;
import com.google.api.gax.rpc.OperationCallable;
import com.google.api.gax.rpc.PageContext;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.appengine.v1.stub.DomainMappingsStub;
import com.google.appengine.v1.stub.DomainMappingsStubSettings;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.longrunning.Operation;
import com.google.protobuf.Empty;
import java.io.IOException;
import java.util.List;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* Service Description: Manages domains serving an application.
*
* <p>This class provides the ability to make remote calls to the backing service through method
* calls that map to API methods. Sample code to get started:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (DomainMappingsClient domainMappingsClient = DomainMappingsClient.create()) {
* GetDomainMappingRequest request =
* GetDomainMappingRequest.newBuilder().setName("name3373707").build();
* DomainMapping response = domainMappingsClient.getDomainMapping(request);
* }
* }</pre>
*
* <p>Note: close() needs to be called on the DomainMappingsClient object to clean up resources such
* as threads. In the example above, try-with-resources is used, which automatically calls close().
*
* <table>
* <caption>Methods</caption>
* <tr>
* <th>Method</th>
* <th>Description</th>
* <th>Method Variants</th>
* </tr>
* <tr>
* <td><p> ListDomainMappings</td>
* <td><p> Lists the domain mappings on an application.</td>
* <td>
* <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p>
* <ul>
* <li><p> listDomainMappings(ListDomainMappingsRequest request)
* </ul>
* <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p>
* <ul>
* <li><p> listDomainMappingsPagedCallable()
* <li><p> listDomainMappingsCallable()
* </ul>
* </td>
* </tr>
* <tr>
* <td><p> GetDomainMapping</td>
* <td><p> Gets the specified domain mapping.</td>
* <td>
* <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p>
* <ul>
* <li><p> getDomainMapping(GetDomainMappingRequest request)
* </ul>
* <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p>
* <ul>
* <li><p> getDomainMappingCallable()
* </ul>
* </td>
* </tr>
* <tr>
* <td><p> CreateDomainMapping</td>
* <td><p> Maps a domain to an application. A user must be authorized to administer a domain in order to map it to an application. For a list of available authorized domains, see [`AuthorizedDomains.ListAuthorizedDomains`]().</td>
* <td>
* <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p>
* <ul>
* <li><p> createDomainMappingAsync(CreateDomainMappingRequest request)
* </ul>
* <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p>
* <ul>
* <li><p> createDomainMappingOperationCallable()
* <li><p> createDomainMappingCallable()
* </ul>
* </td>
* </tr>
* <tr>
* <td><p> UpdateDomainMapping</td>
* <td><p> Updates the specified domain mapping. To map an SSL certificate to a domain mapping, update `certificate_id` to point to an `AuthorizedCertificate` resource. A user must be authorized to administer the associated domain in order to update a `DomainMapping` resource.</td>
* <td>
* <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p>
* <ul>
* <li><p> updateDomainMappingAsync(UpdateDomainMappingRequest request)
* </ul>
* <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p>
* <ul>
* <li><p> updateDomainMappingOperationCallable()
* <li><p> updateDomainMappingCallable()
* </ul>
* </td>
* </tr>
* <tr>
* <td><p> DeleteDomainMapping</td>
* <td><p> Deletes the specified domain mapping. A user must be authorized to administer the associated domain in order to delete a `DomainMapping` resource.</td>
* <td>
* <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p>
* <ul>
* <li><p> deleteDomainMappingAsync(DeleteDomainMappingRequest request)
* </ul>
* <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p>
* <ul>
* <li><p> deleteDomainMappingOperationCallable()
* <li><p> deleteDomainMappingCallable()
* </ul>
* </td>
* </tr>
* </table>
*
* <p>See the individual methods for example code.
*
* <p>Many parameters require resource names to be formatted in a particular way. To assist with
* these names, this class includes a format method for each type of name, and additionally a parse
* method to extract the individual identifiers contained within names that are returned.
*
* <p>This class can be customized by passing in a custom instance of DomainMappingsSettings to
* create(). For example:
*
* <p>To customize credentials:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* DomainMappingsSettings domainMappingsSettings =
* DomainMappingsSettings.newBuilder()
* .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials))
* .build();
* DomainMappingsClient domainMappingsClient = DomainMappingsClient.create(domainMappingsSettings);
* }</pre>
*
* <p>To customize the endpoint:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* DomainMappingsSettings domainMappingsSettings =
* DomainMappingsSettings.newBuilder().setEndpoint(myEndpoint).build();
* DomainMappingsClient domainMappingsClient = DomainMappingsClient.create(domainMappingsSettings);
* }</pre>
*
* <p>To use REST (HTTP1.1/JSON) transport (instead of gRPC) for sending and receiving requests over
* the wire:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* DomainMappingsSettings domainMappingsSettings =
* DomainMappingsSettings.newHttpJsonBuilder().build();
* DomainMappingsClient domainMappingsClient = DomainMappingsClient.create(domainMappingsSettings);
* }</pre>
*
* <p>Please refer to the GitHub repository's samples for more quickstart code snippets.
*/
@Generated("by gapic-generator-java")
public class DomainMappingsClient implements BackgroundResource {
  // Settings this client was built from; null when constructed directly from a stub.
  private final DomainMappingsSettings settings;
  // Transport-specific stub that performs the actual RPCs.
  private final DomainMappingsStub stub;
  // Long-running-operations client for the HTTP/JSON (REST) transport.
  private final OperationsClient httpJsonOperationsClient;
  // Long-running-operations client for the gRPC transport.
  private final com.google.longrunning.OperationsClient operationsClient;
  /**
   * Constructs an instance of DomainMappingsClient with default settings.
   *
   * @throws IOException if the client cannot be created from the default settings
   */
  public static final DomainMappingsClient create() throws IOException {
    return create(DomainMappingsSettings.newBuilder().build());
  }
  /**
   * Constructs an instance of DomainMappingsClient, using the given settings. The channels are
   * created based on the settings passed in, or defaults for any settings that are not set.
   *
   * @param settings client settings controlling credentials, endpoint, and transport
   * @throws IOException if the client cannot be created from the given settings
   */
  public static final DomainMappingsClient create(DomainMappingsSettings settings)
      throws IOException {
    return new DomainMappingsClient(settings);
  }
  /**
   * Constructs an instance of DomainMappingsClient, using the given stub for making calls. This is
   * for advanced usage - prefer using create(DomainMappingsSettings).
   *
   * <p>Note: a client created this way reports {@code null} from {@code getSettings()}.
   */
  public static final DomainMappingsClient create(DomainMappingsStub stub) {
    return new DomainMappingsClient(stub);
  }
  /**
   * Constructs an instance of DomainMappingsClient, using the given settings. This is protected so
   * that it is easy to make a subclass, but otherwise, the static factory methods should be
   * preferred.
   *
   * @throws IOException if the underlying stub cannot be created
   */
  protected DomainMappingsClient(DomainMappingsSettings settings) throws IOException {
    this.settings = settings;
    this.stub = ((DomainMappingsStubSettings) settings.getStubSettings()).createStub();
    // Both LRO clients are created eagerly: one per supported transport.
    this.operationsClient =
        com.google.longrunning.OperationsClient.create(this.stub.getOperationsStub());
    this.httpJsonOperationsClient = OperationsClient.create(this.stub.getHttpJsonOperationsStub());
  }
protected DomainMappingsClient(DomainMappingsStub stub) {
this.settings = null;
this.stub = stub;
this.operationsClient =
com.google.longrunning.OperationsClient.create(this.stub.getOperationsStub());
this.httpJsonOperationsClient = OperationsClient.create(this.stub.getHttpJsonOperationsStub());
}
public final DomainMappingsSettings getSettings() {
return settings;
}
public DomainMappingsStub getStub() {
return stub;
}
/**
* Returns the OperationsClient that can be used to query the status of a long-running operation
* returned by another API method call.
*/
public final com.google.longrunning.OperationsClient getOperationsClient() {
return operationsClient;
}
/**
* Returns the OperationsClient that can be used to query the status of a long-running operation
* returned by another API method call.
*/
@BetaApi
public final OperationsClient getHttpJsonOperationsClient() {
return httpJsonOperationsClient;
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Lists the domain mappings on an application.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (DomainMappingsClient domainMappingsClient = DomainMappingsClient.create()) {
* ListDomainMappingsRequest request =
* ListDomainMappingsRequest.newBuilder()
* .setParent("parent-995424086")
* .setPageSize(883849137)
* .setPageToken("pageToken873572522")
* .build();
* for (DomainMapping element : domainMappingsClient.listDomainMappings(request).iterateAll()) {
* // doThingsWith(element);
* }
* }
* }</pre>
*
* @param request The request object containing all of the parameters for the API call.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final ListDomainMappingsPagedResponse listDomainMappings(
ListDomainMappingsRequest request) {
return listDomainMappingsPagedCallable().call(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Lists the domain mappings on an application.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (DomainMappingsClient domainMappingsClient = DomainMappingsClient.create()) {
* ListDomainMappingsRequest request =
* ListDomainMappingsRequest.newBuilder()
* .setParent("parent-995424086")
* .setPageSize(883849137)
* .setPageToken("pageToken873572522")
* .build();
* ApiFuture<DomainMapping> future =
* domainMappingsClient.listDomainMappingsPagedCallable().futureCall(request);
* // Do something.
* for (DomainMapping element : future.get().iterateAll()) {
* // doThingsWith(element);
* }
* }
* }</pre>
*/
public final UnaryCallable<ListDomainMappingsRequest, ListDomainMappingsPagedResponse>
listDomainMappingsPagedCallable() {
return stub.listDomainMappingsPagedCallable();
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Lists the domain mappings on an application.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (DomainMappingsClient domainMappingsClient = DomainMappingsClient.create()) {
* ListDomainMappingsRequest request =
* ListDomainMappingsRequest.newBuilder()
* .setParent("parent-995424086")
* .setPageSize(883849137)
* .setPageToken("pageToken873572522")
* .build();
* while (true) {
* ListDomainMappingsResponse response =
* domainMappingsClient.listDomainMappingsCallable().call(request);
* for (DomainMapping element : response.getDomainMappingsList()) {
* // doThingsWith(element);
* }
* String nextPageToken = response.getNextPageToken();
* if (!Strings.isNullOrEmpty(nextPageToken)) {
* request = request.toBuilder().setPageToken(nextPageToken).build();
* } else {
* break;
* }
* }
* }
* }</pre>
*/
public final UnaryCallable<ListDomainMappingsRequest, ListDomainMappingsResponse>
listDomainMappingsCallable() {
return stub.listDomainMappingsCallable();
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Gets the specified domain mapping.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (DomainMappingsClient domainMappingsClient = DomainMappingsClient.create()) {
* GetDomainMappingRequest request =
* GetDomainMappingRequest.newBuilder().setName("name3373707").build();
* DomainMapping response = domainMappingsClient.getDomainMapping(request);
* }
* }</pre>
*
* @param request The request object containing all of the parameters for the API call.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final DomainMapping getDomainMapping(GetDomainMappingRequest request) {
return getDomainMappingCallable().call(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Gets the specified domain mapping.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (DomainMappingsClient domainMappingsClient = DomainMappingsClient.create()) {
* GetDomainMappingRequest request =
* GetDomainMappingRequest.newBuilder().setName("name3373707").build();
* ApiFuture<DomainMapping> future =
* domainMappingsClient.getDomainMappingCallable().futureCall(request);
* // Do something.
* DomainMapping response = future.get();
* }
* }</pre>
*/
public final UnaryCallable<GetDomainMappingRequest, DomainMapping> getDomainMappingCallable() {
return stub.getDomainMappingCallable();
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Maps a domain to an application. A user must be authorized to administer a domain in order to
* map it to an application. For a list of available authorized domains, see
* [`AuthorizedDomains.ListAuthorizedDomains`]().
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (DomainMappingsClient domainMappingsClient = DomainMappingsClient.create()) {
* CreateDomainMappingRequest request =
* CreateDomainMappingRequest.newBuilder()
* .setParent("parent-995424086")
* .setDomainMapping(DomainMapping.newBuilder().build())
* .setOverrideStrategy(DomainOverrideStrategy.forNumber(0))
* .build();
* DomainMapping response = domainMappingsClient.createDomainMappingAsync(request).get();
* }
* }</pre>
*
* @param request The request object containing all of the parameters for the API call.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final OperationFuture<DomainMapping, OperationMetadataV1> createDomainMappingAsync(
CreateDomainMappingRequest request) {
return createDomainMappingOperationCallable().futureCall(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Maps a domain to an application. A user must be authorized to administer a domain in order to
* map it to an application. For a list of available authorized domains, see
* [`AuthorizedDomains.ListAuthorizedDomains`]().
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (DomainMappingsClient domainMappingsClient = DomainMappingsClient.create()) {
* CreateDomainMappingRequest request =
* CreateDomainMappingRequest.newBuilder()
* .setParent("parent-995424086")
* .setDomainMapping(DomainMapping.newBuilder().build())
* .setOverrideStrategy(DomainOverrideStrategy.forNumber(0))
* .build();
* OperationFuture<DomainMapping, OperationMetadataV1> future =
* domainMappingsClient.createDomainMappingOperationCallable().futureCall(request);
* // Do something.
* DomainMapping response = future.get();
* }
* }</pre>
*/
public final OperationCallable<CreateDomainMappingRequest, DomainMapping, OperationMetadataV1>
createDomainMappingOperationCallable() {
return stub.createDomainMappingOperationCallable();
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Maps a domain to an application. A user must be authorized to administer a domain in order to
* map it to an application. For a list of available authorized domains, see
* [`AuthorizedDomains.ListAuthorizedDomains`]().
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (DomainMappingsClient domainMappingsClient = DomainMappingsClient.create()) {
* CreateDomainMappingRequest request =
* CreateDomainMappingRequest.newBuilder()
* .setParent("parent-995424086")
* .setDomainMapping(DomainMapping.newBuilder().build())
* .setOverrideStrategy(DomainOverrideStrategy.forNumber(0))
* .build();
* ApiFuture<Operation> future =
* domainMappingsClient.createDomainMappingCallable().futureCall(request);
* // Do something.
* Operation response = future.get();
* }
* }</pre>
*/
public final UnaryCallable<CreateDomainMappingRequest, Operation> createDomainMappingCallable() {
return stub.createDomainMappingCallable();
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Updates the specified domain mapping. To map an SSL certificate to a domain mapping, update
* `certificate_id` to point to an `AuthorizedCertificate` resource. A user must be authorized to
* administer the associated domain in order to update a `DomainMapping` resource.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (DomainMappingsClient domainMappingsClient = DomainMappingsClient.create()) {
* UpdateDomainMappingRequest request =
* UpdateDomainMappingRequest.newBuilder()
* .setName("name3373707")
* .setDomainMapping(DomainMapping.newBuilder().build())
* .setUpdateMask(FieldMask.newBuilder().build())
* .build();
* DomainMapping response = domainMappingsClient.updateDomainMappingAsync(request).get();
* }
* }</pre>
*
* @param request The request object containing all of the parameters for the API call.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final OperationFuture<DomainMapping, OperationMetadataV1> updateDomainMappingAsync(
UpdateDomainMappingRequest request) {
return updateDomainMappingOperationCallable().futureCall(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Updates the specified domain mapping. To map an SSL certificate to a domain mapping, update
* `certificate_id` to point to an `AuthorizedCertificate` resource. A user must be authorized to
* administer the associated domain in order to update a `DomainMapping` resource.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (DomainMappingsClient domainMappingsClient = DomainMappingsClient.create()) {
* UpdateDomainMappingRequest request =
* UpdateDomainMappingRequest.newBuilder()
* .setName("name3373707")
* .setDomainMapping(DomainMapping.newBuilder().build())
* .setUpdateMask(FieldMask.newBuilder().build())
* .build();
* OperationFuture<DomainMapping, OperationMetadataV1> future =
* domainMappingsClient.updateDomainMappingOperationCallable().futureCall(request);
* // Do something.
* DomainMapping response = future.get();
* }
* }</pre>
*/
public final OperationCallable<UpdateDomainMappingRequest, DomainMapping, OperationMetadataV1>
updateDomainMappingOperationCallable() {
return stub.updateDomainMappingOperationCallable();
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Updates the specified domain mapping. To map an SSL certificate to a domain mapping, update
* `certificate_id` to point to an `AuthorizedCertificate` resource. A user must be authorized to
* administer the associated domain in order to update a `DomainMapping` resource.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (DomainMappingsClient domainMappingsClient = DomainMappingsClient.create()) {
* UpdateDomainMappingRequest request =
* UpdateDomainMappingRequest.newBuilder()
* .setName("name3373707")
* .setDomainMapping(DomainMapping.newBuilder().build())
* .setUpdateMask(FieldMask.newBuilder().build())
* .build();
* ApiFuture<Operation> future =
* domainMappingsClient.updateDomainMappingCallable().futureCall(request);
* // Do something.
* Operation response = future.get();
* }
* }</pre>
*/
public final UnaryCallable<UpdateDomainMappingRequest, Operation> updateDomainMappingCallable() {
return stub.updateDomainMappingCallable();
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Deletes the specified domain mapping. A user must be authorized to administer the associated
* domain in order to delete a `DomainMapping` resource.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (DomainMappingsClient domainMappingsClient = DomainMappingsClient.create()) {
* DeleteDomainMappingRequest request =
* DeleteDomainMappingRequest.newBuilder().setName("name3373707").build();
* domainMappingsClient.deleteDomainMappingAsync(request).get();
* }
* }</pre>
*
* @param request The request object containing all of the parameters for the API call.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final OperationFuture<Empty, OperationMetadataV1> deleteDomainMappingAsync(
DeleteDomainMappingRequest request) {
return deleteDomainMappingOperationCallable().futureCall(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Deletes the specified domain mapping. A user must be authorized to administer the associated
* domain in order to delete a `DomainMapping` resource.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (DomainMappingsClient domainMappingsClient = DomainMappingsClient.create()) {
* DeleteDomainMappingRequest request =
* DeleteDomainMappingRequest.newBuilder().setName("name3373707").build();
* OperationFuture<Empty, OperationMetadataV1> future =
* domainMappingsClient.deleteDomainMappingOperationCallable().futureCall(request);
* // Do something.
* future.get();
* }
* }</pre>
*/
public final OperationCallable<DeleteDomainMappingRequest, Empty, OperationMetadataV1>
deleteDomainMappingOperationCallable() {
return stub.deleteDomainMappingOperationCallable();
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Deletes the specified domain mapping. A user must be authorized to administer the associated
* domain in order to delete a `DomainMapping` resource.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (DomainMappingsClient domainMappingsClient = DomainMappingsClient.create()) {
* DeleteDomainMappingRequest request =
* DeleteDomainMappingRequest.newBuilder().setName("name3373707").build();
* ApiFuture<Operation> future =
* domainMappingsClient.deleteDomainMappingCallable().futureCall(request);
* // Do something.
* future.get();
* }
* }</pre>
*/
public final UnaryCallable<DeleteDomainMappingRequest, Operation> deleteDomainMappingCallable() {
return stub.deleteDomainMappingCallable();
}
@Override
public final void close() {
stub.close();
}
@Override
public void shutdown() {
stub.shutdown();
}
@Override
public boolean isShutdown() {
return stub.isShutdown();
}
@Override
public boolean isTerminated() {
return stub.isTerminated();
}
@Override
public void shutdownNow() {
stub.shutdownNow();
}
@Override
public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
return stub.awaitTermination(duration, unit);
}
public static class ListDomainMappingsPagedResponse
extends AbstractPagedListResponse<
ListDomainMappingsRequest,
ListDomainMappingsResponse,
DomainMapping,
ListDomainMappingsPage,
ListDomainMappingsFixedSizeCollection> {
public static ApiFuture<ListDomainMappingsPagedResponse> createAsync(
PageContext<ListDomainMappingsRequest, ListDomainMappingsResponse, DomainMapping> context,
ApiFuture<ListDomainMappingsResponse> futureResponse) {
ApiFuture<ListDomainMappingsPage> futurePage =
ListDomainMappingsPage.createEmptyPage().createPageAsync(context, futureResponse);
return ApiFutures.transform(
futurePage,
input -> new ListDomainMappingsPagedResponse(input),
MoreExecutors.directExecutor());
}
private ListDomainMappingsPagedResponse(ListDomainMappingsPage page) {
super(page, ListDomainMappingsFixedSizeCollection.createEmptyCollection());
}
}
public static class ListDomainMappingsPage
extends AbstractPage<
ListDomainMappingsRequest,
ListDomainMappingsResponse,
DomainMapping,
ListDomainMappingsPage> {
private ListDomainMappingsPage(
PageContext<ListDomainMappingsRequest, ListDomainMappingsResponse, DomainMapping> context,
ListDomainMappingsResponse response) {
super(context, response);
}
private static ListDomainMappingsPage createEmptyPage() {
return new ListDomainMappingsPage(null, null);
}
@Override
protected ListDomainMappingsPage createPage(
PageContext<ListDomainMappingsRequest, ListDomainMappingsResponse, DomainMapping> context,
ListDomainMappingsResponse response) {
return new ListDomainMappingsPage(context, response);
}
@Override
public ApiFuture<ListDomainMappingsPage> createPageAsync(
PageContext<ListDomainMappingsRequest, ListDomainMappingsResponse, DomainMapping> context,
ApiFuture<ListDomainMappingsResponse> futureResponse) {
return super.createPageAsync(context, futureResponse);
}
}
public static class ListDomainMappingsFixedSizeCollection
extends AbstractFixedSizeCollection<
ListDomainMappingsRequest,
ListDomainMappingsResponse,
DomainMapping,
ListDomainMappingsPage,
ListDomainMappingsFixedSizeCollection> {
private ListDomainMappingsFixedSizeCollection(
List<ListDomainMappingsPage> pages, int collectionSize) {
super(pages, collectionSize);
}
private static ListDomainMappingsFixedSizeCollection createEmptyCollection() {
return new ListDomainMappingsFixedSizeCollection(null, 0);
}
@Override
protected ListDomainMappingsFixedSizeCollection createCollection(
List<ListDomainMappingsPage> pages, int collectionSize) {
return new ListDomainMappingsFixedSizeCollection(pages, collectionSize);
}
}
}
|
openjdk/jdk8 | 36,222 | jdk/src/share/classes/java/nio/charset/Charset.java | /*
* Copyright (c) 2000, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package java.nio.charset;
import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.nio.charset.spi.CharsetProvider;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Locale;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
import java.util.ServiceLoader;
import java.util.ServiceConfigurationError;
import java.util.SortedMap;
import java.util.TreeMap;
import sun.misc.ASCIICaseInsensitiveComparator;
import sun.nio.cs.StandardCharsets;
import sun.nio.cs.ThreadLocalCoders;
import sun.security.action.GetPropertyAction;
/**
* A named mapping between sequences of sixteen-bit Unicode <a
* href="../../lang/Character.html#unicode">code units</a> and sequences of
* bytes. This class defines methods for creating decoders and encoders and
* for retrieving the various names associated with a charset. Instances of
* this class are immutable.
*
* <p> This class also defines static methods for testing whether a particular
* charset is supported, for locating charset instances by name, and for
* constructing a map that contains every charset for which support is
* available in the current Java virtual machine. Support for new charsets can
* be added via the service-provider interface defined in the {@link
* java.nio.charset.spi.CharsetProvider} class.
*
* <p> All of the methods defined in this class are safe for use by multiple
* concurrent threads.
*
*
* <a name="names"></a><a name="charenc"></a>
* <h2>Charset names</h2>
*
* <p> Charsets are named by strings composed of the following characters:
*
* <ul>
*
* <li> The uppercase letters <tt>'A'</tt> through <tt>'Z'</tt>
* (<tt>'\u0041'</tt> through <tt>'\u005a'</tt>),
*
* <li> The lowercase letters <tt>'a'</tt> through <tt>'z'</tt>
* (<tt>'\u0061'</tt> through <tt>'\u007a'</tt>),
*
* <li> The digits <tt>'0'</tt> through <tt>'9'</tt>
* (<tt>'\u0030'</tt> through <tt>'\u0039'</tt>),
*
* <li> The dash character <tt>'-'</tt>
* (<tt>'\u002d'</tt>, <small>HYPHEN-MINUS</small>),
*
* <li> The plus character <tt>'+'</tt>
* (<tt>'\u002b'</tt>, <small>PLUS SIGN</small>),
*
* <li> The period character <tt>'.'</tt>
* (<tt>'\u002e'</tt>, <small>FULL STOP</small>),
*
* <li> The colon character <tt>':'</tt>
* (<tt>'\u003a'</tt>, <small>COLON</small>), and
*
* <li> The underscore character <tt>'_'</tt>
* (<tt>'\u005f'</tt>, <small>LOW LINE</small>).
*
* </ul>
*
* A charset name must begin with either a letter or a digit. The empty string
* is not a legal charset name. Charset names are not case-sensitive; that is,
* case is always ignored when comparing charset names. Charset names
* generally follow the conventions documented in <a
* href="http://www.ietf.org/rfc/rfc2278.txt"><i>RFC 2278: IANA Charset
* Registration Procedures</i></a>.
*
* <p> Every charset has a <i>canonical name</i> and may also have one or more
* <i>aliases</i>. The canonical name is returned by the {@link #name() name} method
* of this class. Canonical names are, by convention, usually in upper case.
* The aliases of a charset are returned by the {@link #aliases() aliases}
* method.
*
* <p><a name="hn">Some charsets have an <i>historical name</i> that is defined for
* compatibility with previous versions of the Java platform.</a> A charset's
* historical name is either its canonical name or one of its aliases. The
* historical name is returned by the <tt>getEncoding()</tt> methods of the
* {@link java.io.InputStreamReader#getEncoding InputStreamReader} and {@link
* java.io.OutputStreamWriter#getEncoding OutputStreamWriter} classes.
*
* <p><a name="iana"> </a>If a charset listed in the <a
* href="http://www.iana.org/assignments/character-sets"><i>IANA Charset
* Registry</i></a> is supported by an implementation of the Java platform then
* its canonical name must be the name listed in the registry. Many charsets
* are given more than one name in the registry, in which case the registry
* identifies one of the names as <i>MIME-preferred</i>. If a charset has more
* than one registry name then its canonical name must be the MIME-preferred
* name and the other names in the registry must be valid aliases. If a
* supported charset is not listed in the IANA registry then its canonical name
* must begin with one of the strings <tt>"X-"</tt> or <tt>"x-"</tt>.
*
* <p> The IANA charset registry does change over time, and so the canonical
* name and the aliases of a particular charset may also change over time. To
* ensure compatibility it is recommended that no alias ever be removed from a
* charset, and that if the canonical name of a charset is changed then its
* previous canonical name be made into an alias.
*
*
* <h2>Standard charsets</h2>
*
*
*
* <p><a name="standard">Every implementation of the Java platform is required to support the
* following standard charsets.</a> Consult the release documentation for your
* implementation to see if any other charsets are supported. The behavior
* of such optional charsets may differ between implementations.
*
* <blockquote><table width="80%" summary="Description of standard charsets">
* <tr><th align="left">Charset</th><th align="left">Description</th></tr>
* <tr><td valign=top><tt>US-ASCII</tt></td>
* <td>Seven-bit ASCII, a.k.a. <tt>ISO646-US</tt>,
* a.k.a. the Basic Latin block of the Unicode character set</td></tr>
* <tr><td valign=top><tt>ISO-8859-1 </tt></td>
* <td>ISO Latin Alphabet No. 1, a.k.a. <tt>ISO-LATIN-1</tt></td></tr>
* <tr><td valign=top><tt>UTF-8</tt></td>
* <td>Eight-bit UCS Transformation Format</td></tr>
* <tr><td valign=top><tt>UTF-16BE</tt></td>
* <td>Sixteen-bit UCS Transformation Format,
* big-endian byte order</td></tr>
* <tr><td valign=top><tt>UTF-16LE</tt></td>
* <td>Sixteen-bit UCS Transformation Format,
* little-endian byte order</td></tr>
* <tr><td valign=top><tt>UTF-16</tt></td>
* <td>Sixteen-bit UCS Transformation Format,
* byte order identified by an optional byte-order mark</td></tr>
* </table></blockquote>
*
* <p> The <tt>UTF-8</tt> charset is specified by <a
* href="http://www.ietf.org/rfc/rfc2279.txt"><i>RFC 2279</i></a>; the
* transformation format upon which it is based is specified in
* Amendment 2 of ISO 10646-1 and is also described in the <a
* href="http://www.unicode.org/unicode/standard/standard.html"><i>Unicode
* Standard</i></a>.
*
* <p> The <tt>UTF-16</tt> charsets are specified by <a
* href="http://www.ietf.org/rfc/rfc2781.txt"><i>RFC 2781</i></a>; the
* transformation formats upon which they are based are specified in
* Amendment 1 of ISO 10646-1 and are also described in the <a
* href="http://www.unicode.org/unicode/standard/standard.html"><i>Unicode
* Standard</i></a>.
*
* <p> The <tt>UTF-16</tt> charsets use sixteen-bit quantities and are
* therefore sensitive to byte order. In these encodings the byte order of a
* stream may be indicated by an initial <i>byte-order mark</i> represented by
* the Unicode character <tt>'\uFEFF'</tt>. Byte-order marks are handled
* as follows:
*
* <ul>
*
* <li><p> When decoding, the <tt>UTF-16BE</tt> and <tt>UTF-16LE</tt>
* charsets interpret the initial byte-order marks as a <small>ZERO-WIDTH
* NON-BREAKING SPACE</small>; when encoding, they do not write
* byte-order marks. </p></li>
*
* <li><p> When decoding, the <tt>UTF-16</tt> charset interprets the
* byte-order mark at the beginning of the input stream to indicate the
* byte-order of the stream but defaults to big-endian if there is no
* byte-order mark; when encoding, it uses big-endian byte order and writes
* a big-endian byte-order mark. </p></li>
*
* </ul>
*
* In any case, byte order marks occurring after the first element of an
* input sequence are not omitted since the same code is used to represent
* <small>ZERO-WIDTH NON-BREAKING SPACE</small>.
*
* <p> Every instance of the Java virtual machine has a default charset, which
* may or may not be one of the standard charsets. The default charset is
* determined during virtual-machine startup and typically depends upon the
* locale and charset being used by the underlying operating system. </p>
*
* <p>The {@link StandardCharsets} class defines constants for each of the
* standard charsets.
*
* <h2>Terminology</h2>
*
* <p> The name of this class is taken from the terms used in
* <a href="http://www.ietf.org/rfc/rfc2278.txt"><i>RFC 2278</i></a>.
* In that document a <i>charset</i> is defined as the combination of
* one or more coded character sets and a character-encoding scheme.
* (This definition is confusing; some other software systems define
* <i>charset</i> as a synonym for <i>coded character set</i>.)
*
* <p> A <i>coded character set</i> is a mapping between a set of abstract
* characters and a set of integers. US-ASCII, ISO 8859-1,
* JIS X 0201, and Unicode are examples of coded character sets.
*
* <p> Some standards have defined a <i>character set</i> to be simply a
* set of abstract characters without an associated assigned numbering.
* An alphabet is an example of such a character set. However, the subtle
* distinction between <i>character set</i> and <i>coded character set</i>
* is rarely used in practice; the former has become a short form for the
* latter, including in the Java API specification.
*
* <p> A <i>character-encoding scheme</i> is a mapping between one or more
* coded character sets and a set of octet (eight-bit byte) sequences.
* UTF-8, UTF-16, ISO 2022, and EUC are examples of
* character-encoding schemes. Encoding schemes are often associated with
* a particular coded character set; UTF-8, for example, is used only to
* encode Unicode. Some schemes, however, are associated with multiple
* coded character sets; EUC, for example, can be used to encode
* characters in a variety of Asian coded character sets.
*
* <p> When a coded character set is used exclusively with a single
* character-encoding scheme then the corresponding charset is usually
* named for the coded character set; otherwise a charset is usually named
* for the encoding scheme and, possibly, the locale of the coded
* character sets that it supports. Hence <tt>US-ASCII</tt> is both the
* name of a coded character set and of the charset that encodes it, while
* <tt>EUC-JP</tt> is the name of the charset that encodes the
* JIS X 0201, JIS X 0208, and JIS X 0212
* coded character sets for the Japanese language.
*
* <p> The native character encoding of the Java programming language is
* UTF-16. A charset in the Java platform therefore defines a mapping
* between sequences of sixteen-bit UTF-16 code units (that is, sequences
* of chars) and sequences of bytes. </p>
*
*
* @author Mark Reinhold
* @author JSR-51 Expert Group
* @since 1.4
*
* @see CharsetDecoder
* @see CharsetEncoder
* @see java.nio.charset.spi.CharsetProvider
* @see java.lang.Character
*/
public abstract class Charset
    implements Comparable<Charset>
{

    /* -- Static methods -- */

    // Cached value of the "sun.nio.cs.bugLevel" system property; read lazily
    // once the VM has booted.  Used to preserve legacy buggy behaviour
    // (e.g. accepting an empty charset name) when explicitly requested.
    private static volatile String bugLevel = null;

    static boolean atBugLevel(String bl) { // package-private
        String level = bugLevel;
        if (level == null) {
            // System properties are unavailable before VM boot completes,
            // so report "no bug level" until then (and do not cache).
            if (!sun.misc.VM.isBooted())
                return false;
            bugLevel = level = AccessController.doPrivileged(
                new GetPropertyAction("sun.nio.cs.bugLevel", ""));
        }
        return level.equals(bl);
    }

    /**
     * Checks that the given string is a legal charset name.
     *
     * @param s
     *         A purported charset name
     *
     * @throws  IllegalCharsetNameException
     *          If the given name is not a legal charset name
     */
    private static void checkName(String s) {
        int n = s.length();
        // An empty name is illegal unless the legacy 1.4 bug level is active.
        if (!atBugLevel("1.4")) {
            if (n == 0)
                throw new IllegalCharsetNameException(s);
        }
        // Legal characters: ASCII alphanumerics anywhere; '-', '+', ':',
        // '_' and '.' anywhere except the first position.
        for (int i = 0; i < n; i++) {
            char c = s.charAt(i);
            if (c >= 'A' && c <= 'Z') continue;
            if (c >= 'a' && c <= 'z') continue;
            if (c >= '0' && c <= '9') continue;
            if (c == '-' && i != 0) continue;
            if (c == '+' && i != 0) continue;
            if (c == ':' && i != 0) continue;
            if (c == '_' && i != 0) continue;
            if (c == '.' && i != 0) continue;
            throw new IllegalCharsetNameException(s);
        }
    }

    /* The standard set of charsets */
    private static CharsetProvider standardProvider = new StandardCharsets();

    // Cache of the most-recently-returned charsets,
    // along with the names that were used to find them
    //
    // Each entry is a two-element array { String name, Charset cs }.  The
    // caches are racy by design: a lost update merely costs a re-lookup.
    private static volatile Object[] cache1 = null; // "Level 1" cache
    private static volatile Object[] cache2 = null; // "Level 2" cache

    private static void cache(String charsetName, Charset cs) {
        // Demote the current level-1 entry before installing the new one.
        cache2 = cache1;
        cache1 = new Object[] { charsetName, cs };
    }

    // Creates an iterator that walks over the available providers, ignoring
    // those whose lookup or instantiation causes a security exception to be
    // thrown.  Should be invoked with full privileges.
    //
    private static Iterator<CharsetProvider> providers() {
        return new Iterator<CharsetProvider>() {
                ClassLoader cl = ClassLoader.getSystemClassLoader();
                ServiceLoader<CharsetProvider> sl =
                    ServiceLoader.load(CharsetProvider.class, cl);
                Iterator<CharsetProvider> i = sl.iterator();

                // One-element lookahead, so hasNext() can skip providers
                // whose instantiation fails with a SecurityException.
                CharsetProvider next = null;

                private boolean getNext() {
                    while (next == null) {
                        try {
                            if (!i.hasNext())
                                return false;
                            next = i.next();
                        } catch (ServiceConfigurationError sce) {
                            if (sce.getCause() instanceof SecurityException) {
                                // Ignore security exceptions
                                continue;
                            }
                            throw sce;
                        }
                    }
                    return true;
                }

                public boolean hasNext() {
                    return getNext();
                }

                public CharsetProvider next() {
                    if (!getNext())
                        throw new NoSuchElementException();
                    CharsetProvider n = next;
                    next = null;
                    return n;
                }

                public void remove() {
                    throw new UnsupportedOperationException();
                }
            };
    }

    // Thread-local gate to prevent recursive provider lookups
    // (non-null value == "this thread is already inside a provider lookup")
    private static ThreadLocal<ThreadLocal<?>> gate =
        new ThreadLocal<ThreadLocal<?>>();

    private static Charset lookupViaProviders(final String charsetName) {

        // The runtime startup sequence looks up standard charsets as a
        // consequence of the VM's invocation of System.initializeSystemClass
        // in order to, e.g., set system properties and encode filenames.  At
        // that point the application class loader has not been initialized,
        // however, so we can't look for providers because doing so will cause
        // that loader to be prematurely initialized with incomplete
        // information.
        //
        if (!sun.misc.VM.isBooted())
            return null;

        if (gate.get() != null)
            // Avoid recursive provider lookups
            return null;
        try {
            gate.set(gate);

            return AccessController.doPrivileged(
                new PrivilegedAction<Charset>() {
                    public Charset run() {
                        // First provider that recognizes the name wins.
                        for (Iterator<CharsetProvider> i = providers();
                             i.hasNext();) {
                            CharsetProvider cp = i.next();
                            Charset cs = cp.charsetForName(charsetName);
                            if (cs != null)
                                return cs;
                        }
                        return null;
                    }
                });

        } finally {
            gate.set(null);
        }
    }

    /* The extended set of charsets */
    // Holder idiom: defers loading of the extended provider (charsets.jar)
    // until it is first needed.
    private static class ExtendedProviderHolder {
        static final CharsetProvider extendedProvider = extendedProvider();
        // returns ExtendedProvider, if installed
        private static CharsetProvider extendedProvider() {
            return AccessController.doPrivileged(
                       new PrivilegedAction<CharsetProvider>() {
                           public CharsetProvider run() {
                                try {
                                    Class<?> epc
                                        = Class.forName("sun.nio.cs.ext.ExtendedCharsets");
                                    return (CharsetProvider)epc.newInstance();
                                } catch (ClassNotFoundException x) {
                                    // Extended charsets not available
                                    // (charsets.jar not present)
                                } catch (InstantiationException |
                                         IllegalAccessException x) {
                                  throw new Error(x);
                                }
                                return null;
                            }
                        });
        }
    }

    private static Charset lookupExtendedCharset(String charsetName) {
        CharsetProvider ecp = ExtendedProviderHolder.extendedProvider;
        return (ecp != null) ? ecp.charsetForName(charsetName) : null;
    }

    private static Charset lookup(String charsetName) {
        if (charsetName == null)
            throw new IllegalArgumentException("Null charset name");

        Object[] a;
        if ((a = cache1) != null && charsetName.equals(a[0]))
            return (Charset)a[1];
        // We expect most programs to use one Charset repeatedly.
        // We convey a hint to this effect to the VM by putting the
        // level 1 cache miss code in a separate method.
        return lookup2(charsetName);
    }

    private static Charset lookup2(String charsetName) {
        Object[] a;
        if ((a = cache2) != null && charsetName.equals(a[0])) {
            // Promote the level-2 hit back to level 1.
            cache2 = cache1;
            cache1 = a;
            return (Charset)a[1];
        }

        // Search order: built-in charsets, then the extended provider,
        // then any installed service providers.
        Charset cs;
        if ((cs = standardProvider.charsetForName(charsetName)) != null ||
            (cs = lookupExtendedCharset(charsetName))           != null ||
            (cs = lookupViaProviders(charsetName))              != null)
        {
            cache(charsetName, cs);
            return cs;
        }

        /* Only need to check the name if we didn't find a charset for it */
        checkName(charsetName);
        return null;
    }

    /**
     * Tells whether the named charset is supported.
     *
     * @param charsetName
     *         The name of the requested charset; may be either
     *         a canonical name or an alias
     *
     * @return  <tt>true</tt> if, and only if, support for the named charset
     *          is available in the current Java virtual machine
     *
     * @throws IllegalCharsetNameException
     *         If the given charset name is illegal
     *
     * @throws  IllegalArgumentException
     *          If the given <tt>charsetName</tt> is null
     */
    public static boolean isSupported(String charsetName) {
        return (lookup(charsetName) != null);
    }

    /**
     * Returns a charset object for the named charset.
     *
     * @param  charsetName
     *         The name of the requested charset; may be either
     *         a canonical name or an alias
     *
     * @return  A charset object for the named charset
     *
     * @throws  IllegalCharsetNameException
     *          If the given charset name is illegal
     *
     * @throws  IllegalArgumentException
     *          If the given <tt>charsetName</tt> is null
     *
     * @throws  UnsupportedCharsetException
     *          If no support for the named charset is available
     *          in this instance of the Java virtual machine
     */
    public static Charset forName(String charsetName) {
        Charset cs = lookup(charsetName);
        if (cs != null)
            return cs;
        throw new UnsupportedCharsetException(charsetName);
    }

    // Fold charsets from the given iterator into the given map, ignoring
    // charsets whose names already have entries in the map.
    //
    private static void put(Iterator<Charset> i, Map<String,Charset> m) {
        while (i.hasNext()) {
            Charset cs = i.next();
            if (!m.containsKey(cs.name()))
                m.put(cs.name(), cs);
        }
    }

    /**
     * Constructs a sorted map from canonical charset names to charset objects.
     *
     * <p> The map returned by this method will have one entry for each charset
     * for which support is available in the current Java virtual machine.  If
     * two or more supported charsets have the same canonical name then the
     * resulting map will contain just one of them; which one it will contain
     * is not specified. </p>
     *
     * <p> The invocation of this method, and the subsequent use of the
     * resulting map, may cause time-consuming disk or network I/O operations
     * to occur.  This method is provided for applications that need to
     * enumerate all of the available charsets, for example to allow user
     * charset selection.  This method is not used by the {@link #forName
     * forName} method, which instead employs an efficient incremental lookup
     * algorithm.
     *
     * <p> This method may return different results at different times if new
     * charset providers are dynamically made available to the current Java
     * virtual machine.  In the absence of such changes, the charsets returned
     * by this method are exactly those that can be retrieved via the {@link
     * #forName forName} method. </p>
     *
     * @return An immutable, case-insensitive map from canonical charset names
     *         to charset objects
     */
    public static SortedMap<String,Charset> availableCharsets() {
        return AccessController.doPrivileged(
            new PrivilegedAction<SortedMap<String,Charset>>() {
                public SortedMap<String,Charset> run() {
                    // Earlier providers win on duplicate canonical names:
                    // standard, then extended, then installed providers.
                    TreeMap<String,Charset> m =
                        new TreeMap<String,Charset>(
                            ASCIICaseInsensitiveComparator.CASE_INSENSITIVE_ORDER);
                    put(standardProvider.charsets(), m);
                    CharsetProvider ecp = ExtendedProviderHolder.extendedProvider;
                    if (ecp != null)
                        put(ecp.charsets(), m);
                    for (Iterator<CharsetProvider> i = providers(); i.hasNext();) {
                        CharsetProvider cp = i.next();
                        put(cp.charsets(), m);
                    }
                    return Collections.unmodifiableSortedMap(m);
                }
            });
    }

    private static volatile Charset defaultCharset;

    /**
     * Returns the default charset of this Java virtual machine.
     *
     * <p> The default charset is determined during virtual-machine startup and
     * typically depends upon the locale and charset of the underlying
     * operating system.
     *
     * @return  A charset object for the default charset
     *
     * @since 1.5
     */
    public static Charset defaultCharset() {
        // Lazy initialization; the synchronized block makes the property
        // read and assignment race-free.  NOTE(review): there is no re-check
        // of defaultCharset inside the block, so two racing threads may both
        // compute it — benign, since the result is identical.
        if (defaultCharset == null) {
            synchronized (Charset.class) {
                String csn = AccessController.doPrivileged(
                    new GetPropertyAction("file.encoding"));
                Charset cs = lookup(csn);
                if (cs != null)
                    defaultCharset = cs;
                else
                    // Fall back to UTF-8 when "file.encoding" names an
                    // unsupported charset.
                    defaultCharset = forName("UTF-8");
            }
        }
        return defaultCharset;
    }


    /* -- Instance fields and methods -- */

    private final String name;          // tickles a bug in oldjavac
    private final String[] aliases;     // tickles a bug in oldjavac
    // Lazily built, unmodifiable view of the aliases array (see aliases()).
    private Set<String> aliasSet = null;

    /**
     * Initializes a new charset with the given canonical name and alias
     * set.
     *
     * @param  canonicalName
     *         The canonical name of this charset
     *
     * @param  aliases
     *         An array of this charset's aliases, or null if it has no aliases
     *
     * @throws IllegalCharsetNameException
     *         If the canonical name or any of the aliases are illegal
     */
    protected Charset(String canonicalName, String[] aliases) {
        checkName(canonicalName);
        String[] as = (aliases == null) ? new String[0] : aliases;
        for (int i = 0; i < as.length; i++)
            checkName(as[i]);
        this.name = canonicalName;
        this.aliases = as;
    }

    /**
     * Returns this charset's canonical name.
     *
     * @return  The canonical name of this charset
     */
    public final String name() {
        return name;
    }

    /**
     * Returns a set containing this charset's aliases.
     *
     * @return  An immutable set of this charset's aliases
     */
    public final Set<String> aliases() {
        // Unsynchronized lazy cache: a race may build the set twice, but
        // every result is an equal immutable set, so this is benign.
        if (aliasSet != null)
            return aliasSet;
        int n = aliases.length;
        HashSet<String> hs = new HashSet<String>(n);
        for (int i = 0; i < n; i++)
            hs.add(aliases[i]);
        aliasSet = Collections.unmodifiableSet(hs);
        return aliasSet;
    }

    /**
     * Returns this charset's human-readable name for the default locale.
     *
     * <p> The default implementation of this method simply returns this
     * charset's canonical name.  Concrete subclasses of this class may
     * override this method in order to provide a localized display name. </p>
     *
     * @return  The display name of this charset in the default locale
     */
    public String displayName() {
        return name;
    }

    /**
     * Tells whether or not this charset is registered in the <a
     * href="http://www.iana.org/assignments/character-sets">IANA Charset
     * Registry</a>.
     *
     * @return  <tt>true</tt> if, and only if, this charset is known by its
     *          implementor to be registered with the IANA
     */
    public final boolean isRegistered() {
        // By convention, unregistered charsets carry an "X-"/"x-" prefix.
        return !name.startsWith("X-") && !name.startsWith("x-");
    }

    /**
     * Returns this charset's human-readable name for the given locale.
     *
     * <p> The default implementation of this method simply returns this
     * charset's canonical name.  Concrete subclasses of this class may
     * override this method in order to provide a localized display name. </p>
     *
     * @param  locale
     *         The locale for which the display name is to be retrieved
     *
     * @return  The display name of this charset in the given locale
     */
    public String displayName(Locale locale) {
        return name;
    }

    /**
     * Tells whether or not this charset contains the given charset.
     *
     * <p> A charset <i>C</i> is said to <i>contain</i> a charset <i>D</i> if,
     * and only if, every character representable in <i>D</i> is also
     * representable in <i>C</i>.  If this relationship holds then it is
     * guaranteed that every string that can be encoded in <i>D</i> can also be
     * encoded in <i>C</i> without performing any replacements.
     *
     * <p> That <i>C</i> contains <i>D</i> does not imply that each character
     * representable in <i>C</i> by a particular byte sequence is represented
     * in <i>D</i> by the same byte sequence, although sometimes this is the
     * case.
     *
     * <p> Every charset contains itself.
     *
     * <p> This method computes an approximation of the containment relation:
     * If it returns <tt>true</tt> then the given charset is known to be
     * contained by this charset; if it returns <tt>false</tt>, however, then
     * it is not necessarily the case that the given charset is not contained
     * in this charset.
     *
     * @param   cs
     *          The given charset
     *
     * @return  <tt>true</tt> if the given charset is contained in this charset
     */
    public abstract boolean contains(Charset cs);

    /**
     * Constructs a new decoder for this charset.
     *
     * @return  A new decoder for this charset
     */
    public abstract CharsetDecoder newDecoder();

    /**
     * Constructs a new encoder for this charset.
     *
     * @return  A new encoder for this charset
     *
     * @throws  UnsupportedOperationException
     *          If this charset does not support encoding
     */
    public abstract CharsetEncoder newEncoder();

    /**
     * Tells whether or not this charset supports encoding.
     *
     * <p> Nearly all charsets support encoding.  The primary exceptions are
     * special-purpose <i>auto-detect</i> charsets whose decoders can determine
     * which of several possible encoding schemes is in use by examining the
     * input byte sequence.  Such charsets do not support encoding because
     * there is no way to determine which encoding should be used on output.
     * Implementations of such charsets should override this method to return
     * <tt>false</tt>. </p>
     *
     * @return  <tt>true</tt> if, and only if, this charset supports encoding
     */
    public boolean canEncode() {
        return true;
    }

    /**
     * Convenience method that decodes bytes in this charset into Unicode
     * characters.
     *
     * <p> An invocation of this method upon a charset <tt>cs</tt> returns the
     * same result as the expression
     *
     * <pre>
     *     cs.newDecoder()
     *       .onMalformedInput(CodingErrorAction.REPLACE)
     *       .onUnmappableCharacter(CodingErrorAction.REPLACE)
     *       .decode(bb); </pre>
     *
     * except that it is potentially more efficient because it can cache
     * decoders between successive invocations.
     *
     * <p> This method always replaces malformed-input and unmappable-character
     * sequences with this charset's default replacement byte array.  In order
     * to detect such sequences, use the {@link
     * CharsetDecoder#decode(java.nio.ByteBuffer)} method directly. </p>
     *
     * @param  bb  The byte buffer to be decoded
     *
     * @return  A char buffer containing the decoded characters
     */
    public final CharBuffer decode(ByteBuffer bb) {
        try {
            return ThreadLocalCoders.decoderFor(this)
                .onMalformedInput(CodingErrorAction.REPLACE)
                .onUnmappableCharacter(CodingErrorAction.REPLACE)
                .decode(bb);
        } catch (CharacterCodingException x) {
            // REPLACE mode never reports coding errors.
            throw new Error(x);         // Can't happen
        }
    }

    /**
     * Convenience method that encodes Unicode characters into bytes in this
     * charset.
     *
     * <p> An invocation of this method upon a charset <tt>cs</tt> returns the
     * same result as the expression
     *
     * <pre>
     *     cs.newEncoder()
     *       .onMalformedInput(CodingErrorAction.REPLACE)
     *       .onUnmappableCharacter(CodingErrorAction.REPLACE)
     *       .encode(bb); </pre>
     *
     * except that it is potentially more efficient because it can cache
     * encoders between successive invocations.
     *
     * <p> This method always replaces malformed-input and unmappable-character
     * sequences with this charset's default replacement string.  In order to
     * detect such sequences, use the {@link
     * CharsetEncoder#encode(java.nio.CharBuffer)} method directly. </p>
     *
     * @param  cb  The char buffer to be encoded
     *
     * @return  A byte buffer containing the encoded characters
     */
    public final ByteBuffer encode(CharBuffer cb) {
        try {
            return ThreadLocalCoders.encoderFor(this)
                .onMalformedInput(CodingErrorAction.REPLACE)
                .onUnmappableCharacter(CodingErrorAction.REPLACE)
                .encode(cb);
        } catch (CharacterCodingException x) {
            // REPLACE mode never reports coding errors.
            throw new Error(x);         // Can't happen
        }
    }

    /**
     * Convenience method that encodes a string into bytes in this charset.
     *
     * <p> An invocation of this method upon a charset <tt>cs</tt> returns the
     * same result as the expression
     *
     * <pre>
     *     cs.encode(CharBuffer.wrap(s)); </pre>
     *
     * @param  str  The string to be encoded
     *
     * @return  A byte buffer containing the encoded characters
     */
    public final ByteBuffer encode(String str) {
        return encode(CharBuffer.wrap(str));
    }

    /**
     * Compares this charset to another.
     *
     * <p> Charsets are ordered by their canonical names, without regard to
     * case. </p>
     *
     * @param  that
     *         The charset to which this charset is to be compared
     *
     * @return A negative integer, zero, or a positive integer as this charset
     *         is less than, equal to, or greater than the specified charset
     */
    public final int compareTo(Charset that) {
        return (name().compareToIgnoreCase(that.name()));
    }

    /**
     * Computes a hashcode for this charset.
     *
     * @return  An integer hashcode
     */
    public final int hashCode() {
        return name().hashCode();
    }

    /**
     * Tells whether or not this object is equal to another.
     *
     * <p> Two charsets are equal if, and only if, they have the same canonical
     * names.  A charset is never equal to any other type of object. </p>
     *
     * @return  <tt>true</tt> if, and only if, this charset is equal to the
     *          given object
     */
    public final boolean equals(Object ob) {
        if (!(ob instanceof Charset))
            return false;
        if (this == ob)
            return true;
        return name.equals(((Charset)ob).name());
    }

    /**
     * Returns a string describing this charset.
     *
     * @return  A string describing this charset
     */
    public final String toString() {
        return name();
    }

}
|
apache/cxf | 35,943 | rt/transports/http-undertow/src/test/java/org/apache/cxf/transport/http_undertow/UndertowHTTPDestinationTest.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.cxf.transport.http_undertow;
import java.io.IOException;
import java.io.OutputStream;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import javax.xml.namespace.QName;
import jakarta.servlet.ServletInputStream;
import jakarta.servlet.ServletOutputStream;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;
import jakarta.xml.bind.JAXBElement;
import org.apache.cxf.Bus;
import org.apache.cxf.BusException;
import org.apache.cxf.BusFactory;
import org.apache.cxf.bus.extension.ExtensionManagerBus;
import org.apache.cxf.common.util.StringUtils;
import org.apache.cxf.configuration.security.AuthorizationPolicy;
import org.apache.cxf.continuations.SuspendedInvocationException;
import org.apache.cxf.endpoint.EndpointResolverRegistry;
import org.apache.cxf.helpers.CastUtils;
import org.apache.cxf.io.AbstractWrappedOutputStream;
import org.apache.cxf.message.ExchangeImpl;
import org.apache.cxf.message.Message;
import org.apache.cxf.message.MessageImpl;
import org.apache.cxf.policy.PolicyDataEngine;
import org.apache.cxf.security.transport.TLSSessionInfo;
import org.apache.cxf.service.model.EndpointInfo;
import org.apache.cxf.service.model.ServiceInfo;
import org.apache.cxf.transport.Conduit;
import org.apache.cxf.transport.ConduitInitiator;
import org.apache.cxf.transport.ConduitInitiatorManager;
import org.apache.cxf.transport.Destination;
import org.apache.cxf.transport.MessageObserver;
import org.apache.cxf.transport.http.AbstractHTTPDestination;
import org.apache.cxf.transport.http.ContinuationProviderFactory;
import org.apache.cxf.transport.http.DestinationRegistry;
import org.apache.cxf.transport.http.HTTPTransportFactory;
import org.apache.cxf.transport.http.auth.DefaultBasicAuthSupplier;
import org.apache.cxf.transports.http.configuration.HTTPServerPolicy;
import org.apache.cxf.ws.addressing.AddressingProperties;
import org.apache.cxf.ws.addressing.EndpointReferenceType;
import org.apache.cxf.ws.addressing.EndpointReferenceUtils;
import org.apache.cxf.ws.addressing.JAXWSAConstants;
import io.undertow.util.HeaderMap;
import io.undertow.util.HttpString;
import org.junit.After;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.ArgumentMatchers.isA;
import static org.mockito.Mockito.doCallRealMethod;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class UndertowHTTPDestinationTest {
    // --- Authentication constants shared with subclasses ---
    protected static final String AUTH_HEADER = "Authorization";
    protected static final String USER = "copernicus";
    protected static final String PASSWD = "epicycles";
    protected static final String BASIC_AUTH = DefaultBasicAuthSupplier.getBasicAuthHeader(USER, PASSWD);

    // --- Fixture constants ---
    private static final String NOWHERE = "http://nada.nothing.nowhere.null/";
    private static final String PAYLOAD = "message payload";
    private static final String CHALLENGE_HEADER = "WWW-Authenticate";
    private static final String BASIC_CHALLENGE = "Basic realm=terra";
    private static final String DIGEST_CHALLENGE = "Digest realm=luna";
    private static final String CUSTOM_CHALLENGE = "Custom realm=sol";

    // --- Per-test mutable fixture state; reset in tearDown() ---
    private Bus bus;
    private Bus threadDefaultBus;
    private Conduit decoupledBackChannel;
    private EndpointInfo endpointInfo;
    private EndpointReferenceType address;
    private UndertowHTTPServerEngine engine;
    private HTTPServerPolicy policy;
    private UndertowHTTPDestination destination;
    private HttpServletRequest request;
    private HttpServletResponse response;
    private Message inMessage;
    private Message outMessage;
    private MessageObserver observer;
    private ServletInputStream is;
    private ServletOutputStream os;
    private HTTPTransportFactory transportFactory;
    /**
     * This class replaces the engine in the Undertow Destination.
     *
     * <p>The constructor injects a pre-built (mocked) engine and
     * {@link #retrieveEngine()} is overridden to a no-op so the superclass
     * never replaces it with a real one.</p>
     *
     * <p>NOTE(review): the name is a leftover from the EasyMock days; the
     * engine passed in is now a Mockito mock.</p>
     */
    private class EasyMockUndertowHTTPDestination
        extends UndertowHTTPDestination {

        EasyMockUndertowHTTPDestination(Bus bus,
                                        DestinationRegistry registry,
                                        EndpointInfo endpointInfo,
                                        UndertowHTTPServerEngineFactory serverEngineFactory,
                                        UndertowHTTPServerEngine easyMockEngine) throws IOException {
            super(bus, registry, endpointInfo, serverEngineFactory);
            // Install the injected engine on the enclosing test instance.
            engine = easyMockEngine;
        }

        @Override
        public void retrieveEngine() {
            // Leave engine alone.
        }
    }
@After
public void tearDown() {
if (bus != null) {
bus.shutdown(true);
}
bus = null;
transportFactory = null;
decoupledBackChannel = null;
address = null;
engine = null;
request = null;
response = null;
inMessage = null;
outMessage = null;
is = null;
os = null;
destination = null;
BusFactory.setDefaultBus(null);
}
@Test
public void testGetAddress() throws Exception {
destination = setUpDestination();
EndpointReferenceType ref = destination.getAddress();
assertNotNull("unexpected null address", ref);
assertEquals("unexpected address",
EndpointReferenceUtils.getAddress(ref),
StringUtils.addDefaultPortIfMissing(EndpointReferenceUtils.getAddress(address)));
assertEquals("unexpected service name local part",
EndpointReferenceUtils.getServiceName(ref, bus).getLocalPart(),
"Service");
assertEquals("unexpected portName",
EndpointReferenceUtils.getPortName(ref),
"Port");
}
@Test
public void testRandomPortAllocation() throws Exception {
bus = BusFactory.getDefaultBus(true);
transportFactory = new HTTPTransportFactory();
ServiceInfo serviceInfo = new ServiceInfo();
serviceInfo.setName(new QName("bla", "Service"));
EndpointInfo ei = new EndpointInfo(serviceInfo, "");
ei.setName(new QName("bla", "Port"));
Destination d1 = transportFactory.getDestination(ei, bus);
URL url = new URL(d1.getAddress().getAddress().getValue());
assertTrue("No random port has been allocated",
url.getPort() > 0);
}
    /**
     * A SuspendedInvocationException thrown by the observer must propagate
     * out of doService() with its wrapped cause intact, not be swallowed.
     */
    @Test
    public void testSuspendedException() throws Exception {
        destination = setUpDestination(false, false);
        setUpDoService(false);
        final RuntimeException ex = new RuntimeException();
        observer = new MessageObserver() {
            public void onMessage(Message m) {
                // Simulate a continuation suspend wrapping our exception.
                throw new SuspendedInvocationException(ex);
            }
        };
        destination.setMessageObserver(observer);
        try {
            destination.doService(request, response);
            fail("Suspended invocation swallowed");
        } catch (RuntimeException runtimeEx) {
            // The destination must unwrap and rethrow the original cause.
            assertSame("Original exception is not preserved", ex, runtimeEx);
        }
    }
    /**
     * When the server engine has continuations disabled, the destination
     * must not attempt to resume a message from a continuation.
     */
    @Test
    public void testContinuationsIgnored() throws Exception {
        HttpServletRequest httpRequest = mock(HttpServletRequest.class);
        ServiceInfo serviceInfo = new ServiceInfo();
        serviceInfo.setName(new QName("bla", "Service"));
        EndpointInfo ei = new EndpointInfo(serviceInfo, "");
        ei.setName(new QName("bla", "Port"));

        // Engine with continuation support explicitly switched off.
        final UndertowHTTPServerEngine httpEngine = new UndertowHTTPServerEngine();
        httpEngine.setContinuationsEnabled(false);
        // Factory stub that always hands back our engine regardless of port.
        UndertowHTTPServerEngineFactory factory = new UndertowHTTPServerEngineFactory() {
            @Override
            public UndertowHTTPServerEngine retrieveUndertowHTTPServerEngine(int port) {
                return httpEngine;
            }
        };
        Bus b2 = new ExtensionManagerBus();
        transportFactory = new HTTPTransportFactory();
        b2.setExtension(factory, UndertowHTTPServerEngineFactory.class);

        TestUndertowDestination testDestination =
            new TestUndertowDestination(b2,
                                        transportFactory.getRegistry(),
                                        ei,
                                        factory);
        testDestination.finalizeConfig();
        Message mi = testDestination.retrieveFromContinuation(httpRequest);
        assertNull("Continuations must be ignored", mi);
    }
@Test
public void testGetMultiple() throws Exception {
bus = BusFactory.getDefaultBus(true);
transportFactory = new HTTPTransportFactory();
ServiceInfo serviceInfo = new ServiceInfo();
serviceInfo.setName(new QName("bla", "Service"));
EndpointInfo ei = new EndpointInfo(serviceInfo, "");
ei.setName(new QName("bla", "Port"));
ei.setAddress("http://foo");
Destination d1 = transportFactory.getDestination(ei, bus);
Destination d2 = transportFactory.getDestination(ei, bus);
// Second get should not generate a new destination. It should just retrieve the existing one
assertEquals(d1, d2);
d2.shutdown();
Destination d3 = transportFactory.getDestination(ei, bus);
// Now a new destination should have been created
assertNotSame(d1, d3);
}
    /**
     * Clearing the message observer (removing the servant) must be accepted
     * without error; the mock expectations are set in setUpRemoveServant().
     */
    @Test
    public void testRemoveServant() throws Exception {
        destination = setUpDestination();
        setUpRemoveServant();
        destination.setMessageObserver(null);
    }
    /**
     * Exercises doService() with the redirect-URL path enabled; the mock
     * interactions configured by setUpDoService(true) perform the checks.
     */
    @Test
    public void testDoServiceRedirectURL() throws Exception {
        destination = setUpDestination(false, false);
        setUpDoService(true);
        destination.doService(request, response);
    }
    /**
     * doService() must dispatch the request (verified in verifyDoService())
     * and must restore the thread-default bus it found on entry — it may not
     * leave its own bus installed on the calling thread.
     */
    @Test
    public void testDoService() throws Exception {
        // Constructing the bus installs it as the thread-default bus.
        Bus defaultBus = new ExtensionManagerBus();
        assertSame("Default thread bus has not been set",
                   defaultBus, BusFactory.getThreadDefaultBus());
        destination = setUpDestination(false, false);
        setUpDoService(false);
        // Fixture setup must not have disturbed the thread-default bus.
        assertSame("Default thread bus has been unexpectedly reset",
                  defaultBus, BusFactory.getThreadDefaultBus());
        destination.doService(request, response);
        verifyDoService();
        // doService() must put the original bus back when it is done.
        assertSame("Default thread bus has not been reset",
                   defaultBus, BusFactory.getThreadDefaultBus());
    }
@Test
public void testDoServiceWithWsdlGET() throws Exception {
destination = setUpDestination(false, false);
setUpDoService(false,
false,
false,
"GET",
"?wsdl",
200);
destination.doService(request, response);
assertNotNull("unexpected null message", inMessage);
assertEquals("unexpected method",
inMessage.get(Message.HTTP_REQUEST_METHOD),
"GET");
assertEquals("unexpected path",
inMessage.get(Message.PATH_INFO),
"/bar/foo");
assertEquals("unexpected query",
inMessage.get(Message.QUERY_STRING),
"wsdl");
}
@Test
public void testDoServiceWithHttpGET() throws Exception {
destination = setUpDestination(false, false);
setUpDoService(false,
false,
false,
"GET",
"?customerId=abc&cutomerAdd=def",
200);
destination.doService(request, response);
assertNotNull("unexpected null message", inMessage);
assertEquals("unexpected method",
inMessage.get(Message.HTTP_REQUEST_METHOD),
"GET");
assertEquals("unexpected path",
inMessage.get(Message.PATH_INFO),
"/bar/foo");
assertEquals("unexpected query",
inMessage.get(Message.QUERY_STRING),
"?customerId=abc&cutomerAdd=def");
}
    /**
     * The back channel obtained for an in-message must target the WS-A
     * anonymous address (i.e. respond on the original HTTP connection).
     */
    @Test
    public void testGetAnonBackChannel() throws Exception {
        destination = setUpDestination(false, false);
        setUpDoService(false);
        destination.doService(request, response);
        setUpInMessage();
        Conduit backChannel = destination.getBackChannel(inMessage);
        assertNotNull("expected back channel", backChannel);
        assertEquals("unexpected target",
                     EndpointReferenceUtils.ANONYMOUS_ADDRESS,
                     backChannel.getTarget().getAddress().getValue());
    }
    /**
     * Sending a normal (200) response over the back channel; the response
     * write is verified against the mocks in verifyBackChannelSend().
     */
    @Test
    public void testGetBackChannelSend() throws Exception {
        destination = setUpDestination(false, false);
        setUpDoService(false, true);
        destination.doService(request, response);
        setUpInMessage();
        Conduit backChannel =
            destination.getBackChannel(inMessage);
        outMessage = setUpOutMessage();
        backChannel.prepare(outMessage);
        verifyBackChannelSend(backChannel, outMessage, 200);
    }
    /**
     * Sending a fault (500) response over the back channel; the status code
     * propagation is verified in verifyBackChannelSend().
     */
    @Test
    public void testGetBackChannelSendFault() throws Exception {
        destination = setUpDestination(false, false);
        setUpDoService(false, true, 500);
        destination.doService(request, response);
        setUpInMessage();
        Conduit backChannel =
            destination.getBackChannel(inMessage);
        outMessage = setUpOutMessage();
        backChannel.prepare(outMessage);
        verifyBackChannelSend(backChannel, outMessage, 500);
    }
    /**
     * Same as the fault case but verified with the oneway flag set (last
     * argument true) in verifyBackChannelSend().
     */
    @Test
    public void testGetBackChannelSendOneway() throws Exception {
        destination = setUpDestination(false, false);
        setUpDoService(false, true, 500);
        destination.doService(request, response);
        setUpInMessage();
        Conduit backChannel =
            destination.getBackChannel(inMessage);
        outMessage = setUpOutMessage();
        backChannel.prepare(outMessage);
        verifyBackChannelSend(backChannel, outMessage, 500, true);
    }
    /**
     * Decoupled-response flow: first a partial (202 Accepted) response goes
     * back on the original connection, then the full response is prepared on
     * a second back channel.
     */
    @Test
    public void testGetBackChannelSendDecoupled() throws Exception {
        destination = setUpDestination(false, false);
        setUpDoService(false, true, true, 202);
        destination.doService(request, response);
        setUpInMessage();

        // Partial response acknowledging receipt (202).
        Message partialResponse = setUpOutMessage();
        partialResponse.put(Message.PARTIAL_RESPONSE_MESSAGE, Boolean.TRUE);
        Conduit partialBackChannel =
            destination.getBackChannel(inMessage);
        partialBackChannel.prepare(partialResponse);
        verifyBackChannelSend(partialBackChannel, partialResponse, 202);

        // Full response prepared on a separate back channel.
        outMessage = setUpOutMessage();
        Conduit fullBackChannel =
            destination.getBackChannel(inMessage);

        fullBackChannel.prepare(outMessage);
    }
    /**
     * An HTTPServerPolicy attached to the EndpointInfo as an extensor must be
     * picked up by the destination as its server policy.
     */
    @Test
    public void testServerPolicyInServiceModel()
        throws Exception {
        policy = new HTTPServerPolicy();
        address = getEPR("bar/foo");
        bus = BusFactory.getDefaultBus(true);
        transportFactory = new HTTPTransportFactory();

        ServiceInfo serviceInfo = new ServiceInfo();
        serviceInfo.setName(new QName("bla", "Service"));
        endpointInfo = new EndpointInfo(serviceInfo, "");
        endpointInfo.setName(new QName("bla", "Port"));
        // Attach the policy to the service model, not to the destination.
        endpointInfo.addExtensor(policy);

        engine = mock(UndertowHTTPServerEngine.class);

        endpointInfo.setAddress(NOWHERE + "bar/foo");

        UndertowHTTPDestination dest =
            new EasyMockUndertowHTTPDestination(
                    bus, transportFactory.getRegistry(), endpointInfo, null, engine);
        assertEquals(policy, dest.getServer());
    }
@Test
public void testMultiplexGetAddressWithId() throws Exception {
destination = setUpDestination();
final String id = "ID2";
EndpointReferenceType refWithId = destination.getAddressWithId(id);
assertNotNull(refWithId);
assertNotNull(refWithId.getReferenceParameters());
assertNotNull(refWithId.getReferenceParameters().getAny());
assertTrue("it is an element",
refWithId.getReferenceParameters().getAny().get(0) instanceof JAXBElement);
JAXBElement<?> el = (JAXBElement<?>) refWithId.getReferenceParameters().getAny().get(0);
assertEquals("match our id", el.getValue(), id);
}
@Test
public void testMultiplexGetAddressWithIdForAddress() throws Exception {
destination = setUpDestination();
destination.setMultiplexWithAddress(true);
final String id = "ID3";
EndpointReferenceType refWithId = destination.getAddressWithId(id);
assertNotNull(refWithId);
assertNull(refWithId.getReferenceParameters());
assertTrue("match our id", EndpointReferenceUtils.getAddress(refWithId).indexOf(id) != -1);
}
@Test
public void testMultiplexGetIdForAddress() throws Exception {
destination = setUpDestination();
destination.setMultiplexWithAddress(true);
final String id = "ID3";
EndpointReferenceType refWithId = destination.getAddressWithId(id);
String pathInfo = EndpointReferenceUtils.getAddress(refWithId);
Map<String, Object> context = new HashMap<>();
assertNull("fails with no context", destination.getId(context));
context.put(Message.PATH_INFO, pathInfo);
String result = destination.getId(context);
assertNotNull(result);
assertEquals("match our id", result, id);
}
@Test
public void testMultiplexGetId() throws Exception {
destination = setUpDestination();
final String id = "ID3";
EndpointReferenceType refWithId = destination.getAddressWithId(id);
Map<String, Object> context = new HashMap<>();
assertNull("fails with no context", destination.getId(context));
AddressingProperties maps = mock(AddressingProperties.class);
when(maps.getToEndpointReference()).thenReturn(refWithId);
context.put(JAXWSAConstants.ADDRESSING_PROPERTIES_INBOUND, maps);
String result = destination.getId(context);
assertNotNull(result);
assertEquals("match our id", result, id);
}
private UndertowHTTPDestination setUpDestination()
throws Exception {
return setUpDestination(false, false);
};
    /**
     * Builds the destination under test against a mocked server engine and
     * installs a MessageObserver that captures the dispatched in-message.
     *
     * @param contextMatchOnStem NOTE(review): not referenced anywhere in this
     *        body — confirm whether stem matching should actually be wired up
     * @param mockedBus when true the bus is a Mockito stub; otherwise a real
     *        ExtensionManagerBus whose ConduitInitiatorManager always hands
     *        back {@code decoupledBackChannel}
     */
    private UndertowHTTPDestination setUpDestination(
            boolean contextMatchOnStem, boolean mockedBus)
        throws Exception {
        policy = new HTTPServerPolicy();
        address = getEPR("bar/foo");
        transportFactory = new HTTPTransportFactory();
        // Initiator that resolves every conduit request to the decoupled
        // back-channel mock.
        final ConduitInitiator ci = new ConduitInitiator() {
            public Conduit getConduit(EndpointInfo targetInfo, Bus b) throws IOException {
                return decoupledBackChannel;
            }
            public Conduit getConduit(EndpointInfo localInfo, EndpointReferenceType target, Bus b)
                throws IOException {
                return decoupledBackChannel;
            }
            public List<String> getTransportIds() {
                return null;
            }
            public Set<String> getUriPrefixes() {
                return new HashSet<>(Collections.singletonList("http"));
            }
        };
        // Manager that maps any URI onto the initiator above.
        ConduitInitiatorManager mgr = new ConduitInitiatorManager() {
            public void deregisterConduitInitiator(String name) {
            }
            public ConduitInitiator getConduitInitiator(String name) throws BusException {
                return null;
            }
            public ConduitInitiator getConduitInitiatorForUri(String uri) {
                return ci;
            }
            public void registerConduitInitiator(String name, ConduitInitiator factory) {
            }
        };
        if (!mockedBus) {
            bus = new ExtensionManagerBus();
            bus.setExtension(mgr, ConduitInitiatorManager.class);
        } else {
            // Stub out only the extensions the destination is known to query.
            bus = mock(Bus.class);
            when(bus.getExtension(EndpointResolverRegistry.class)).thenReturn(null);
            when(bus.getExtension(ContinuationProviderFactory.class)).thenReturn(null);
            when(bus.getExtension(PolicyDataEngine.class)).thenReturn(null);
            when(bus.hasExtensionByName("org.apache.cxf.ws.policy.PolicyEngine")).thenReturn(false);
            when(bus.getExtension(ClassLoader.class)).thenReturn(this.getClass().getClassLoader());
        }
        engine = mock(UndertowHTTPServerEngine.class);
        ServiceInfo serviceInfo = new ServiceInfo();
        serviceInfo.setName(new QName("bla", "Service"));
        endpointInfo = new EndpointInfo(serviceInfo, "");
        endpointInfo.setName(new QName("bla", "Port"));
        endpointInfo.setAddress(NOWHERE + "bar/foo");
        endpointInfo.addExtensor(policy);
        // Registering the servant must run for real so the handler is wired.
        doCallRealMethod().when(engine).addServant(eq(new URL(NOWHERE + "bar/foo")),
                                                   isA(UndertowHTTPHandler.class));
        when(engine.getContinuationsEnabled()).thenReturn(true);
        UndertowHTTPDestination dest = new EasyMockUndertowHTTPDestination(bus,
                                                               transportFactory.getRegistry(),
                                                               endpointInfo,
                                                               null,
                                                               engine);
        dest.retrieveEngine();
        // The destination may normalize/replace the policy; keep its view.
        policy = dest.getServer();
        // Capture the dispatched message and the thread-default bus for the
        // verify* helpers.
        observer = new MessageObserver() {
            public void onMessage(Message m) {
                inMessage = m;
                threadDefaultBus = BusFactory.getThreadDefaultBus();
            }
        };
        dest.setMessageObserver(observer);
        return dest;
    }
private void setUpRemoveServant() throws Exception {
doCallRealMethod().when(engine).removeServant(eq(new URL(NOWHERE + "bar/foo")));
}
private void setUpDoService(boolean setRedirectURL) throws Exception {
setUpDoService(setRedirectURL, false);
}
private void setUpDoService(boolean setRedirectURL,
boolean sendResponse) throws Exception {
setUpDoService(setRedirectURL,
sendResponse,
false);
}
private void setUpDoService(boolean setRedirectURL,
boolean sendResponse, int status) throws Exception {
String method = "POST";
String query = "?name";
setUpDoService(setRedirectURL, sendResponse, false, method, query, status);
}
private void setUpDoService(boolean setRedirectURL,
boolean sendResponse, boolean decoupled, int status) throws Exception {
String method = "POST";
String query = "?name";
setUpDoService(setRedirectURL, sendResponse, decoupled, method, query, status);
}
private void setUpDoService(boolean setRedirectURL,
boolean sendResponse,
boolean decoupled) throws Exception {
String method = "POST";
String query = "?name";
setUpDoService(setRedirectURL, sendResponse, decoupled, method, query, 200);
}
    /**
     * Wires the servlet request/response mocks for a single doService call.
     *
     * @param setRedirectURL when true, only redirect expectations are set up
     * @param sendResponse   when true, response status/header stubs are added
     * @param decoupled      when true, the decoupled back-channel mock is created
     * @param method         HTTP method the request mock reports
     * @param query          query string the request mock reports
     * @param status         status the response mock reports via getStatus()
     */
    private void setUpDoService(boolean setRedirectURL,
                                boolean sendResponse,
                                boolean decoupled,
                                String method,
                                String query,
                                int status
                                ) throws Exception {
        is = mock(ServletInputStream.class);
        os = mock(ServletOutputStream.class);
        request = mock(HttpServletRequest.class);
        response = mock(HttpServletResponse.class);
        when(request.getMethod()).thenReturn(method);
        when(request.getUserPrincipal()).thenReturn(null);
        if (setRedirectURL) {
            // Redirect path: only the redirect + flush are expected.
            policy.setRedirectURL(NOWHERE + "foo/bar");
            doNothing().when(response).sendRedirect(eq(NOWHERE + "foo/bar"));
            doNothing().when(response).flushBuffer();
        } else {
            //getQueryString for if statement
            when(request.getQueryString()).thenReturn(query);
            if ("GET".equals(method) && "?wsdl".equals(query)) {
                when(request.getHeaderNames()).thenReturn(Collections.emptyEnumeration());
                verifyGetWSDLQuery();
            } else { // test for the post
                when(request.getAttribute(AbstractHTTPDestination.CXF_CONTINUATION_MESSAGE))
                    .thenReturn(null);
                when(request.getInputStream()).thenReturn(is);
                when(request.getContextPath()).thenReturn("/bar");
                when(request.getServletPath()).thenReturn("");
                when(request.getPathInfo()).thenReturn("/foo");
                when(request.getRequestURI()).thenReturn("/foo");
                when(request.getRequestURL())
                    .thenReturn(new StringBuffer("http://localhost/foo"));
                doNothing().when(request)
                    .setAttribute("org.springframework.web.servlet.HandlerMapping.bestMatchingPattern", "/foo");
                when(request.getCharacterEncoding()).thenReturn(StandardCharsets.UTF_8.name());
                when(request.getQueryString()).thenReturn(query);
                when(request.getHeader("Accept")).thenReturn("*/*");
                // NOTE(review): "text/xml charset=utf8" lacks the usual ';'
                // separator — presumably deliberate test data; confirm.
                when(request.getContentType()).thenReturn("text/xml charset=utf8");
                when(request.getAttribute("http.service.redirection")).thenReturn(null);
                // Two content-type values simulate a header split across
                // multiple physical header lines.
                HeaderMap httpFields = new HeaderMap();
                httpFields.add(new HttpString("content-type"), "text/xml");
                httpFields.add(new HttpString("content-type"), "charset=utf8");
                httpFields.put(new HttpString(UndertowHTTPDestinationTest.AUTH_HEADER),
                               UndertowHTTPDestinationTest.BASIC_AUTH);
                List<String> headers = new ArrayList<>();
                for (HttpString header : httpFields.getHeaderNames()) {
                    headers.add(header.toString());
                }
                when(request.getHeaderNames()).thenReturn(Collections.enumeration(headers));
                when(request.getHeaders("content-type")).thenReturn(
                     Collections.enumeration(httpFields.get("content-type")));
                when(request.getHeaders(UndertowHTTPDestinationTest.AUTH_HEADER)).thenReturn(Collections.enumeration(
                    httpFields.get(UndertowHTTPDestinationTest.AUTH_HEADER)));
                // NOTE(review): getInputStream() was already stubbed above —
                // this repeat is harmless but redundant.
                when(request.getInputStream()).thenReturn(is);
                doNothing().when(response).flushBuffer();
                if (sendResponse) {
                    doNothing().when(response).setStatus(status);
                    doNothing().when(response).setContentType("text/xml charset=utf8");
                    doNothing().when(response).addHeader(isA(String.class), isA(String.class));
                    doNothing().when(response).setContentLength(0);
                    when(response.getOutputStream()).thenReturn(os);
                    when(response.getStatus()).thenReturn(status);
                    doNothing().when(response).flushBuffer();
                }
                // Simulate a TLS connection so TLSSessionInfo is populated.
                when(request.getAttribute("jakarta.servlet.request.cipher_suite")).thenReturn("anythingwilldoreally");
                when(request.getAttribute("javax.net.ssl.session")).thenReturn(null);
                when(request.getAttribute("jakarta.servlet.request.X509Certificate")).thenReturn(null);
            }
        }
        if (decoupled) {
            setupDecoupledBackChannel();
        }
    }
private void setupDecoupledBackChannel() throws IOException {
decoupledBackChannel = mock(Conduit.class);
doNothing().when(decoupledBackChannel).setMessageObserver(isA(MessageObserver.class));
doNothing().when(decoupledBackChannel).prepare(isA(Message.class));
}
private void setUpInMessage() {
inMessage.setExchange(new ExchangeImpl());
}
private Message setUpOutMessage() {
Message outMsg = new MessageImpl();
outMsg.putAll(inMessage);
outMsg.setExchange(new ExchangeImpl());
outMsg.put(Message.PROTOCOL_HEADERS,
new TreeMap<String, List<String>>(String.CASE_INSENSITIVE_ORDER));
return outMsg;
}
private void setUpResponseHeaders(Message outMsg) {
Map<String, List<String>> responseHeaders =
CastUtils.cast((Map<?, ?>)outMsg.get(Message.PROTOCOL_HEADERS));
assertNotNull("expected response headers", responseHeaders);
List<String> challenges = new ArrayList<>();
challenges.add(BASIC_CHALLENGE);
challenges.add(DIGEST_CHALLENGE);
challenges.add(CUSTOM_CHALLENGE);
responseHeaders.put(CHALLENGE_HEADER, challenges);
}
private void verifyGetWSDLQuery() throws Exception {
when(request.getRequestURL()).thenReturn(new StringBuffer("http://localhost/bar/foo"));
when(request.getPathInfo()).thenReturn("/bar/foo");
when(request.getCharacterEncoding()).thenReturn(StandardCharsets.UTF_8.name());
when(request.getQueryString()).thenReturn("wsdl");
doNothing().when(response).setContentType("text/xml");
when(response.getOutputStream()).thenReturn(os);
}
private void verifyDoService() throws Exception {
assertSame("Default thread bus has not been set for request",
bus, threadDefaultBus);
assertNotNull("unexpected null message", inMessage);
assertSame("unexpected HTTP request",
inMessage.get(AbstractHTTPDestination.HTTP_REQUEST),
request);
assertSame("unexpected HTTP response",
inMessage.get(AbstractHTTPDestination.HTTP_RESPONSE),
response);
assertEquals("unexpected method",
inMessage.get(Message.HTTP_REQUEST_METHOD),
"POST");
assertEquals("unexpected path",
inMessage.get(Message.PATH_INFO),
"/bar/foo");
assertEquals("unexpected query",
inMessage.get(Message.QUERY_STRING),
"?name");
assertNotNull("unexpected query",
inMessage.get(TLSSessionInfo.class));
verifyRequestHeaders();
}
private void verifyRequestHeaders() throws Exception {
Map<String, List<String>> requestHeaders =
CastUtils.cast((Map<?, ?>)inMessage.get(Message.PROTOCOL_HEADERS));
assertNotNull("expected request headers",
requestHeaders);
List<String> values = requestHeaders.get("content-type");
assertNotNull("expected field", values);
assertEquals("unexpected values", 2, values.size());
assertTrue("expected value", values.contains("text/xml"));
assertTrue("expected value", values.contains("charset=utf8"));
values = requestHeaders.get(AUTH_HEADER);
assertNotNull("expected field", values);
assertEquals("unexpected values", 1, values.size());
assertTrue("expected value", values.contains(BASIC_AUTH));
AuthorizationPolicy authpolicy =
inMessage.get(AuthorizationPolicy.class);
assertNotNull("Expected some auth tokens", policy);
assertEquals("expected user",
USER,
authpolicy.getUserName());
assertEquals("expected passwd",
PASSWD,
authpolicy.getPassword());
}
private void verifyResponseHeaders(Message outMsg) throws Exception {
Map<String, List<String>> responseHeaders =
CastUtils.cast((Map<?, ?>)outMsg.get(Message.PROTOCOL_HEADERS));
assertNotNull("expected response headers",
responseHeaders);
}
private void verifyBackChannelSend(Conduit backChannel,
Message outMsg,
int status) throws Exception {
verifyBackChannelSend(backChannel, outMsg, status, false);
}
    /**
     * Drives a response through the back channel and asserts its effects:
     * stream type, status propagation, response headers and — for oneway —
     * detachment of the servlet response from the out-message.
     */
    private void verifyBackChannelSend(Conduit backChannel,
                                       Message outMsg,
                                       int status,
                                       boolean oneway) throws Exception {
        outMsg.getExchange().setOneWay(oneway);
        assertTrue("unexpected back channel type",
                   backChannel instanceof UndertowHTTPDestination.BackChannelConduit);
        assertTrue("unexpected content formats",
                   outMsg.getContentFormats().contains(OutputStream.class));
        OutputStream responseOS = outMsg.getContent(OutputStream.class);
        assertNotNull("expected output stream", responseOS);
        assertTrue("unexpected output stream type",
                   responseOS instanceof AbstractWrappedOutputStream);
        outMsg.put(Message.RESPONSE_CODE, status);
        // NOTE(review): getBytes() uses the platform charset — fine for the
        // ASCII payload here, but confirm if PAYLOAD ever changes.
        responseOS.write(PAYLOAD.getBytes());
        setUpResponseHeaders(outMsg);
        // Flushing triggers the actual send through the conduit.
        responseOS.flush();
        assertEquals("unexpected status",
                     status,
                     response.getStatus());
        verifyResponseHeaders(outMsg);
        if (oneway) {
            // Oneway: the servlet response must have been detached on send.
            assertNull("unexpected HTTP response",
                       outMsg.get(AbstractHTTPDestination.HTTP_RESPONSE));
        } else {
            assertNotNull("expected HTTP response",
                          outMsg.get(AbstractHTTPDestination.HTTP_RESPONSE));
            responseOS.close();
        }
    }
static EndpointReferenceType getEPR(String s) {
return EndpointReferenceUtils.getEndpointReference(NOWHERE + s);
}
    /**
     * Test subclass that widens the visibility of retrieveFromContinuation
     * for use from this test class.
     * NOTE(review): not referenced in the visible portion of this file —
     * confirm it is still needed.
     */
    private static class TestUndertowDestination extends UndertowHTTPDestination {
        TestUndertowDestination(Bus bus,
                                DestinationRegistry registry,
                                EndpointInfo endpointInfo,
                                UndertowHTTPServerEngineFactory serverEngineFactory) throws IOException {
            super(bus, registry, endpointInfo, serverEngineFactory);
        }
        @Override
        public Message retrieveFromContinuation(HttpServletRequest request) {
            return super.retrieveFromContinuation(request);
        }
    }
} |
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/oracledatabase/v1/oracledatabase.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.oracledatabase.v1;
/**
*
*
* <pre>
* The response for `DbServer.List`.
* </pre>
*
* Protobuf type {@code google.cloud.oracledatabase.v1.ListDbServersResponse}
*/
public final class ListDbServersResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.oracledatabase.v1.ListDbServersResponse)
ListDbServersResponseOrBuilder {
private static final long serialVersionUID = 0L;
  // Use ListDbServersResponse.newBuilder() to construct.
  private ListDbServersResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg ctor backs the default instance: empty list, empty token.
  private ListDbServersResponse() {
    dbServers_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }
  // Reflection hook used by the protobuf runtime to create instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListDbServersResponse();
  }
  // Descriptor for this message type, from the generated file descriptor.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.oracledatabase.v1.V1mainProto
        .internal_static_google_cloud_oracledatabase_v1_ListDbServersResponse_descriptor;
  }
  // Reflection table mapping field numbers to the generated accessors.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.oracledatabase.v1.V1mainProto
        .internal_static_google_cloud_oracledatabase_v1_ListDbServersResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.oracledatabase.v1.ListDbServersResponse.class,
            com.google.cloud.oracledatabase.v1.ListDbServersResponse.Builder.class);
  }
  public static final int DB_SERVERS_FIELD_NUMBER = 1;
  @SuppressWarnings("serial")
  // Wrapped in unmodifiableList by the Builder before being handed here, so
  // exposing the direct reference below is safe.
  private java.util.List<com.google.cloud.oracledatabase.v1.DbServer> dbServers_;
  /**
   *
   *
   * <pre>
   * The list of database servers.
   * </pre>
   *
   * <code>repeated .google.cloud.oracledatabase.v1.DbServer db_servers = 1;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.oracledatabase.v1.DbServer> getDbServersList() {
    return dbServers_;
  }
  /**
   *
   *
   * <pre>
   * The list of database servers.
   * </pre>
   *
   * <code>repeated .google.cloud.oracledatabase.v1.DbServer db_servers = 1;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.oracledatabase.v1.DbServerOrBuilder>
      getDbServersOrBuilderList() {
    return dbServers_;
  }
  /**
   *
   *
   * <pre>
   * The list of database servers.
   * </pre>
   *
   * <code>repeated .google.cloud.oracledatabase.v1.DbServer db_servers = 1;</code>
   */
  @java.lang.Override
  public int getDbServersCount() {
    return dbServers_.size();
  }
  /**
   *
   *
   * <pre>
   * The list of database servers.
   * </pre>
   *
   * <code>repeated .google.cloud.oracledatabase.v1.DbServer db_servers = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.oracledatabase.v1.DbServer getDbServers(int index) {
    return dbServers_.get(index);
  }
  /**
   *
   *
   * <pre>
   * The list of database servers.
   * </pre>
   *
   * <code>repeated .google.cloud.oracledatabase.v1.DbServer db_servers = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.oracledatabase.v1.DbServerOrBuilder getDbServersOrBuilder(int index) {
    return dbServers_.get(index);
  }
  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
  @SuppressWarnings("serial")
  // Holds either a String or a ByteString; decoded form is cached lazily.
  private volatile java.lang.Object nextPageToken_ = "";
  /**
   *
   *
   * <pre>
   * A token identifying a page of results the server should return.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode the UTF-8 bytes once and cache the String form.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * A token identifying a page of results the server should return.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      // Encode once and cache the ByteString form.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // This message has no required fields, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Field 1: repeated db_servers.
    for (int i = 0; i < dbServers_.size(); i++) {
      output.writeMessage(1, dbServers_.get(i));
    }
    // Field 2: next_page_token, skipped when empty (proto3 default).
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Size is memoized; -1 marks "not computed yet".
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < dbServers_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, dbServers_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.oracledatabase.v1.ListDbServersResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.oracledatabase.v1.ListDbServersResponse other =
        (com.google.cloud.oracledatabase.v1.ListDbServersResponse) obj;
    // Field-by-field comparison, including unknown fields.
    if (!getDbServersList().equals(other.getDbServersList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Memoized; 0 marks "not computed yet".
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getDbServersCount() > 0) {
      hash = (37 * hash) + DB_SERVERS_FIELD_NUMBER;
      hash = (53 * hash) + getDbServersList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parseFrom/parseDelimitedFrom overloads; all delegate
  // to the shared PARSER (and GeneratedMessageV3 stream helpers).
  public static com.google.cloud.oracledatabase.v1.ListDbServersResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.oracledatabase.v1.ListDbServersResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.oracledatabase.v1.ListDbServersResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.oracledatabase.v1.ListDbServersResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.oracledatabase.v1.ListDbServersResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.oracledatabase.v1.ListDbServersResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.oracledatabase.v1.ListDbServersResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.oracledatabase.v1.ListDbServersResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.oracledatabase.v1.ListDbServersResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.oracledatabase.v1.ListDbServersResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.oracledatabase.v1.ListDbServersResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.oracledatabase.v1.ListDbServersResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  // Builders always start from the default instance's builder.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.oracledatabase.v1.ListDbServersResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields an empty builder; anything else is merged.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* The response for `DbServer.List`.
* </pre>
*
* Protobuf type {@code google.cloud.oracledatabase.v1.ListDbServersResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.oracledatabase.v1.ListDbServersResponse)
com.google.cloud.oracledatabase.v1.ListDbServersResponseOrBuilder {
    // Same descriptor/accessor table as the message type.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.oracledatabase.v1.V1mainProto
          .internal_static_google_cloud_oracledatabase_v1_ListDbServersResponse_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.oracledatabase.v1.V1mainProto
          .internal_static_google_cloud_oracledatabase_v1_ListDbServersResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.oracledatabase.v1.ListDbServersResponse.class,
              com.google.cloud.oracledatabase.v1.ListDbServersResponse.Builder.class);
    }
    // Construct using com.google.cloud.oracledatabase.v1.ListDbServersResponse.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      // Reset the repeated field through whichever representation is active.
      if (dbServersBuilder_ == null) {
        dbServers_ = java.util.Collections.emptyList();
      } else {
        dbServers_ = null;
        dbServersBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.oracledatabase.v1.V1mainProto
          .internal_static_google_cloud_oracledatabase_v1_ListDbServersResponse_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.oracledatabase.v1.ListDbServersResponse getDefaultInstanceForType() {
      return com.google.cloud.oracledatabase.v1.ListDbServersResponse.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.oracledatabase.v1.ListDbServersResponse build() {
      com.google.cloud.oracledatabase.v1.ListDbServersResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.oracledatabase.v1.ListDbServersResponse buildPartial() {
      com.google.cloud.oracledatabase.v1.ListDbServersResponse result =
          new com.google.cloud.oracledatabase.v1.ListDbServersResponse(this);
      // Repeated fields are transferred first, then scalar fields if any bit
      // in bitField0_ is set.
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    private void buildPartialRepeatedFields(
        com.google.cloud.oracledatabase.v1.ListDbServersResponse result) {
      if (dbServersBuilder_ == null) {
        // Freeze the plain list before handing it to the message.
        if (((bitField0_ & 0x00000001) != 0)) {
          dbServers_ = java.util.Collections.unmodifiableList(dbServers_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.dbServers_ = dbServers_;
      } else {
        result.dbServers_ = dbServersBuilder_.build();
      }
    }
    private void buildPartial0(com.google.cloud.oracledatabase.v1.ListDbServersResponse result) {
      int from_bitField0_ = bitField0_;
      // Bit 0x2 tracks next_page_token.
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }
    // The following overrides simply delegate to GeneratedMessageV3.Builder;
    // they exist so the generated type is covariant in its return type.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.oracledatabase.v1.ListDbServersResponse) {
        return mergeFrom((com.google.cloud.oracledatabase.v1.ListDbServersResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(com.google.cloud.oracledatabase.v1.ListDbServersResponse other) {
      if (other == com.google.cloud.oracledatabase.v1.ListDbServersResponse.getDefaultInstance())
        return this;
      if (dbServersBuilder_ == null) {
        // Plain-list mode: adopt the other list wholesale when ours is
        // empty, otherwise append.
        if (!other.dbServers_.isEmpty()) {
          if (dbServers_.isEmpty()) {
            dbServers_ = other.dbServers_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureDbServersIsMutable();
            dbServers_.addAll(other.dbServers_);
          }
          onChanged();
        }
      } else {
        // Field-builder mode: same adopt-or-append logic via the builder.
        if (!other.dbServers_.isEmpty()) {
          if (dbServersBuilder_.isEmpty()) {
            dbServersBuilder_.dispose();
            dbServersBuilder_ = null;
            dbServers_ = other.dbServers_;
            bitField0_ = (bitField0_ & ~0x00000001);
            dbServersBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getDbServersFieldBuilder()
                    : null;
          } else {
            dbServersBuilder_.addAllMessages(other.dbServers_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      // No required fields in this message type.
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                // Tag 10 = field 1 (db_servers), wire type length-delimited.
                com.google.cloud.oracledatabase.v1.DbServer m =
                    input.readMessage(
                        com.google.cloud.oracledatabase.v1.DbServer.parser(), extensionRegistry);
                if (dbServersBuilder_ == null) {
                  ensureDbServersIsMutable();
                  dbServers_.add(m);
                } else {
                  dbServersBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                // Tag 18 = field 2 (next_page_token), length-delimited.
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Bit 0x1: dbServers_ holds a private mutable copy; bit 0x2: next_page_token set.
    private int bitField0_;
    private java.util.List<com.google.cloud.oracledatabase.v1.DbServer> dbServers_ =
        java.util.Collections.emptyList();
    // Copy-on-write: swaps the shared (possibly immutable) list for a private
    // ArrayList before the first in-place mutation.
    private void ensureDbServersIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        dbServers_ =
            new java.util.ArrayList<com.google.cloud.oracledatabase.v1.DbServer>(dbServers_);
        bitField0_ |= 0x00000001;
      }
    }
    // Lazily created by getDbServersFieldBuilder(); once non-null it, not
    // dbServers_, owns the repeated field's contents.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.oracledatabase.v1.DbServer,
            com.google.cloud.oracledatabase.v1.DbServer.Builder,
            com.google.cloud.oracledatabase.v1.DbServerOrBuilder>
        dbServersBuilder_;
/**
*
*
* <pre>
* The list of database servers.
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.DbServer db_servers = 1;</code>
*/
public java.util.List<com.google.cloud.oracledatabase.v1.DbServer> getDbServersList() {
if (dbServersBuilder_ == null) {
return java.util.Collections.unmodifiableList(dbServers_);
} else {
return dbServersBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The list of database servers.
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.DbServer db_servers = 1;</code>
*/
public int getDbServersCount() {
if (dbServersBuilder_ == null) {
return dbServers_.size();
} else {
return dbServersBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The list of database servers.
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.DbServer db_servers = 1;</code>
*/
public com.google.cloud.oracledatabase.v1.DbServer getDbServers(int index) {
if (dbServersBuilder_ == null) {
return dbServers_.get(index);
} else {
return dbServersBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The list of database servers.
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.DbServer db_servers = 1;</code>
*/
public Builder setDbServers(int index, com.google.cloud.oracledatabase.v1.DbServer value) {
if (dbServersBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDbServersIsMutable();
dbServers_.set(index, value);
onChanged();
} else {
dbServersBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of database servers.
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.DbServer db_servers = 1;</code>
*/
public Builder setDbServers(
int index, com.google.cloud.oracledatabase.v1.DbServer.Builder builderForValue) {
if (dbServersBuilder_ == null) {
ensureDbServersIsMutable();
dbServers_.set(index, builderForValue.build());
onChanged();
} else {
dbServersBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of database servers.
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.DbServer db_servers = 1;</code>
*/
public Builder addDbServers(com.google.cloud.oracledatabase.v1.DbServer value) {
if (dbServersBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDbServersIsMutable();
dbServers_.add(value);
onChanged();
} else {
dbServersBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The list of database servers.
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.DbServer db_servers = 1;</code>
*/
public Builder addDbServers(int index, com.google.cloud.oracledatabase.v1.DbServer value) {
if (dbServersBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDbServersIsMutable();
dbServers_.add(index, value);
onChanged();
} else {
dbServersBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of database servers.
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.DbServer db_servers = 1;</code>
*/
public Builder addDbServers(
com.google.cloud.oracledatabase.v1.DbServer.Builder builderForValue) {
if (dbServersBuilder_ == null) {
ensureDbServersIsMutable();
dbServers_.add(builderForValue.build());
onChanged();
} else {
dbServersBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of database servers.
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.DbServer db_servers = 1;</code>
*/
public Builder addDbServers(
int index, com.google.cloud.oracledatabase.v1.DbServer.Builder builderForValue) {
if (dbServersBuilder_ == null) {
ensureDbServersIsMutable();
dbServers_.add(index, builderForValue.build());
onChanged();
} else {
dbServersBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of database servers.
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.DbServer db_servers = 1;</code>
*/
public Builder addAllDbServers(
java.lang.Iterable<? extends com.google.cloud.oracledatabase.v1.DbServer> values) {
if (dbServersBuilder_ == null) {
ensureDbServersIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, dbServers_);
onChanged();
} else {
dbServersBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The list of database servers.
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.DbServer db_servers = 1;</code>
*/
public Builder clearDbServers() {
if (dbServersBuilder_ == null) {
dbServers_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
dbServersBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The list of database servers.
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.DbServer db_servers = 1;</code>
*/
public Builder removeDbServers(int index) {
if (dbServersBuilder_ == null) {
ensureDbServersIsMutable();
dbServers_.remove(index);
onChanged();
} else {
dbServersBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The list of database servers.
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.DbServer db_servers = 1;</code>
*/
public com.google.cloud.oracledatabase.v1.DbServer.Builder getDbServersBuilder(int index) {
return getDbServersFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The list of database servers.
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.DbServer db_servers = 1;</code>
*/
public com.google.cloud.oracledatabase.v1.DbServerOrBuilder getDbServersOrBuilder(int index) {
if (dbServersBuilder_ == null) {
return dbServers_.get(index);
} else {
return dbServersBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The list of database servers.
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.DbServer db_servers = 1;</code>
*/
public java.util.List<? extends com.google.cloud.oracledatabase.v1.DbServerOrBuilder>
getDbServersOrBuilderList() {
if (dbServersBuilder_ != null) {
return dbServersBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(dbServers_);
}
}
/**
*
*
* <pre>
* The list of database servers.
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.DbServer db_servers = 1;</code>
*/
public com.google.cloud.oracledatabase.v1.DbServer.Builder addDbServersBuilder() {
return getDbServersFieldBuilder()
.addBuilder(com.google.cloud.oracledatabase.v1.DbServer.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of database servers.
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.DbServer db_servers = 1;</code>
*/
public com.google.cloud.oracledatabase.v1.DbServer.Builder addDbServersBuilder(int index) {
return getDbServersFieldBuilder()
.addBuilder(index, com.google.cloud.oracledatabase.v1.DbServer.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of database servers.
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.DbServer db_servers = 1;</code>
*/
public java.util.List<com.google.cloud.oracledatabase.v1.DbServer.Builder>
getDbServersBuilderList() {
return getDbServersFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.oracledatabase.v1.DbServer,
com.google.cloud.oracledatabase.v1.DbServer.Builder,
com.google.cloud.oracledatabase.v1.DbServerOrBuilder>
getDbServersFieldBuilder() {
if (dbServersBuilder_ == null) {
dbServersBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.oracledatabase.v1.DbServer,
com.google.cloud.oracledatabase.v1.DbServer.Builder,
com.google.cloud.oracledatabase.v1.DbServerOrBuilder>(
dbServers_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
dbServers_ = null;
}
return dbServersBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
    // Straight delegation; generated builders finalize these to prevent overrides.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.oracledatabase.v1.ListDbServersResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.oracledatabase.v1.ListDbServersResponse)
  // Singleton default instance shared by all callers; created eagerly at class load.
  private static final com.google.cloud.oracledatabase.v1.ListDbServersResponse DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.oracledatabase.v1.ListDbServersResponse();
  }
  public static com.google.cloud.oracledatabase.v1.ListDbServersResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser that funnels all wire input through Builder.mergeFrom; partial
  // results are attached to thrown exceptions via setUnfinishedMessage.
  private static final com.google.protobuf.Parser<ListDbServersResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListDbServersResponse>() {
        @java.lang.Override
        public ListDbServersResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<ListDbServersResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListDbServersResponse> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.oracledatabase.v1.ListDbServersResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 35,941 | java-service-control/proto-google-cloud-service-control-v1/src/main/java/com/google/api/servicecontrol/v1/MetricValueSet.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/api/servicecontrol/v1/metric_value.proto
// Protobuf Java Version: 3.25.8
package com.google.api.servicecontrol.v1;
/**
*
*
* <pre>
* Represents a set of metric values in the same metric.
* Each metric value in the set should have a unique combination of start time,
* end time, and label values.
* </pre>
*
* Protobuf type {@code google.api.servicecontrol.v1.MetricValueSet}
*/
public final class MetricValueSet extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.api.servicecontrol.v1.MetricValueSet)
MetricValueSetOrBuilder {
private static final long serialVersionUID = 0L;
  // Use MetricValueSet.newBuilder() to construct.
  private MetricValueSet(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg form used by newInstance()/parsing; initializes proto3 defaults.
  private MetricValueSet() {
    metricName_ = "";
    metricValues_ = java.util.Collections.emptyList();
  }
  // Reflection/runtime plumbing: fresh-instance factory plus descriptor and
  // field-accessor lookups backed by MetricValueSetProto.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new MetricValueSet();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.api.servicecontrol.v1.MetricValueSetProto
        .internal_static_google_api_servicecontrol_v1_MetricValueSet_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.api.servicecontrol.v1.MetricValueSetProto
        .internal_static_google_api_servicecontrol_v1_MetricValueSet_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.api.servicecontrol.v1.MetricValueSet.class,
            com.google.api.servicecontrol.v1.MetricValueSet.Builder.class);
  }
public static final int METRIC_NAME_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object metricName_ = "";
/**
*
*
* <pre>
* The metric name defined in the service configuration.
* </pre>
*
* <code>string metric_name = 1;</code>
*
* @return The metricName.
*/
@java.lang.Override
public java.lang.String getMetricName() {
java.lang.Object ref = metricName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
metricName_ = s;
return s;
}
}
/**
*
*
* <pre>
* The metric name defined in the service configuration.
* </pre>
*
* <code>string metric_name = 1;</code>
*
* @return The bytes for metricName.
*/
@java.lang.Override
public com.google.protobuf.ByteString getMetricNameBytes() {
java.lang.Object ref = metricName_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
metricName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int METRIC_VALUES_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private java.util.List<com.google.api.servicecontrol.v1.MetricValue> metricValues_;
/**
*
*
* <pre>
* The values in this metric.
* </pre>
*
* <code>repeated .google.api.servicecontrol.v1.MetricValue metric_values = 2;</code>
*/
@java.lang.Override
public java.util.List<com.google.api.servicecontrol.v1.MetricValue> getMetricValuesList() {
return metricValues_;
}
/**
*
*
* <pre>
* The values in this metric.
* </pre>
*
* <code>repeated .google.api.servicecontrol.v1.MetricValue metric_values = 2;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.api.servicecontrol.v1.MetricValueOrBuilder>
getMetricValuesOrBuilderList() {
return metricValues_;
}
/**
*
*
* <pre>
* The values in this metric.
* </pre>
*
* <code>repeated .google.api.servicecontrol.v1.MetricValue metric_values = 2;</code>
*/
@java.lang.Override
public int getMetricValuesCount() {
return metricValues_.size();
}
/**
*
*
* <pre>
* The values in this metric.
* </pre>
*
* <code>repeated .google.api.servicecontrol.v1.MetricValue metric_values = 2;</code>
*/
@java.lang.Override
public com.google.api.servicecontrol.v1.MetricValue getMetricValues(int index) {
return metricValues_.get(index);
}
/**
*
*
* <pre>
* The values in this metric.
* </pre>
*
* <code>repeated .google.api.servicecontrol.v1.MetricValue metric_values = 2;</code>
*/
@java.lang.Override
public com.google.api.servicecontrol.v1.MetricValueOrBuilder getMetricValuesOrBuilder(int index) {
return metricValues_.get(index);
}
  // -1 = not computed, 0 = false, 1 = true; memoized after first call.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes fields in tag order: metric_name (1, skipped when empty per
  // proto3), each metric_values element (2), then unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(metricName_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, metricName_);
    }
    for (int i = 0; i < metricValues_.size(); i++) {
      output.writeMessage(2, metricValues_.get(i));
    }
    getUnknownFields().writeTo(output);
  }
  // Mirrors writeTo's field set; result is memoized (-1 = not yet computed).
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(metricName_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, metricName_);
    }
    for (int i = 0; i < metricValues_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, metricValues_.get(i));
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.api.servicecontrol.v1.MetricValueSet)) {
return super.equals(obj);
}
com.google.api.servicecontrol.v1.MetricValueSet other =
(com.google.api.servicecontrol.v1.MetricValueSet) obj;
if (!getMetricName().equals(other.getMetricName())) return false;
if (!getMetricValuesList().equals(other.getMetricValuesList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + METRIC_NAME_FIELD_NUMBER;
hash = (53 * hash) + getMetricName().hashCode();
if (getMetricValuesCount() > 0) {
hash = (37 * hash) + METRIC_VALUES_FIELD_NUMBER;
hash = (53 * hash) + getMetricValuesList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
  // Standard generated parse entry points; all delegate to PARSER (or the
  // GeneratedMessageV3 stream helpers, which wrap IOExceptions consistently).
  public static com.google.api.servicecontrol.v1.MetricValueSet parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.api.servicecontrol.v1.MetricValueSet parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.api.servicecontrol.v1.MetricValueSet parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.api.servicecontrol.v1.MetricValueSet parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.api.servicecontrol.v1.MetricValueSet parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.api.servicecontrol.v1.MetricValueSet parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.api.servicecontrol.v1.MetricValueSet parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.api.servicecontrol.v1.MetricValueSet parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a varint length prefix before the message bytes.
  public static com.google.api.servicecontrol.v1.MetricValueSet parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.api.servicecontrol.v1.MetricValueSet parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.api.servicecontrol.v1.MetricValueSet parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.api.servicecontrol.v1.MetricValueSet parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  // Builders are derived from the default instance so descriptor wiring is shared.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.api.servicecontrol.v1.MetricValueSet prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  // Skips the mergeFrom copy when this IS the default instance (nothing to copy).
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Represents a set of metric values in the same metric.
* Each metric value in the set should have a unique combination of start time,
* end time, and label values.
* </pre>
*
* Protobuf type {@code google.api.servicecontrol.v1.MetricValueSet}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.api.servicecontrol.v1.MetricValueSet)
com.google.api.servicecontrol.v1.MetricValueSetOrBuilder {
    // Descriptor/accessor-table plumbing mirrored from the enclosing message.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.api.servicecontrol.v1.MetricValueSetProto
          .internal_static_google_api_servicecontrol_v1_MetricValueSet_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.api.servicecontrol.v1.MetricValueSetProto
          .internal_static_google_api_servicecontrol_v1_MetricValueSet_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.api.servicecontrol.v1.MetricValueSet.class,
              com.google.api.servicecontrol.v1.MetricValueSet.Builder.class);
    }
    // Construct using com.google.api.servicecontrol.v1.MetricValueSet.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    // Resets every field to its proto3 default; the repeated field's builder,
    // when present, is cleared rather than discarded.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      metricName_ = "";
      if (metricValuesBuilder_ == null) {
        metricValues_ = java.util.Collections.emptyList();
      } else {
        metricValues_ = null;
        metricValuesBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000002);
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.api.servicecontrol.v1.MetricValueSetProto
          .internal_static_google_api_servicecontrol_v1_MetricValueSet_descriptor;
    }
    @java.lang.Override
    public com.google.api.servicecontrol.v1.MetricValueSet getDefaultInstanceForType() {
      return com.google.api.servicecontrol.v1.MetricValueSet.getDefaultInstance();
    }
    // build() enforces the (trivially true for proto3) initialization contract.
    @java.lang.Override
    public com.google.api.servicecontrol.v1.MetricValueSet build() {
      com.google.api.servicecontrol.v1.MetricValueSet result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    // Copies repeated fields first, then bit-guarded singular fields.
    @java.lang.Override
    public com.google.api.servicecontrol.v1.MetricValueSet buildPartial() {
      com.google.api.servicecontrol.v1.MetricValueSet result =
          new com.google.api.servicecontrol.v1.MetricValueSet(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Freezes metricValues_ (making the builder's list unmodifiable and clearing
    // the mutable bit) before handing it to the built message.
    private void buildPartialRepeatedFields(
        com.google.api.servicecontrol.v1.MetricValueSet result) {
      if (metricValuesBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)) {
          metricValues_ = java.util.Collections.unmodifiableList(metricValues_);
          bitField0_ = (bitField0_ & ~0x00000002);
        }
        result.metricValues_ = metricValues_;
      } else {
        result.metricValues_ = metricValuesBuilder_.build();
      }
    }
    // Copies singular fields guarded by their has-bits (0x1 = metric_name).
    private void buildPartial0(com.google.api.servicecontrol.v1.MetricValueSet result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.metricName_ = metricName_;
      }
    }
    // Reflection-based mutators: pure delegation, overridden only to narrow the
    // return type to this Builder.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.api.servicecontrol.v1.MetricValueSet) {
return mergeFrom((com.google.api.servicecontrol.v1.MetricValueSet) other);
} else {
super.mergeFrom(other);
return this;
}
}
    // Field-wise merge: singular fields are overwritten when set in `other`;
    // repeated metric_values are concatenated. The builder-swap branch adopts
    // other's immutable list directly when this builder is empty (an
    // optimization that avoids copying), recreating the field builder only if
    // alwaysUseFieldBuilders is on.
    public Builder mergeFrom(com.google.api.servicecontrol.v1.MetricValueSet other) {
      if (other == com.google.api.servicecontrol.v1.MetricValueSet.getDefaultInstance())
        return this;
      if (!other.getMetricName().isEmpty()) {
        metricName_ = other.metricName_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (metricValuesBuilder_ == null) {
        if (!other.metricValues_.isEmpty()) {
          if (metricValues_.isEmpty()) {
            metricValues_ = other.metricValues_;
            bitField0_ = (bitField0_ & ~0x00000002);
          } else {
            ensureMetricValuesIsMutable();
            metricValues_.addAll(other.metricValues_);
          }
          onChanged();
        }
      } else {
        if (!other.metricValues_.isEmpty()) {
          if (metricValuesBuilder_.isEmpty()) {
            metricValuesBuilder_.dispose();
            metricValuesBuilder_ = null;
            metricValues_ = other.metricValues_;
            bitField0_ = (bitField0_ & ~0x00000002);
            metricValuesBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getMetricValuesFieldBuilder()
                    : null;
          } else {
            metricValuesBuilder_.addAllMessages(other.metricValues_);
          }
        }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    // proto3 message with no required fields: a builder is always initialized.
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Streaming merge from the wire format: reads tag-prefixed fields until
    // EOF (tag 0) or an end-group tag. onChanged() fires in the finally block
    // so listeners observe fields consumed even on a failed parse.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            // field 1 (metric_name): UTF-8-validated string.
            case 10:
              {
                metricName_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            // field 2 (metric_values): length-delimited MetricValue message.
            case 18:
              {
                com.google.api.servicecontrol.v1.MetricValue m =
                    input.readMessage(
                        com.google.api.servicecontrol.v1.MetricValue.parser(), extensionRegistry);
                if (metricValuesBuilder_ == null) {
                  ensureMetricValuesIsMutable();
                  metricValues_.add(m);
                } else {
                  metricValuesBuilder_.addMessage(m);
                }
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private java.lang.Object metricName_ = "";
/**
*
*
* <pre>
* The metric name defined in the service configuration.
* </pre>
*
* <code>string metric_name = 1;</code>
*
* @return The metricName.
*/
public java.lang.String getMetricName() {
java.lang.Object ref = metricName_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
metricName_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The metric name defined in the service configuration.
* </pre>
*
* <code>string metric_name = 1;</code>
*
* @return The bytes for metricName.
*/
public com.google.protobuf.ByteString getMetricNameBytes() {
java.lang.Object ref = metricName_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
metricName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The metric name defined in the service configuration.
* </pre>
*
* <code>string metric_name = 1;</code>
*
* @param value The metricName to set.
* @return This builder for chaining.
*/
public Builder setMetricName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
metricName_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* The metric name defined in the service configuration.
* </pre>
*
* <code>string metric_name = 1;</code>
*
* @return This builder for chaining.
*/
public Builder clearMetricName() {
metricName_ = getDefaultInstance().getMetricName();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* The metric name defined in the service configuration.
* </pre>
*
* <code>string metric_name = 1;</code>
*
* @param value The bytes for metricName to set.
* @return This builder for chaining.
*/
public Builder setMetricNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
metricName_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.util.List<com.google.api.servicecontrol.v1.MetricValue> metricValues_ =
java.util.Collections.emptyList();
private void ensureMetricValuesIsMutable() {
  // Copy-on-write guard: bit 0x00000002 of bitField0_ records whether
  // metricValues_ is already a private mutable copy. If not (e.g. it is still
  // the shared immutable emptyList or a list taken from another message),
  // clone it before the builder mutates it, then mark the bit.
  if (!((bitField0_ & 0x00000002) != 0)) {
    metricValues_ =
        new java.util.ArrayList<com.google.api.servicecontrol.v1.MetricValue>(metricValues_);
    bitField0_ |= 0x00000002;
  }
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.api.servicecontrol.v1.MetricValue,
com.google.api.servicecontrol.v1.MetricValue.Builder,
com.google.api.servicecontrol.v1.MetricValueOrBuilder>
metricValuesBuilder_;
/**
*
*
* <pre>
* The values in this metric.
* </pre>
*
* <code>repeated .google.api.servicecontrol.v1.MetricValue metric_values = 2;</code>
*/
public java.util.List<com.google.api.servicecontrol.v1.MetricValue> getMetricValuesList() {
if (metricValuesBuilder_ == null) {
return java.util.Collections.unmodifiableList(metricValues_);
} else {
return metricValuesBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The values in this metric.
* </pre>
*
* <code>repeated .google.api.servicecontrol.v1.MetricValue metric_values = 2;</code>
*/
public int getMetricValuesCount() {
if (metricValuesBuilder_ == null) {
return metricValues_.size();
} else {
return metricValuesBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The values in this metric.
* </pre>
*
* <code>repeated .google.api.servicecontrol.v1.MetricValue metric_values = 2;</code>
*/
public com.google.api.servicecontrol.v1.MetricValue getMetricValues(int index) {
if (metricValuesBuilder_ == null) {
return metricValues_.get(index);
} else {
return metricValuesBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The values in this metric.
* </pre>
*
* <code>repeated .google.api.servicecontrol.v1.MetricValue metric_values = 2;</code>
*/
public Builder setMetricValues(int index, com.google.api.servicecontrol.v1.MetricValue value) {
if (metricValuesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureMetricValuesIsMutable();
metricValues_.set(index, value);
onChanged();
} else {
metricValuesBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The values in this metric.
* </pre>
*
* <code>repeated .google.api.servicecontrol.v1.MetricValue metric_values = 2;</code>
*/
public Builder setMetricValues(
int index, com.google.api.servicecontrol.v1.MetricValue.Builder builderForValue) {
if (metricValuesBuilder_ == null) {
ensureMetricValuesIsMutable();
metricValues_.set(index, builderForValue.build());
onChanged();
} else {
metricValuesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The values in this metric.
* </pre>
*
* <code>repeated .google.api.servicecontrol.v1.MetricValue metric_values = 2;</code>
*/
public Builder addMetricValues(com.google.api.servicecontrol.v1.MetricValue value) {
if (metricValuesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureMetricValuesIsMutable();
metricValues_.add(value);
onChanged();
} else {
metricValuesBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The values in this metric.
* </pre>
*
* <code>repeated .google.api.servicecontrol.v1.MetricValue metric_values = 2;</code>
*/
public Builder addMetricValues(int index, com.google.api.servicecontrol.v1.MetricValue value) {
if (metricValuesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureMetricValuesIsMutable();
metricValues_.add(index, value);
onChanged();
} else {
metricValuesBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The values in this metric.
* </pre>
*
* <code>repeated .google.api.servicecontrol.v1.MetricValue metric_values = 2;</code>
*/
public Builder addMetricValues(
com.google.api.servicecontrol.v1.MetricValue.Builder builderForValue) {
if (metricValuesBuilder_ == null) {
ensureMetricValuesIsMutable();
metricValues_.add(builderForValue.build());
onChanged();
} else {
metricValuesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The values in this metric.
* </pre>
*
* <code>repeated .google.api.servicecontrol.v1.MetricValue metric_values = 2;</code>
*/
public Builder addMetricValues(
int index, com.google.api.servicecontrol.v1.MetricValue.Builder builderForValue) {
if (metricValuesBuilder_ == null) {
ensureMetricValuesIsMutable();
metricValues_.add(index, builderForValue.build());
onChanged();
} else {
metricValuesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The values in this metric.
* </pre>
*
* <code>repeated .google.api.servicecontrol.v1.MetricValue metric_values = 2;</code>
*/
public Builder addAllMetricValues(
java.lang.Iterable<? extends com.google.api.servicecontrol.v1.MetricValue> values) {
if (metricValuesBuilder_ == null) {
ensureMetricValuesIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, metricValues_);
onChanged();
} else {
metricValuesBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The values in this metric.
* </pre>
*
* <code>repeated .google.api.servicecontrol.v1.MetricValue metric_values = 2;</code>
*/
public Builder clearMetricValues() {
if (metricValuesBuilder_ == null) {
metricValues_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
} else {
metricValuesBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The values in this metric.
* </pre>
*
* <code>repeated .google.api.servicecontrol.v1.MetricValue metric_values = 2;</code>
*/
public Builder removeMetricValues(int index) {
if (metricValuesBuilder_ == null) {
ensureMetricValuesIsMutable();
metricValues_.remove(index);
onChanged();
} else {
metricValuesBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The values in this metric.
* </pre>
*
* <code>repeated .google.api.servicecontrol.v1.MetricValue metric_values = 2;</code>
*/
public com.google.api.servicecontrol.v1.MetricValue.Builder getMetricValuesBuilder(int index) {
return getMetricValuesFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The values in this metric.
* </pre>
*
* <code>repeated .google.api.servicecontrol.v1.MetricValue metric_values = 2;</code>
*/
public com.google.api.servicecontrol.v1.MetricValueOrBuilder getMetricValuesOrBuilder(
int index) {
if (metricValuesBuilder_ == null) {
return metricValues_.get(index);
} else {
return metricValuesBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The values in this metric.
* </pre>
*
* <code>repeated .google.api.servicecontrol.v1.MetricValue metric_values = 2;</code>
*/
public java.util.List<? extends com.google.api.servicecontrol.v1.MetricValueOrBuilder>
getMetricValuesOrBuilderList() {
if (metricValuesBuilder_ != null) {
return metricValuesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(metricValues_);
}
}
/**
*
*
* <pre>
* The values in this metric.
* </pre>
*
* <code>repeated .google.api.servicecontrol.v1.MetricValue metric_values = 2;</code>
*/
public com.google.api.servicecontrol.v1.MetricValue.Builder addMetricValuesBuilder() {
return getMetricValuesFieldBuilder()
.addBuilder(com.google.api.servicecontrol.v1.MetricValue.getDefaultInstance());
}
/**
*
*
* <pre>
* The values in this metric.
* </pre>
*
* <code>repeated .google.api.servicecontrol.v1.MetricValue metric_values = 2;</code>
*/
public com.google.api.servicecontrol.v1.MetricValue.Builder addMetricValuesBuilder(int index) {
return getMetricValuesFieldBuilder()
.addBuilder(index, com.google.api.servicecontrol.v1.MetricValue.getDefaultInstance());
}
/**
*
*
* <pre>
* The values in this metric.
* </pre>
*
* <code>repeated .google.api.servicecontrol.v1.MetricValue metric_values = 2;</code>
*/
public java.util.List<com.google.api.servicecontrol.v1.MetricValue.Builder>
getMetricValuesBuilderList() {
return getMetricValuesFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.api.servicecontrol.v1.MetricValue,
        com.google.api.servicecontrol.v1.MetricValue.Builder,
        com.google.api.servicecontrol.v1.MetricValueOrBuilder>
    getMetricValuesFieldBuilder() {
  // Lazily create the repeated-field builder on first builder-style access.
  // Ownership of the element list transfers to the RepeatedFieldBuilderV3
  // (it is told whether the list is already a mutable copy via the 0x2 bit),
  // after which metricValues_ is nulled out: from then on all accessors go
  // through metricValuesBuilder_ instead of the raw list.
  if (metricValuesBuilder_ == null) {
    metricValuesBuilder_ =
        new com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.api.servicecontrol.v1.MetricValue,
            com.google.api.servicecontrol.v1.MetricValue.Builder,
            com.google.api.servicecontrol.v1.MetricValueOrBuilder>(
            metricValues_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean());
    metricValues_ = null;
  }
  return metricValuesBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.api.servicecontrol.v1.MetricValueSet)
}
// @@protoc_insertion_point(class_scope:google.api.servicecontrol.v1.MetricValueSet)
private static final com.google.api.servicecontrol.v1.MetricValueSet DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.api.servicecontrol.v1.MetricValueSet();
}
public static com.google.api.servicecontrol.v1.MetricValueSet getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Singleton wire-format parser for MetricValueSet. On any parse failure the
// partially-built message is attached to the thrown exception so callers can
// inspect whatever fields were successfully read before the error.
private static final com.google.protobuf.Parser<MetricValueSet> PARSER =
    new com.google.protobuf.AbstractParser<MetricValueSet>() {
      @java.lang.Override
      public MetricValueSet parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Re-throw with the partial message attached for diagnostics.
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          // Missing required fields: convert to the protobuf exception type.
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap low-level I/O failures in the protocol-level exception.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
public static com.google.protobuf.Parser<MetricValueSet> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<MetricValueSet> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.api.servicecontrol.v1.MetricValueSet getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
apache/jackrabbit-oak | 36,271 | oak-auth-external/src/test/java/org/apache/jackrabbit/oak/spi/security/authentication/external/impl/DynamicSyncContextTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.spi.security.authentication.external.impl;
import org.apache.jackrabbit.api.security.user.Authorizable;
import org.apache.jackrabbit.api.security.user.Group;
import org.apache.jackrabbit.api.security.user.User;
import org.apache.jackrabbit.api.security.user.UserManager;
import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.api.Tree;
import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.commons.collections.IterableUtils;
import org.apache.jackrabbit.oak.commons.collections.IteratorUtils;
import org.apache.jackrabbit.oak.commons.collections.SetUtils;
import org.apache.jackrabbit.oak.plugins.tree.TreeUtil;
import org.apache.jackrabbit.oak.spi.security.authentication.external.ExternalGroup;
import org.apache.jackrabbit.oak.spi.security.authentication.external.ExternalIdentity;
import org.apache.jackrabbit.oak.spi.security.authentication.external.ExternalIdentityException;
import org.apache.jackrabbit.oak.spi.security.authentication.external.ExternalIdentityProvider;
import org.apache.jackrabbit.oak.spi.security.authentication.external.ExternalIdentityRef;
import org.apache.jackrabbit.oak.spi.security.authentication.external.ExternalUser;
import org.apache.jackrabbit.oak.spi.security.authentication.external.SyncException;
import org.apache.jackrabbit.oak.spi.security.authentication.external.SyncResult;
import org.apache.jackrabbit.oak.spi.security.authentication.external.SyncedIdentity;
import org.apache.jackrabbit.oak.spi.security.authentication.external.TestIdentityProvider;
import org.apache.jackrabbit.oak.spi.security.authentication.external.basic.DefaultSyncConfig;
import org.apache.jackrabbit.oak.spi.security.authentication.external.basic.DefaultSyncContext;
import org.apache.jackrabbit.oak.spi.security.principal.PrincipalImpl;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.junit.Test;
import javax.jcr.RepositoryException;
import javax.jcr.Value;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.UUID;
import static java.util.stream.Collectors.toSet;
import static org.apache.jackrabbit.JcrConstants.JCR_UUID;
import static org.apache.jackrabbit.oak.spi.security.authentication.external.TestIdentityProvider.ID_SECOND_USER;
import static org.apache.jackrabbit.oak.spi.security.authentication.external.TestIdentityProvider.ID_TEST_USER;
import static org.apache.jackrabbit.oak.spi.security.authentication.external.impl.ExternalIdentityConstants.REP_EXTERNAL_ID;
import static org.apache.jackrabbit.oak.spi.security.authentication.external.impl.ExternalIdentityConstants.REP_EXTERNAL_PRINCIPAL_NAMES;
import static org.apache.jackrabbit.oak.spi.security.authentication.external.impl.ExternalIdentityConstants.REP_LAST_DYNAMIC_SYNC;
import static org.apache.jackrabbit.oak.spi.security.user.UserConstants.REP_MEMBERS;
import static org.apache.jackrabbit.oak.spi.security.user.UserConstants.REP_MEMBERS_LIST;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.clearInvocations;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
public class DynamicSyncContextTest extends AbstractDynamicTest {
static final String PREVIOUS_SYNCED_ID = "third";
static final long PREVIOUS_NESTING_DEPTH = Long.MAX_VALUE;
static final String GROUP_ID = "aaa";
/**
* Synchronized a separate user with DefaultSyncContext to test behavior for previously synchronized user/group
* with deep membership-nesting => all groups synched
*/
@NotNull ExternalUser syncPriorToDynamicMembership() throws Exception {
    // Build a sync config that mirrors the test default but with unlimited
    // membership nesting, so ALL transitive groups get synced as real groups.
    DefaultSyncConfig priorSyncConfig = createSyncConfig();
    priorSyncConfig.user().setMembershipNestingDepth(PREVIOUS_NESTING_DEPTH);
    String idpName = idp.getName();
    TestIdentityProvider tidp = (TestIdentityProvider) idp;
    // Register a 3-level nested group chain (thirdGroup -> tt -> ttt) plus a
    // flat group, and a user that is a declared member of both top groups.
    tidp.addGroup(new TestIdentityProvider.TestGroup("ttt", idpName));
    tidp.addGroup(new TestIdentityProvider.TestGroup("tt", idpName).withGroups("ttt"));
    tidp.addGroup(new TestIdentityProvider.TestGroup("thirdGroup", idpName).withGroups("tt"));
    tidp.addGroup(new TestIdentityProvider.TestGroup("forthGroup", idpName));
    tidp.addUser(new TestIdentityProvider.TestUser(PREVIOUS_SYNCED_ID, idpName).withGroups("thirdGroup", "forthGroup"));
    UserManager um = getUserManager(r);
    // Use the non-dynamic DefaultSyncContext on purpose: this simulates a user
    // that existed in the repository BEFORE dynamic membership was enabled.
    DefaultSyncContext ctx = new DefaultSyncContext(priorSyncConfig, idp, um, getValueFactory(r));
    ExternalUser previouslySyncedUser = idp.getUser(PREVIOUS_SYNCED_ID);
    assertNotNull(previouslySyncedUser);
    SyncResult result = ctx.sync(previouslySyncedUser);
    assertSame(SyncResult.Status.ADD, result.getStatus());
    ctx.close();
    // Persist so subsequent test sessions observe the pre-dynamic state.
    r.commit();
    return previouslySyncedUser;
}
protected void assertDynamicMembership(@NotNull ExternalIdentity externalIdentity, long depth) throws Exception {
    // Look up the synced authorizable for the external identity and verify its
    // dynamic membership (rep:externalPrincipalNames) up to the given nesting depth.
    Authorizable authorizable = userManager.getAuthorizable(externalIdentity.getId());
    assertNotNull(authorizable);
    assertDynamicMembership(authorizable, externalIdentity, depth);
}
private void assertDynamicMembership(@NotNull Authorizable a, @NotNull ExternalIdentity externalIdentity, long depth) throws Exception {
    // Verify that rep:externalPrincipalNames on the authorizable equals the set
    // of group principal names reachable from the external identity within 'depth'.
    Value[] vs = a.getProperty(REP_EXTERNAL_PRINCIPAL_NAMES);
    // Fix: fail with a clear assertion message instead of an opaque NPE in the
    // stream below when the property was never written (i.e. the identity was
    // not synced with dynamic membership enabled).
    assertNotNull("Missing " + REP_EXTERNAL_PRINCIPAL_NAMES + " property on " + a.getID(), vs);
    Set<String> pNames = Arrays.stream(vs).map(value -> {
        try {
            return value.getString();
        } catch (RepositoryException e) {
            // Unreadable values are dropped; the resulting set mismatch below
            // will surface the problem in the assertEquals.
            return null;
        }
    }).filter(Objects::nonNull).collect(toSet());
    Set<String> expected = new HashSet<>();
    collectGroupPrincipals(expected, externalIdentity.getDeclaredGroups(), depth);
    assertEquals(expected, pNames);
}
private void collectGroupPrincipals(Set<String> pNames, @NotNull Iterable<ExternalIdentityRef> declaredGroups, long depth) throws ExternalIdentityException {
    // Recursively accumulate principal names of all groups reachable from the
    // given declared-group refs; a non-positive depth stops the expansion.
    if (depth > 0) {
        for (ExternalIdentityRef groupRef : declaredGroups) {
            ExternalIdentity group = idp.getIdentity(groupRef);
            pNames.add(group.getPrincipalName());
            // Descend one nesting level into the group's own declared groups.
            collectGroupPrincipals(pNames, group.getDeclaredGroups(), depth - 1);
        }
    }
}
void assertSyncedMembership(@NotNull UserManager userManager,
                            @NotNull Authorizable a,
                            @NotNull ExternalIdentity externalIdentity) throws Exception {
    // Convenience overload: use the membership nesting depth from the
    // user-sync configuration of this test.
    long configuredDepth = syncConfig.user().getMembershipNestingDepth();
    assertSyncedMembership(userManager, a, externalIdentity, configuredDepth);
}
void assertSyncedMembership(@NotNull UserManager userManager,
@NotNull Authorizable a,
@NotNull ExternalIdentity externalIdentity,
long membershipNestingDepth) throws Exception {
Iterable<ExternalIdentityRef> declaredGroupRefs = externalIdentity.getDeclaredGroups();
Set<ExternalIdentityRef> expectedGroupRefs = getExpectedSyncedGroupRefs(membershipNestingDepth, idp, externalIdentity);
for (ExternalIdentityRef ref : expectedGroupRefs) {
Group gr = userManager.getAuthorizable(ref.getId(), Group.class);
assertNotNull(gr);
assertTrue(gr.isMember(a));
List<String> ids = getIds(a.memberOf());
assertTrue("Expected "+ids+ " to contain "+gr.getID(), ids.contains(gr.getID()));
if (IterableUtils.contains(declaredGroupRefs, ref)) {
assertTrue(gr.isDeclaredMember(a));
assertTrue(IteratorUtils.contains(a.declaredMemberOf(), gr));
}
}
}
void assertDeclaredGroups(@NotNull ExternalUser externalUser) throws Exception {
    // For every group expected to be covered by the configured nesting depth:
    // with dynamic-groups enabled a group authorizable must exist, otherwise
    // dynamic membership implies no group authorizables are created.
    long depth = syncConfig.user().getMembershipNestingDepth();
    for (ExternalIdentityRef ref : getExpectedSyncedGroupRefs(depth, idp, externalUser)) {
        Authorizable group = userManager.getAuthorizable(ref.getId());
        if (syncConfig.group().getDynamicGroups()) {
            assertNotNull(group);
        } else {
            assertNull(group);
        }
    }
}
static boolean hasStoredMembershipInformation(@NotNull Tree groupTree, @NotNull Tree memberTree) {
    // Determine whether the group tree persists a membership reference to the
    // member tree, either directly in rep:members or in one of the overflow
    // trees below rep:membersList.
    String memberUuid = TreeUtil.getString(memberTree, JCR_UUID);
    assertNotNull(memberUuid);
    boolean found = containsMemberRef(groupTree, memberUuid);
    if (!found && groupTree.hasChild(REP_MEMBERS_LIST)) {
        for (Tree overflow : groupTree.getChild(REP_MEMBERS_LIST).getChildren()) {
            if (containsMemberRef(overflow, memberUuid)) {
                found = true;
                break;
            }
        }
    }
    return found;
}
private static boolean containsMemberRef(@NotNull Tree tree, @NotNull String ref) {
    // rep:members may be absent on the tree; a null iterable means "no
    // member references stored here".
    Iterable<String> memberRefs = TreeUtil.getStrings(tree, REP_MEMBERS);
    if (memberRefs == null) {
        return false;
    }
    return IterableUtils.contains(memberRefs, ref);
}
@Test(expected = IllegalArgumentException.class)
public void testSyncExternalIdentity() throws Exception {
syncContext.sync(new TestIdentityProvider.TestIdentity());
}
@Test
public void testSyncExternalUser() throws Exception {
ExternalUser externalUser = idp.getUser(USER_ID);
sync(externalUser, SyncResult.Status.ADD);
Authorizable a = userManager.getAuthorizable(USER_ID);
assertNotNull(a);
assertDeclaredGroups(externalUser);
}
@Test
public void testSyncExternalUserDepth0() throws Exception {
syncConfig.user().setMembershipNestingDepth(0);
ExternalUser externalUser = idp.getUser(USER_ID);
sync(externalUser, SyncResult.Status.ADD);
Tree tree = r.getTree(userManager.getAuthorizable(USER_ID).getPath());
PropertyState extPrincipalNames = tree.getProperty(REP_EXTERNAL_PRINCIPAL_NAMES);
assertNotNull(extPrincipalNames);
assertEquals(0, extPrincipalNames.count());
}
@Test
public void testSyncExternalUserDepth1() throws Exception {
syncConfig.user().setMembershipNestingDepth(1);
ExternalUser externalUser = idp.getUser(USER_ID);
sync(externalUser, SyncResult.Status.ADD);
Tree tree = r.getTree(userManager.getAuthorizable(USER_ID).getPath());
PropertyState extPrincipalNames = tree.getProperty(REP_EXTERNAL_PRINCIPAL_NAMES);
assertNotNull(extPrincipalNames);
Set<String> pNames = SetUtils.toSet(extPrincipalNames.getValue(Type.STRINGS));
for (ExternalIdentityRef ref : externalUser.getDeclaredGroups()) {
assertTrue(pNames.remove(idp.getIdentity(ref).getPrincipalName()));
}
assertTrue(pNames.isEmpty());
}
@Test
public void testSyncExternalUserDepthInfinite() throws Exception {
syncConfig.user().setMembershipNestingDepth(Long.MAX_VALUE);
ExternalUser externalUser = idp.getUser(USER_ID);
sync(externalUser, SyncResult.Status.ADD);
Tree tree = r.getTree(userManager.getAuthorizable(USER_ID).getPath());
PropertyState extPrincipalNames = tree.getProperty(REP_EXTERNAL_PRINCIPAL_NAMES);
assertNotNull(extPrincipalNames);
Set<String> pNames = SetUtils.toSet(extPrincipalNames.getValue(Type.STRINGS));
Set<String> expected = new HashSet<>();
collectGroupPrincipals(expected, externalUser.getDeclaredGroups(), Long.MAX_VALUE);
assertEquals(expected, pNames);
}
@Test
public void testSyncExternalUserGroupConflict() throws Exception {
ExternalUser externalUser = idp.getUser(USER_ID);
// create a local group that collides with the external group membership
// i.e. doesn't have an rep:externalId set
ExternalIdentity externalGroup = idp.getIdentity(externalUser.getDeclaredGroups().iterator().next());
assertNotNull(externalGroup);
assertIgnored(externalUser, externalGroup, externalGroup.getId(), externalGroup.getPrincipalName(), null);
}
@Test
public void testSyncExternalUserGroupConflictDifferentIDP() throws Exception {
ExternalUser externalUser = idp.getUser(USER_ID);
// create a local group that collides with the external group membership
// i.e. belongs to a different IDP
ExternalIdentityRef ref = externalUser.getDeclaredGroups().iterator().next();
ExternalIdentity externalGroup = idp.getIdentity(ref);
assertNotNull(externalGroup);
assertIgnored(externalUser, externalGroup, externalGroup.getId(), externalGroup.getPrincipalName(),
new ExternalIdentityRef(ref.getId(), ref.getProviderName()+"_mod"));
}
@Test
public void testSyncExternalUserGroupConflictPrincipalNameMismatch() throws Exception {
ExternalUser externalUser = idp.getUser(USER_ID);
ExternalIdentityRef ref = externalUser.getDeclaredGroups().iterator().next();
ExternalIdentity externalGroup = idp.getIdentity(ref);
assertNotNull(externalGroup);
// create a local group that has the same ID but a mismatching principal name
// and verify that the group is ignored;
assertIgnored(externalUser, externalGroup, externalGroup.getId(), externalGroup.getPrincipalName()+"mismatch", ref);
}
@Test
public void testSyncExternalUserGroupConflictPrincipalNameCaseMismatch() throws Exception {
ExternalUser externalUser = idp.getUser(USER_ID);
ExternalIdentityRef ref = externalUser.getDeclaredGroups().iterator().next();
ExternalIdentity externalGroup = idp.getIdentity(ref);
assertNotNull(externalGroup);
// create a local group that has the same ID but a mismatching principal name (only case)
// and verify that the group is ignored;
assertIgnored(externalUser, externalGroup, externalGroup.getId(), externalGroup.getPrincipalName().toUpperCase(), ref);
}
@Test
public void testSyncExternalUserGroupConflictIdCaseMismatch() throws Exception {
ExternalUser externalUser = idp.getUser(USER_ID);
ExternalIdentityRef ref = externalUser.getDeclaredGroups().iterator().next();
ExternalIdentity externalGroup = idp.getIdentity(ref);
assertNotNull(externalGroup);
// create a local group that has the case-mismatch in ID/principal name
// and verify that the external group is ignored;
assertIgnored(externalUser, externalGroup, externalGroup.getId().toUpperCase(), externalGroup.getPrincipalName(), null);
}
private void assertIgnored(@NotNull ExternalUser externalUser, @NotNull ExternalIdentity externalGroup,
@NotNull String existingId, @NotNull String existingPrincipalName, @Nullable ExternalIdentityRef existingGroupRef) throws Exception {
Group g = userManager.createGroup(existingId, new PrincipalImpl(existingPrincipalName), null);
if (existingGroupRef != null) {
g.setProperty(REP_EXTERNAL_ID, getValueFactory().createValue(existingGroupRef.getString()));
}
r.commit();
// sync the user with dynamic membership enabled
sync(externalUser, SyncResult.Status.ADD);
// retrieve rep:externalPrincipalNames
Tree tree = r.getTree(userManager.getAuthorizable(USER_ID).getPath());
PropertyState extPrincipalNames = tree.getProperty(REP_EXTERNAL_PRINCIPAL_NAMES);
assertNotNull(extPrincipalNames);
// the resulting rep:externalPrincipalNames must NOT contain the name of the colliding principal
Set<String> pNames = SetUtils.toSet(extPrincipalNames.getValue(Type.STRINGS));
assertFalse(pNames + " must not contain " + externalGroup.getPrincipalName(), pNames.contains(externalGroup.getPrincipalName()));
}
@Test
public void testSyncExternalUserGroupConflictWithUser() throws Exception {
ExternalUser externalUser = idp.getUser(USER_ID);
// create a local user that collides with the first external group ref
ExternalIdentityRef ref = externalUser.getDeclaredGroups().iterator().next();
ExternalIdentity externalGroup = idp.getIdentity(ref);
User collision = userManager.createUser(externalGroup.getId(), null, new PrincipalImpl(externalGroup.getPrincipalName()), null);
r.commit();
// sync the user with dynamic membership enabled
sync(externalUser, SyncResult.Status.ADD);
// retrieve rep:externalPrincipalNames
Tree tree = r.getTree(userManager.getAuthorizable(USER_ID).getPath());
PropertyState extPrincipalNames = tree.getProperty(REP_EXTERNAL_PRINCIPAL_NAMES);
assertNotNull(extPrincipalNames);
// the resulting rep:externalPrincipalNames must NOT contain the name of the colliding principal
Set<String> pNames = SetUtils.toSet(extPrincipalNames.getValue(Type.STRINGS));
assertFalse(pNames + " must not contain " + externalGroup.getPrincipalName(), pNames.contains(externalGroup.getPrincipalName()));
}
@Test
public void testSyncExternalUserExistingGroups() throws Exception {
// verify group membership of the previously synced user
Authorizable a = userManager.getAuthorizable(previouslySyncedUser.getId());
assertSyncedMembership(userManager, a, previouslySyncedUser, Long.MAX_VALUE);
// resync the previously synced user with dynamic-membership enabled.
syncContext.setForceUserSync(true);
syncConfig.user().setMembershipExpirationTime(-1);
syncContext.sync(previouslySyncedUser);
Tree t = r.getTree(a.getPath());
assertFalse(t.hasProperty(REP_EXTERNAL_PRINCIPAL_NAMES));
assertSyncedMembership(userManager, a, previouslySyncedUser);
}
@Test
public void testSyncExternalGroup() throws Exception {
ExternalGroup gr = idp.getGroup(GROUP_ID);
syncContext.sync(gr);
assertNull(userManager.getAuthorizable(gr.getId()));
assertFalse(r.hasPendingChanges());
}
@Test
public void testSyncExternalGroupVerifyStatus() throws Exception {
ExternalGroup gr = idp.getGroup(GROUP_ID);
SyncResult result = syncContext.sync(gr);
SyncResult.Status expectedStatus = (syncConfig.group().getDynamicGroups()) ? SyncResult.Status.ADD : SyncResult.Status.NOP;
assertEquals(expectedStatus, result.getStatus());
result = syncContext.sync(gr);
assertEquals(SyncResult.Status.NOP, result.getStatus());
syncContext.setForceGroupSync(true);
result = syncContext.sync(gr);
expectedStatus = (syncConfig.group().getDynamicGroups()) ? SyncResult.Status.UPDATE : SyncResult.Status.NOP;
assertEquals(expectedStatus, result.getStatus());
}
@Test
public void testSyncExternalGroupExisting() throws Exception {
    // create an external group that already has been synced into the repo
    // (it belongs to the declared groups of the previously synced user)
    ExternalGroup externalGroup = idp.getGroup(previouslySyncedUser.getDeclaredGroups().iterator().next().getId());
    assertNotNull(externalGroup);
    // synchronizing using DynamicSyncContext must update the existing group
    syncContext.setForceGroupSync(true);
    SyncResult result = syncContext.sync(externalGroup);
    assertSame(SyncResult.Status.UPDATE, result.getStatus());
}
@Test
public void testSyncForeignExternalGroup() throws Exception {
ExternalGroup foreign = new TestIdentityProvider.ForeignExternalGroup();
SyncResult res = syncContext.sync(foreign);
assertNotNull(res);
assertSame(SyncResult.Status.FOREIGN, res.getStatus());
// expect {@code SyncedIdentity} in accordance with {@code sync(String userId)},
// where the authorizable is found to be linked to a different IDP.
SyncedIdentity si = res.getIdentity();
assertNotNull(si);
assertEquals(foreign.getId(), si.getId());
ExternalIdentityRef ref = si.getExternalIdRef();
assertNotNull(ref);
assertEquals(foreign.getExternalId(), ref);
assertTrue(si.isGroup());
assertEquals(-1, si.lastSynced());
assertFalse(r.hasPendingChanges());
}
@Test
public void testSyncExternalGroupRepositoryException() throws Exception {
Exception ex = new RepositoryException();
UserManager um = mock(UserManager.class);
when(um.getAuthorizable(any(String.class))).thenThrow(ex);
DynamicSyncContext ctx = new DynamicSyncContext(syncConfig, idp, um, valueFactory);
try {
ctx.sync(idp.getGroup(GROUP_ID));
fail();
} catch (SyncException e) {
assertEquals(ex, e.getCause());
}
}
@Test
public void testSyncUserByIdUpdate() throws Exception {
ExternalIdentity externalId = idp.getUser(ID_SECOND_USER);
Authorizable a = userManager.createUser(externalId.getId(), null);
a.setProperty(DefaultSyncContext.REP_EXTERNAL_ID, valueFactory.createValue(externalId.getExternalId().getString()));
syncContext.setForceUserSync(true);
SyncResult result = syncContext.sync(externalId.getId());
assertEquals(SyncResult.Status.UPDATE, result.getStatus());
Tree t = r.getTree(a.getPath());
assertTrue(t.hasProperty(REP_EXTERNAL_PRINCIPAL_NAMES));
}
@Test
public void testPreviouslySyncedIdentities() throws Exception {
Authorizable user = userManager.getAuthorizable(PREVIOUS_SYNCED_ID);
assertNotNull(user);
assertFalse(user.hasProperty(REP_EXTERNAL_PRINCIPAL_NAMES));
assertSyncedMembership(userManager, user, previouslySyncedUser, PREVIOUS_NESTING_DEPTH);
}
@Test
public void testSyncUserIdExistingGroupsMembershipNotExpired() throws Exception {
// make sure membership is not expired
long previousExpTime = syncConfig.user().getMembershipExpirationTime();
DefaultSyncConfig.User uc = syncConfig.user();
try {
uc.setMembershipExpirationTime(Long.MAX_VALUE);
syncContext.setForceUserSync(true);
syncContext.sync(previouslySyncedUser.getId());
Authorizable a = userManager.getAuthorizable(PREVIOUS_SYNCED_ID);
Tree t = r.getTree(a.getPath());
assertFalse(t.hasProperty(REP_EXTERNAL_PRINCIPAL_NAMES));
assertSyncedMembership(userManager, a, previouslySyncedUser);
} finally {
uc.setMembershipExpirationTime(previousExpTime);
}
}
@Test
public void testSyncUserIdExistingGroups() throws Exception {
// mark membership information as expired
long previousExpTime = syncConfig.user().getMembershipExpirationTime();
DefaultSyncConfig.User uc = syncConfig.user();
try {
// Expire membership immediately so the forced sync re-evaluates it.
uc.setMembershipExpirationTime(-1);
syncContext.setForceUserSync(true);
syncContext.sync(previouslySyncedUser.getId());
Authorizable a = userManager.getAuthorizable(PREVIOUS_SYNCED_ID);
Tree t = r.getTree(a.getPath());
// Migration to dynamic membership only happens if enforced for users or if
// dynamic groups are enabled (test is parameterized on these settings).
boolean expectedMigration = (uc.getEnforceDynamicMembership() || syncConfig.group().getDynamicGroups());
if (expectedMigration) {
assertTrue(t.hasProperty(REP_EXTERNAL_PRINCIPAL_NAMES));
int expSize = getExpectedSyncedGroupRefs(uc.getMembershipNestingDepth(), idp, previouslySyncedUser).size();
assertEquals(expSize, t.getProperty(REP_EXTERNAL_PRINCIPAL_NAMES).count());
} else {
assertFalse(t.hasProperty(REP_EXTERNAL_PRINCIPAL_NAMES));
}
// With enforced dynamic membership (but no dynamic groups) the previously
// synced group nodes are expected to have been removed; otherwise the
// regular synced membership must still be intact.
if (uc.getEnforceDynamicMembership() && !syncConfig.group().getDynamicGroups()) {
for (String id : getExpectedSyncedGroupIds(uc.getMembershipNestingDepth(), idp, previouslySyncedUser)) {
assertNull(userManager.getAuthorizable(id));
}
} else {
assertSyncedMembership(userManager, a, previouslySyncedUser);
}
} finally {
// Restore the shared config for subsequent tests.
uc.setMembershipExpirationTime(previousExpTime);
}
}
@Test
public void testSyncMembershipWithNesting() throws Exception {
    long depth = 1;
    syncConfig.user().setMembershipNestingDepth(depth);
    ExternalUser externalUser = idp.getUser(USER_ID);
    sync(externalUser, SyncResult.Status.ADD);
    Authorizable authorizable = userManager.getAuthorizable(externalUser.getId());
    assertDynamicMembership(externalUser, depth);
    // The membership must always be reflected in the rep:externalPrincipalNames
    // property, independent of the nesting depth passed to syncMembership:
    // 1. membership nesting = -1
    depth = -1;
    syncContext.syncMembership(externalUser, authorizable, depth);
    assertDynamicMembership(authorizable, externalUser, depth);
    // 2. membership nesting > 0
    depth = Long.MAX_VALUE;
    syncContext.syncMembership(externalUser, authorizable, depth);
    assertDynamicMembership(authorizable, externalUser, depth);
}
@Test
public void testSyncMembershipWithChangedGroups() throws Exception {
    long depth = 1;
    syncConfig.user().setMembershipNestingDepth(depth);
    ExternalUser externalUser = idp.getUser(USER_ID);
    sync(externalUser, SyncResult.Status.ADD);
    Authorizable authorizable = userManager.getAuthorizable(externalUser.getId());
    assertDynamicMembership(authorizable, externalUser, depth);
    // Re-syncing a user with modified membership must be reflected:
    // 1. an empty set of declared groups
    ExternalUser modified = new TestUserWithGroupRefs(externalUser, Set.of());
    syncContext.syncMembership(modified, authorizable, depth);
    assertDynamicMembership(authorizable, modified, depth);
    // 2. a set of groups different from those defined on the IDP
    modified = new TestUserWithGroupRefs(externalUser, Set.of(
            idp.getGroup("a").getExternalId(),
            idp.getGroup("aa").getExternalId(),
            idp.getGroup("secondGroup").getExternalId()));
    syncContext.syncMembership(modified, authorizable, depth);
    assertDynamicMembership(authorizable, modified, depth);
}
@Test
public void testSyncMembershipWithEmptyExistingGroups() throws Exception {
    long depth = syncConfig.user().getMembershipNestingDepth();
    Authorizable authorizable = userManager.getAuthorizable(PREVIOUS_SYNCED_ID);
    // Syncing a user whose declared groups became empty must be reflected in
    // the synced membership.
    ExternalUser modified = new TestUserWithGroupRefs(previouslySyncedUser, Set.of());
    syncContext.syncMembership(modified, authorizable, depth);
    assertSyncedMembership(userManager, authorizable, modified, depth);
}
@Test
public void testSyncMembershipWithChangedExistingGroups() throws Exception {
    long depth = syncConfig.user().getMembershipNestingDepth();
    Authorizable authorizable = userManager.getAuthorizable(PREVIOUS_SYNCED_ID);
    // Syncing with a set of groups that differs from the IDP definition must
    // be reflected in the synced membership.
    ExternalUser modified = new TestUserWithGroupRefs(previouslySyncedUser, Set.of(
            idp.getGroup("a").getExternalId(),
            idp.getGroup("aa").getExternalId(),
            idp.getGroup("secondGroup").getExternalId()));
    syncContext.syncMembership(modified, authorizable, depth);
    // Persist so that assertions relying on queries see the modified membership.
    r.commit();
    assertSyncedMembership(userManager, authorizable, modified);
}
@Test
public void testSyncMembershipForExternalGroup() throws Exception {
    // The previously synced 'third-group' itself has declared groups,
    // i.e. nested membership.
    String nestedGroupId = previouslySyncedUser.getDeclaredGroups().iterator().next().getId();
    ExternalGroup externalGroup = idp.getGroup(nestedGroupId);
    Authorizable groupAuthorizable = userManager.getAuthorizable(externalGroup.getId());
    syncContext.syncMembership(externalGroup, groupAuthorizable, 1);
    // Dynamic membership is written for users only; syncing a group's
    // membership must be a no-op.
    assertFalse(groupAuthorizable.hasProperty(REP_EXTERNAL_PRINCIPAL_NAMES));
    assertFalse(r.hasPendingChanges());
}
@Test
public void testSyncMembershipWithForeignGroups() throws Exception {
TestIdentityProvider.TestUser testuser = (TestIdentityProvider.TestUser) idp.getUser(ID_TEST_USER);
Set<ExternalIdentityRef> sameIdpGroups = getExpectedSyncedGroupRefs(syncConfig.user().getMembershipNestingDepth(), idp, testuser);
// Add a group reference that belongs to a different IDP.
TestIdentityProvider.ForeignExternalGroup foreignGroup = new TestIdentityProvider.ForeignExternalGroup();
testuser.withGroups(foreignGroup.getExternalId());
assertNotEquals(sameIdpGroups, testuser.getDeclaredGroups());
sync(testuser, SyncResult.Status.ADD);
Authorizable a = userManager.getAuthorizable(ID_TEST_USER);
assertTrue(a.hasProperty(REP_EXTERNAL_PRINCIPAL_NAMES));
// Only the same-IDP groups may be recorded; the foreign group reference
// must have been ignored by the sync.
Value[] extPrincipalNames = a.getProperty(REP_EXTERNAL_PRINCIPAL_NAMES);
assertEquals(IterableUtils.size(sameIdpGroups), extPrincipalNames.length);
for (Value v : extPrincipalNames) {
assertNotEquals(foreignGroup.getPrincipalName(), v.getString());
}
}
@Test
public void testSyncMembershipWithUserRef() throws Exception {
TestIdentityProvider.TestUser testuser = (TestIdentityProvider.TestUser) idp.getUser(ID_TEST_USER);
Set<ExternalIdentityRef> groupRefs = getExpectedSyncedGroupRefs(syncConfig.user().getMembershipNestingDepth(), idp, testuser);
// verify that the conflicting user has not been synced before
assertNull(userManager.getAuthorizable(ID_SECOND_USER));
// Add a group reference that actually points to a user on the IDP.
ExternalUser second = idp.getUser(ID_SECOND_USER);
testuser.withGroups(second.getExternalId());
assertFalse(IterableUtils.elementsEqual(groupRefs, testuser.getDeclaredGroups()));
sync(testuser, SyncResult.Status.ADD);
Authorizable a = userManager.getAuthorizable(ID_TEST_USER);
assertTrue(a.hasProperty(REP_EXTERNAL_PRINCIPAL_NAMES));
// The user-reference must have been ignored: only real group principals
// may end up in rep:externalPrincipalNames.
Value[] extPrincipalNames = a.getProperty(REP_EXTERNAL_PRINCIPAL_NAMES);
assertEquals(IterableUtils.size(groupRefs), extPrincipalNames.length);
for (Value v : extPrincipalNames) {
assertNotEquals(second.getPrincipalName(), v.getString());
}
}
@Test
public void testSyncMembershipWithUserConflict() throws Exception {
TestIdentityProvider.TestUser testuser = (TestIdentityProvider.TestUser) idp.getUser(ID_TEST_USER);
Set<ExternalIdentityRef> groupRefs = getExpectedSyncedGroupRefs(syncConfig.user().getMembershipNestingDepth(), idp, testuser);
// in contrast to 'testSyncMembershipWithUserRef' the conflicting group-ref refers to a user in the repository
// and the conflict is spotted as the existing synched identity is not a group.
testuser.withGroups(previouslySyncedUser.getExternalId());
assertFalse(IterableUtils.elementsEqual(groupRefs, testuser.getDeclaredGroups()));
sync(testuser, SyncResult.Status.ADD);
Authorizable a = userManager.getAuthorizable(ID_TEST_USER);
assertTrue(a.hasProperty(REP_EXTERNAL_PRINCIPAL_NAMES));
// The conflicting reference must have been dropped: only the expected group
// principals may be recorded.
Value[] extPrincipalNames = a.getProperty(REP_EXTERNAL_PRINCIPAL_NAMES);
assertEquals(IterableUtils.size(groupRefs), extPrincipalNames.length);
for (Value v : extPrincipalNames) {
assertNotEquals(previouslySyncedUser.getPrincipalName(), v.getString());
}
}
@Test
public void testSyncMembershipDeclaredGroupsFails() throws Exception {
    ExternalIdentityProvider spiedIdp = spy(idp);
    ExternalUser spiedUser = spy(spiedIdp.getUser(TestIdentityProvider.ID_TEST_USER));
    syncContext.sync(spiedUser);
    clearInvocations(spiedIdp);
    Authorizable authorizable = userManager.getAuthorizable(spiedUser.getId());
    assertNotNull(authorizable);
    // If resolving the declared groups fails, syncMembership must bail out
    // without looking up any identity on the IDP.
    when(spiedUser.getDeclaredGroups()).thenThrow(new ExternalIdentityException());
    syncContext.syncMembership(spiedUser, authorizable, 1);
    verify(spiedIdp, never()).getIdentity(any(ExternalIdentityRef.class));
}
@Test
public void testAutoMembership() throws Exception {
    Group autoGroup = userManager.createGroup("group" + UUID.randomUUID());
    r.commit();
    syncConfig.user().setAutoMembership(autoGroup.getID(), "non-existing-group");
    SyncResult result = syncContext.sync(idp.getUser(USER_ID));
    assertSame(SyncResult.Status.ADD, result.getStatus());
    // With dynamic membership the auto-membership group must not list the
    // synced user as a member through the user-management API.
    User syncedUser = userManager.getAuthorizable(USER_ID, User.class);
    assertFalse(autoGroup.isDeclaredMember(syncedUser));
    assertFalse(autoGroup.isMember(syncedUser));
}
@Test
public void testConvertToDynamicMembershipAlreadyDynamic() throws Exception {
    syncConfig.user().setMembershipNestingDepth(1);
    ExternalUser externalUser = idp.getUser(USER_ID);
    sync(externalUser, SyncResult.Status.ADD);
    User syncedUser = userManager.getAuthorizable(externalUser.getId(), User.class);
    assertNotNull(syncedUser);
    // A user synced with dynamic membership enabled needs no conversion.
    assertFalse(syncContext.convertToDynamicMembership(syncedUser));
}
@Test
public void testConvertToDynamicMembership() throws Exception {
    User previouslySynced = userManager.getAuthorizable(PREVIOUS_SYNCED_ID, User.class);
    assertNotNull(previouslySynced);
    // Before conversion neither dynamic-sync marker property may exist.
    assertFalse(previouslySynced.hasProperty(REP_EXTERNAL_PRINCIPAL_NAMES));
    assertFalse(previouslySynced.hasProperty(REP_LAST_DYNAMIC_SYNC));
    // Conversion must succeed and leave both marker properties behind,
    // preserving the declared groups.
    assertTrue(syncContext.convertToDynamicMembership(previouslySynced));
    assertTrue(previouslySynced.hasProperty(REP_EXTERNAL_PRINCIPAL_NAMES));
    assertTrue(previouslySynced.hasProperty(REP_LAST_DYNAMIC_SYNC));
    assertDeclaredGroups(previouslySyncedUser);
}
@Test
public void testConvertToDynamicMembershipForGroup() throws Exception {
    // Conversion is defined for users only; a group must be rejected.
    Authorizable groupMock = mock(Authorizable.class);
    when(groupMock.isGroup()).thenReturn(true);
    assertFalse(syncContext.convertToDynamicMembership(groupMock));
}
/**
 * Test double wrapping an existing {@link ExternalUser} while exposing a
 * custom (possibly modified) set of declared group references. Used to
 * simulate membership changes on the IDP without mutating the base user.
 */
static final class TestUserWithGroupRefs extends TestIdentityProvider.TestIdentity implements ExternalUser {

    // The replacement group references returned instead of the base user's.
    private final Iterable<ExternalIdentityRef> declaredGroupRefs;

    TestUserWithGroupRefs(@NotNull ExternalUser base, @NotNull Iterable<ExternalIdentityRef> declaredGroupRefs) {
        super(base);
        this.declaredGroupRefs = declaredGroupRefs;
    }

    // Added @Override for consistency with getDeclaredGroups(): this
    // implements ExternalUser.getPassword().
    @Override
    public String getPassword() {
        return "";
    }

    @NotNull
    @Override
    public Iterable<ExternalIdentityRef> getDeclaredGroups() {
        return declaredGroupRefs;
    }
}
}
|
googleapis/google-cloud-java | 35,944 | java-optimization/proto-google-cloud-optimization-v1/src/main/java/com/google/cloud/optimization/v1/ShipmentTypeIncompatibility.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/optimization/v1/fleet_routing.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.optimization.v1;
/**
*
*
* <pre>
* Specifies incompatibilities between shipments depending on their
* shipment_type. The appearance of incompatible shipments on the same route is
* restricted based on the incompatibility mode.
* </pre>
*
* Protobuf type {@code google.cloud.optimization.v1.ShipmentTypeIncompatibility}
*/
public final class ShipmentTypeIncompatibility extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.optimization.v1.ShipmentTypeIncompatibility)
ShipmentTypeIncompatibilityOrBuilder {
private static final long serialVersionUID = 0L;
// Use ShipmentTypeIncompatibility.newBuilder() to construct.
private ShipmentTypeIncompatibility(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// Default instance: empty types list, unspecified incompatibility mode.
private ShipmentTypeIncompatibility() {
types_ = com.google.protobuf.LazyStringArrayList.emptyList();
incompatibilityMode_ = 0;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ShipmentTypeIncompatibility();
}
// Generated code: descriptor and reflective field-accessor table lookups.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.optimization.v1.FleetRoutingProto
.internal_static_google_cloud_optimization_v1_ShipmentTypeIncompatibility_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.optimization.v1.FleetRoutingProto
.internal_static_google_cloud_optimization_v1_ShipmentTypeIncompatibility_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.optimization.v1.ShipmentTypeIncompatibility.class,
com.google.cloud.optimization.v1.ShipmentTypeIncompatibility.Builder.class);
}
/**
 *
 *
 * <pre>
 * Modes defining how the appearance of incompatible shipments is restricted
 * on the same route.
 * </pre>
 *
 * Protobuf enum {@code
 * google.cloud.optimization.v1.ShipmentTypeIncompatibility.IncompatibilityMode}
 */
public enum IncompatibilityMode implements com.google.protobuf.ProtocolMessageEnum {
/**
 *
 *
 * <pre>
 * Unspecified incompatibility mode. This value should never be used.
 * </pre>
 *
 * <code>INCOMPATIBILITY_MODE_UNSPECIFIED = 0;</code>
 */
INCOMPATIBILITY_MODE_UNSPECIFIED(0),
/**
 *
 *
 * <pre>
 * In this mode, two shipments with incompatible types can never share the
 * same vehicle.
 * </pre>
 *
 * <code>NOT_PERFORMED_BY_SAME_VEHICLE = 1;</code>
 */
NOT_PERFORMED_BY_SAME_VEHICLE(1),
/**
 *
 *
 * <pre>
 * For two shipments with incompatible types with the
 * `NOT_IN_SAME_VEHICLE_SIMULTANEOUSLY` incompatibility mode:
 *
 * * If both are pickups only (no deliveries) or deliveries only (no
 * pickups), they cannot share the same vehicle at all.
 * * If one of the shipments has a delivery and the other a pickup, the two
 * shipments can share the same vehicle iff the former shipment is
 * delivered before the latter is picked up.
 * </pre>
 *
 * <code>NOT_IN_SAME_VEHICLE_SIMULTANEOUSLY = 2;</code>
 */
NOT_IN_SAME_VEHICLE_SIMULTANEOUSLY(2),
// Sentinel for wire values unknown to this generated code version.
UNRECOGNIZED(-1),
;
/**
 *
 *
 * <pre>
 * Unspecified incompatibility mode. This value should never be used.
 * </pre>
 *
 * <code>INCOMPATIBILITY_MODE_UNSPECIFIED = 0;</code>
 */
public static final int INCOMPATIBILITY_MODE_UNSPECIFIED_VALUE = 0;
/**
 *
 *
 * <pre>
 * In this mode, two shipments with incompatible types can never share the
 * same vehicle.
 * </pre>
 *
 * <code>NOT_PERFORMED_BY_SAME_VEHICLE = 1;</code>
 */
public static final int NOT_PERFORMED_BY_SAME_VEHICLE_VALUE = 1;
/**
 *
 *
 * <pre>
 * For two shipments with incompatible types with the
 * `NOT_IN_SAME_VEHICLE_SIMULTANEOUSLY` incompatibility mode:
 *
 * * If both are pickups only (no deliveries) or deliveries only (no
 * pickups), they cannot share the same vehicle at all.
 * * If one of the shipments has a delivery and the other a pickup, the two
 * shipments can share the same vehicle iff the former shipment is
 * delivered before the latter is picked up.
 * </pre>
 *
 * <code>NOT_IN_SAME_VEHICLE_SIMULTANEOUSLY = 2;</code>
 */
public static final int NOT_IN_SAME_VEHICLE_SIMULTANEOUSLY_VALUE = 2;
// Wire value of this enum constant; undefined for UNRECOGNIZED.
public final int getNumber() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalArgumentException(
"Can't get the number of an unknown enum value.");
}
return value;
}
/**
 * @param value The numeric wire value of the corresponding enum entry.
 * @return The enum associated with the given numeric wire value.
 * @deprecated Use {@link #forNumber(int)} instead.
 */
@java.lang.Deprecated
public static IncompatibilityMode valueOf(int value) {
return forNumber(value);
}
/**
 * @param value The numeric wire value of the corresponding enum entry.
 * @return The enum associated with the given numeric wire value.
 */
public static IncompatibilityMode forNumber(int value) {
switch (value) {
case 0:
return INCOMPATIBILITY_MODE_UNSPECIFIED;
case 1:
return NOT_PERFORMED_BY_SAME_VEHICLE;
case 2:
return NOT_IN_SAME_VEHICLE_SIMULTANEOUSLY;
default:
// Unknown wire value; callers map null to UNRECOGNIZED.
return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap<IncompatibilityMode>
internalGetValueMap() {
return internalValueMap;
}
private static final com.google.protobuf.Internal.EnumLiteMap<IncompatibilityMode>
internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<IncompatibilityMode>() {
public IncompatibilityMode findValueByNumber(int number) {
return IncompatibilityMode.forNumber(number);
}
};
public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalStateException(
"Can't get the descriptor of an unrecognized enum value.");
}
return getDescriptor().getValues().get(ordinal());
}
public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
return com.google.cloud.optimization.v1.ShipmentTypeIncompatibility.getDescriptor()
.getEnumTypes()
.get(0);
}
private static final IncompatibilityMode[] VALUES = values();
public static IncompatibilityMode valueOf(
com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
}
if (desc.getIndex() == -1) {
return UNRECOGNIZED;
}
return VALUES[desc.getIndex()];
}
private final int value;
private IncompatibilityMode(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:google.cloud.optimization.v1.ShipmentTypeIncompatibility.IncompatibilityMode)
}
public static final int TYPES_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private com.google.protobuf.LazyStringArrayList types_ =
com.google.protobuf.LazyStringArrayList.emptyList();
/**
 *
 *
 * <pre>
 * List of incompatible types. Two shipments having different `shipment_types`
 * among those listed are "incompatible".
 * </pre>
 *
 * <code>repeated string types = 1;</code>
 *
 * @return A list containing the types.
 */
public com.google.protobuf.ProtocolStringList getTypesList() {
return types_;
}
/**
 *
 *
 * <pre>
 * List of incompatible types. Two shipments having different `shipment_types`
 * among those listed are "incompatible".
 * </pre>
 *
 * <code>repeated string types = 1;</code>
 *
 * @return The count of types.
 */
public int getTypesCount() {
return types_.size();
}
/**
 *
 *
 * <pre>
 * List of incompatible types. Two shipments having different `shipment_types`
 * among those listed are "incompatible".
 * </pre>
 *
 * <code>repeated string types = 1;</code>
 *
 * @param index The index of the element to return.
 * @return The types at the given index.
 */
public java.lang.String getTypes(int index) {
return types_.get(index);
}
/**
 *
 *
 * <pre>
 * List of incompatible types. Two shipments having different `shipment_types`
 * among those listed are "incompatible".
 * </pre>
 *
 * <code>repeated string types = 1;</code>
 *
 * @param index The index of the value to return.
 * @return The bytes of the types at the given index.
 */
public com.google.protobuf.ByteString getTypesBytes(int index) {
return types_.getByteString(index);
}
public static final int INCOMPATIBILITY_MODE_FIELD_NUMBER = 2;
// Stored as the raw wire value so unknown enum numbers round-trip.
private int incompatibilityMode_ = 0;
/**
 *
 *
 * <pre>
 * Mode applied to the incompatibility.
 * </pre>
 *
 * <code>
 * .google.cloud.optimization.v1.ShipmentTypeIncompatibility.IncompatibilityMode incompatibility_mode = 2;
 * </code>
 *
 * @return The enum numeric value on the wire for incompatibilityMode.
 */
@java.lang.Override
public int getIncompatibilityModeValue() {
return incompatibilityMode_;
}
/**
 *
 *
 * <pre>
 * Mode applied to the incompatibility.
 * </pre>
 *
 * <code>
 * .google.cloud.optimization.v1.ShipmentTypeIncompatibility.IncompatibilityMode incompatibility_mode = 2;
 * </code>
 *
 * @return The incompatibilityMode.
 */
@java.lang.Override
public com.google.cloud.optimization.v1.ShipmentTypeIncompatibility.IncompatibilityMode
getIncompatibilityMode() {
com.google.cloud.optimization.v1.ShipmentTypeIncompatibility.IncompatibilityMode result =
com.google.cloud.optimization.v1.ShipmentTypeIncompatibility.IncompatibilityMode.forNumber(
incompatibilityMode_);
// Wire values not known to this code version surface as UNRECOGNIZED.
return result == null
? com.google.cloud.optimization.v1.ShipmentTypeIncompatibility.IncompatibilityMode
.UNRECOGNIZED
: result;
}
private byte memoizedIsInitialized = -1;
// Proto3 message without required fields: always initialized; result memoized.
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
// Serializes set fields in field-number order; default values are skipped.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < types_.size(); i++) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, types_.getRaw(i));
}
if (incompatibilityMode_
!= com.google.cloud.optimization.v1.ShipmentTypeIncompatibility.IncompatibilityMode
.INCOMPATIBILITY_MODE_UNSPECIFIED
.getNumber()) {
output.writeEnum(2, incompatibilityMode_);
}
getUnknownFields().writeTo(output);
}
// Computes (and memoizes) the serialized byte size matching writeTo().
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
{
int dataSize = 0;
for (int i = 0; i < types_.size(); i++) {
dataSize += computeStringSizeNoTag(types_.getRaw(i));
}
size += dataSize;
size += 1 * getTypesList().size();
}
if (incompatibilityMode_
!= com.google.cloud.optimization.v1.ShipmentTypeIncompatibility.IncompatibilityMode
.INCOMPATIBILITY_MODE_UNSPECIFIED
.getNumber()) {
size += com.google.protobuf.CodedOutputStream.computeEnumSize(2, incompatibilityMode_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
// Field-wise equality, including unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.optimization.v1.ShipmentTypeIncompatibility)) {
return super.equals(obj);
}
com.google.cloud.optimization.v1.ShipmentTypeIncompatibility other =
(com.google.cloud.optimization.v1.ShipmentTypeIncompatibility) obj;
if (!getTypesList().equals(other.getTypesList())) return false;
if (incompatibilityMode_ != other.incompatibilityMode_) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
// Memoized hash consistent with equals(); constants fixed by the generator.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getTypesCount() > 0) {
hash = (37 * hash) + TYPES_FIELD_NUMBER;
hash = (53 * hash) + getTypesList().hashCode();
}
hash = (37 * hash) + INCOMPATIBILITY_MODE_FIELD_NUMBER;
hash = (53 * hash) + incompatibilityMode_;
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parse entry points for all supported input sources.
public static com.google.cloud.optimization.v1.ShipmentTypeIncompatibility parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.optimization.v1.ShipmentTypeIncompatibility parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.optimization.v1.ShipmentTypeIncompatibility parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.optimization.v1.ShipmentTypeIncompatibility parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.optimization.v1.ShipmentTypeIncompatibility parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.optimization.v1.ShipmentTypeIncompatibility parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.optimization.v1.ShipmentTypeIncompatibility parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.optimization.v1.ShipmentTypeIncompatibility parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Delimited variants read a length-prefixed message from the stream.
public static com.google.cloud.optimization.v1.ShipmentTypeIncompatibility parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.optimization.v1.ShipmentTypeIncompatibility parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.optimization.v1.ShipmentTypeIncompatibility parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.optimization.v1.ShipmentTypeIncompatibility parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Builder factory methods; toBuilder() avoids copying the default instance.
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.optimization.v1.ShipmentTypeIncompatibility prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Specifies incompatibilities between shipments depending on their
* shipment_type. The appearance of incompatible shipments on the same route is
* restricted based on the incompatibility mode.
* </pre>
*
* Protobuf type {@code google.cloud.optimization.v1.ShipmentTypeIncompatibility}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.optimization.v1.ShipmentTypeIncompatibility)
com.google.cloud.optimization.v1.ShipmentTypeIncompatibilityOrBuilder {
// Generated code: descriptor and reflective field-accessor table lookups.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.optimization.v1.FleetRoutingProto
.internal_static_google_cloud_optimization_v1_ShipmentTypeIncompatibility_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.optimization.v1.FleetRoutingProto
.internal_static_google_cloud_optimization_v1_ShipmentTypeIncompatibility_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.optimization.v1.ShipmentTypeIncompatibility.class,
com.google.cloud.optimization.v1.ShipmentTypeIncompatibility.Builder.class);
}
// Construct using com.google.cloud.optimization.v1.ShipmentTypeIncompatibility.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
// Resets all fields and the has-bits to their defaults.
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
types_ = com.google.protobuf.LazyStringArrayList.emptyList();
incompatibilityMode_ = 0;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.optimization.v1.FleetRoutingProto
.internal_static_google_cloud_optimization_v1_ShipmentTypeIncompatibility_descriptor;
}
@java.lang.Override
public com.google.cloud.optimization.v1.ShipmentTypeIncompatibility
getDefaultInstanceForType() {
return com.google.cloud.optimization.v1.ShipmentTypeIncompatibility.getDefaultInstance();
}
// build() verifies initialization; buildPartial() copies only set fields.
@java.lang.Override
public com.google.cloud.optimization.v1.ShipmentTypeIncompatibility build() {
com.google.cloud.optimization.v1.ShipmentTypeIncompatibility result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.optimization.v1.ShipmentTypeIncompatibility buildPartial() {
com.google.cloud.optimization.v1.ShipmentTypeIncompatibility result =
new com.google.cloud.optimization.v1.ShipmentTypeIncompatibility(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Transfers fields guarded by their has-bits into the message instance.
private void buildPartial0(
com.google.cloud.optimization.v1.ShipmentTypeIncompatibility result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
types_.makeImmutable();
result.types_ = types_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.incompatibilityMode_ = incompatibilityMode_;
}
}
// Boilerplate delegations to the reflective base-class implementations.
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
// Dispatches to the typed merge when possible; falls back to reflection.
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.optimization.v1.ShipmentTypeIncompatibility) {
return mergeFrom((com.google.cloud.optimization.v1.ShipmentTypeIncompatibility) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Merges field-by-field: repeated fields are appended, scalars overwritten
// when non-default, and unknown fields merged.
public Builder mergeFrom(com.google.cloud.optimization.v1.ShipmentTypeIncompatibility other) {
if (other
== com.google.cloud.optimization.v1.ShipmentTypeIncompatibility.getDefaultInstance())
return this;
if (!other.types_.isEmpty()) {
if (types_.isEmpty()) {
// Share the other message's (immutable) list instead of copying.
types_ = other.types_;
bitField0_ |= 0x00000001;
} else {
ensureTypesIsMutable();
types_.addAll(other.types_);
}
onChanged();
}
if (other.incompatibilityMode_ != 0) {
setIncompatibilityModeValue(other.getIncompatibilityModeValue());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Wire-format parse loop: dispatches on tags (field number << 3 | wire type),
// collecting unrecognized fields into the unknown-field set.
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
java.lang.String s = input.readStringRequireUtf8();
ensureTypesIsMutable();
types_.add(s);
break;
} // case 10
case 16:
{
incompatibilityMode_ = input.readEnum();
bitField0_ |= 0x00000002;
break;
} // case 16
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
// Builder state for the `types` field. bitField0_ bit 0x1 tracks whether
// `types` has been set; `types_` starts as the shared immutable empty list and
// is copied on first mutation (copy-on-write).
private int bitField0_;
private com.google.protobuf.LazyStringArrayList types_ =
    com.google.protobuf.LazyStringArrayList.emptyList();
// Replaces types_ with a mutable copy if it is currently immutable (e.g. the
// shared empty list or a list borrowed from another message in mergeFrom).
private void ensureTypesIsMutable() {
  if (!types_.isModifiable()) {
    types_ = new com.google.protobuf.LazyStringArrayList(types_);
  }
  bitField0_ |= 0x00000001;
}
/**
 *
 *
 * <pre>
 * List of incompatible types. Two shipments having different `shipment_types`
 * among those listed are "incompatible".
 * </pre>
 *
 * <code>repeated string types = 1;</code>
 *
 * @return A list containing the types.
 */
public com.google.protobuf.ProtocolStringList getTypesList() {
  // Freeze before exposing so callers cannot mutate builder state.
  types_.makeImmutable();
  return types_;
}
/**
 *
 *
 * <pre>
 * List of incompatible types. Two shipments having different `shipment_types`
 * among those listed are "incompatible".
 * </pre>
 *
 * <code>repeated string types = 1;</code>
 *
 * @return The count of types.
 */
public int getTypesCount() {
  return types_.size();
}
/**
 *
 *
 * <pre>
 * List of incompatible types. Two shipments having different `shipment_types`
 * among those listed are "incompatible".
 * </pre>
 *
 * <code>repeated string types = 1;</code>
 *
 * @param index The index of the element to return.
 * @return The types at the given index.
 */
public java.lang.String getTypes(int index) {
  return types_.get(index);
}
/**
 *
 *
 * <pre>
 * List of incompatible types. Two shipments having different `shipment_types`
 * among those listed are "incompatible".
 * </pre>
 *
 * <code>repeated string types = 1;</code>
 *
 * @param index The index of the value to return.
 * @return The bytes of the types at the given index.
 */
public com.google.protobuf.ByteString getTypesBytes(int index) {
  return types_.getByteString(index);
}
/**
 *
 *
 * <pre>
 * List of incompatible types. Two shipments having different `shipment_types`
 * among those listed are "incompatible".
 * </pre>
 *
 * <code>repeated string types = 1;</code>
 *
 * @param index The index to set the value at.
 * @param value The types to set.
 * @return This builder for chaining.
 */
public Builder setTypes(int index, java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  ensureTypesIsMutable();
  types_.set(index, value);
  bitField0_ |= 0x00000001;
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * List of incompatible types. Two shipments having different `shipment_types`
 * among those listed are "incompatible".
 * </pre>
 *
 * <code>repeated string types = 1;</code>
 *
 * @param value The types to add.
 * @return This builder for chaining.
 */
public Builder addTypes(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  ensureTypesIsMutable();
  types_.add(value);
  bitField0_ |= 0x00000001;
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * List of incompatible types. Two shipments having different `shipment_types`
 * among those listed are "incompatible".
 * </pre>
 *
 * <code>repeated string types = 1;</code>
 *
 * @param values The types to add.
 * @return This builder for chaining.
 */
public Builder addAllTypes(java.lang.Iterable<java.lang.String> values) {
  ensureTypesIsMutable();
  // AbstractMessageLite.Builder.addAll null-checks each element.
  com.google.protobuf.AbstractMessageLite.Builder.addAll(values, types_);
  bitField0_ |= 0x00000001;
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * List of incompatible types. Two shipments having different `shipment_types`
 * among those listed are "incompatible".
 * </pre>
 *
 * <code>repeated string types = 1;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearTypes() {
  // Reset to the shared immutable empty list and clear the has-bit.
  types_ = com.google.protobuf.LazyStringArrayList.emptyList();
  bitField0_ = (bitField0_ & ~0x00000001);
  // NOTE(review): dropped a stray empty statement (`;`) that protoc emitted
  // here; it was a no-op.
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * List of incompatible types. Two shipments having different `shipment_types`
 * among those listed are "incompatible".
 * </pre>
 *
 * <code>repeated string types = 1;</code>
 *
 * @param value The bytes of the types to add.
 * @return This builder for chaining.
 */
public Builder addTypesBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  // Proto3 strings must be valid UTF-8; reject invalid bytes up front.
  checkByteStringIsUtf8(value);
  ensureTypesIsMutable();
  types_.add(value);
  bitField0_ |= 0x00000001;
  onChanged();
  return this;
}
// Builder state for the `incompatibility_mode` enum, stored as its wire
// (numeric) value; bitField0_ bit 0x2 tracks whether it has been set.
private int incompatibilityMode_ = 0;
/**
 *
 *
 * <pre>
 * Mode applied to the incompatibility.
 * </pre>
 *
 * <code>
 * .google.cloud.optimization.v1.ShipmentTypeIncompatibility.IncompatibilityMode incompatibility_mode = 2;
 * </code>
 *
 * @return The enum numeric value on the wire for incompatibilityMode.
 */
@java.lang.Override
public int getIncompatibilityModeValue() {
  return incompatibilityMode_;
}
/**
 *
 *
 * <pre>
 * Mode applied to the incompatibility.
 * </pre>
 *
 * <code>
 * .google.cloud.optimization.v1.ShipmentTypeIncompatibility.IncompatibilityMode incompatibility_mode = 2;
 * </code>
 *
 * @param value The enum numeric value on the wire for incompatibilityMode to set.
 * @return This builder for chaining.
 */
public Builder setIncompatibilityModeValue(int value) {
  incompatibilityMode_ = value;
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * Mode applied to the incompatibility.
 * </pre>
 *
 * <code>
 * .google.cloud.optimization.v1.ShipmentTypeIncompatibility.IncompatibilityMode incompatibility_mode = 2;
 * </code>
 *
 * @return The incompatibilityMode.
 */
@java.lang.Override
public com.google.cloud.optimization.v1.ShipmentTypeIncompatibility.IncompatibilityMode
    getIncompatibilityMode() {
  // Map the stored numeric value back to the enum; unknown wire values map to
  // UNRECOGNIZED rather than null.
  com.google.cloud.optimization.v1.ShipmentTypeIncompatibility.IncompatibilityMode result =
      com.google.cloud.optimization.v1.ShipmentTypeIncompatibility.IncompatibilityMode
          .forNumber(incompatibilityMode_);
  return result == null
      ? com.google.cloud.optimization.v1.ShipmentTypeIncompatibility.IncompatibilityMode
          .UNRECOGNIZED
      : result;
}
/**
 *
 *
 * <pre>
 * Mode applied to the incompatibility.
 * </pre>
 *
 * <code>
 * .google.cloud.optimization.v1.ShipmentTypeIncompatibility.IncompatibilityMode incompatibility_mode = 2;
 * </code>
 *
 * @param value The incompatibilityMode to set.
 * @return This builder for chaining.
 */
public Builder setIncompatibilityMode(
    com.google.cloud.optimization.v1.ShipmentTypeIncompatibility.IncompatibilityMode value) {
  if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
  incompatibilityMode_ = value.getNumber();
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * Mode applied to the incompatibility.
 * </pre>
 *
 * <code>
 * .google.cloud.optimization.v1.ShipmentTypeIncompatibility.IncompatibilityMode incompatibility_mode = 2;
 * </code>
 *
 * @return This builder for chaining.
 */
public Builder clearIncompatibilityMode() {
  bitField0_ = (bitField0_ & ~0x00000002);
  incompatibilityMode_ = 0;
  onChanged();
  return this;
}
// Unknown-field handling is delegated unchanged to the generated superclass.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.optimization.v1.ShipmentTypeIncompatibility)
}
// @@protoc_insertion_point(class_scope:google.cloud.optimization.v1.ShipmentTypeIncompatibility)
// Singleton default instance shared by getDefaultInstance()/getDefaultInstanceForType().
private static final com.google.cloud.optimization.v1.ShipmentTypeIncompatibility
    DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new com.google.cloud.optimization.v1.ShipmentTypeIncompatibility();
}
public static com.google.cloud.optimization.v1.ShipmentTypeIncompatibility getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Parser that builds via a Builder and attaches the partially-built message to
// any parse exception so callers can inspect what was read before the failure.
private static final com.google.protobuf.Parser<ShipmentTypeIncompatibility> PARSER =
    new com.google.protobuf.AbstractParser<ShipmentTypeIncompatibility>() {
      @java.lang.Override
      public ShipmentTypeIncompatibility parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
public static com.google.protobuf.Parser<ShipmentTypeIncompatibility> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ShipmentTypeIncompatibility> getParserForType() {
  return PARSER;
}
@java.lang.Override
public com.google.cloud.optimization.v1.ShipmentTypeIncompatibility getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
apache/storm | 36,212 | storm-client/src/jvm/org/apache/storm/generated/ExecutorStats.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Autogenerated by Thrift Compiler (0.22.0)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package org.apache.storm.generated;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"})
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.22.0)")
public class ExecutorStats implements org.apache.storm.thrift.TBase<ExecutorStats, ExecutorStats._Fields>, java.io.Serializable, Cloneable, Comparable<ExecutorStats> {
// Thrift wire descriptors for this struct and its four fields
// (id 1 emitted, id 2 transferred, id 3 specific, id 4 rate).
private static final org.apache.storm.thrift.protocol.TStruct STRUCT_DESC = new org.apache.storm.thrift.protocol.TStruct("ExecutorStats");
private static final org.apache.storm.thrift.protocol.TField EMITTED_FIELD_DESC = new org.apache.storm.thrift.protocol.TField("emitted", org.apache.storm.thrift.protocol.TType.MAP, (short)1);
private static final org.apache.storm.thrift.protocol.TField TRANSFERRED_FIELD_DESC = new org.apache.storm.thrift.protocol.TField("transferred", org.apache.storm.thrift.protocol.TType.MAP, (short)2);
private static final org.apache.storm.thrift.protocol.TField SPECIFIC_FIELD_DESC = new org.apache.storm.thrift.protocol.TField("specific", org.apache.storm.thrift.protocol.TType.STRUCT, (short)3);
private static final org.apache.storm.thrift.protocol.TField RATE_FIELD_DESC = new org.apache.storm.thrift.protocol.TField("rate", org.apache.storm.thrift.protocol.TType.DOUBLE, (short)4);
// Scheme factories select between the standard (field-tagged) and tuple
// (compact) serialization layouts depending on the protocol in use.
private static final org.apache.storm.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new ExecutorStatsStandardSchemeFactory();
private static final org.apache.storm.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new ExecutorStatsTupleSchemeFactory();
// Instance fields; all four are required by the Thrift IDL.
// emitted/transferred: stream-id -> (component/stream key -> count). The exact
// key semantics are defined by the .thrift IDL, not visible here.
private @org.apache.storm.thrift.annotation.Nullable java.util.Map<java.lang.String,java.util.Map<java.lang.String,java.lang.Long>> emitted; // required
private @org.apache.storm.thrift.annotation.Nullable java.util.Map<java.lang.String,java.util.Map<java.lang.String,java.lang.Long>> transferred; // required
private @org.apache.storm.thrift.annotation.Nullable ExecutorSpecificStats specific; // required
private double rate; // required
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.storm.thrift.TFieldIdEnum {
  EMITTED((short)1, "emitted"),
  TRANSFERRED((short)2, "transferred"),
  SPECIFIC((short)3, "specific"),
  RATE((short)4, "rate");
  // Lookup table from Thrift field name to enum constant, built once.
  private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>();
  static {
    for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) {
      byName.put(field.getFieldName(), field);
    }
  }
  /**
   * Find the _Fields constant that matches fieldId, or null if its not found.
   */
  @org.apache.storm.thrift.annotation.Nullable
  public static _Fields findByThriftId(int fieldId) {
    switch(fieldId) {
      case 1: // EMITTED
        return EMITTED;
      case 2: // TRANSFERRED
        return TRANSFERRED;
      case 3: // SPECIFIC
        return SPECIFIC;
      case 4: // RATE
        return RATE;
      default:
        return null;
    }
  }
  /**
   * Find the _Fields constant that matches fieldId, throwing an exception
   * if it is not found.
   */
  public static _Fields findByThriftIdOrThrow(int fieldId) {
    _Fields fields = findByThriftId(fieldId);
    if (fields == null) throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!");
    return fields;
  }
  /**
   * Find the _Fields constant that matches name, or null if its not found.
   */
  @org.apache.storm.thrift.annotation.Nullable
  public static _Fields findByName(java.lang.String name) {
    return byName.get(name);
  }
  private final short _thriftId;
  private final java.lang.String _fieldName;
  _Fields(short thriftId, java.lang.String fieldName) {
    _thriftId = thriftId;
    _fieldName = fieldName;
  }
  @Override
  public short getThriftFieldId() {
    return _thriftId;
  }
  @Override
  public java.lang.String getFieldName() {
    return _fieldName;
  }
}
// isset id assignments
// `rate` is the only primitive field, so a single bit in __isset_bitfield
// tracks whether it has been assigned (object fields use null instead).
private static final int __RATE_ISSET_ID = 0;
private byte __isset_bitfield = 0;
// Static metadata describing each field's requiredness and wire type,
// registered with the global Thrift metadata registry.
public static final java.util.Map<_Fields, org.apache.storm.thrift.meta_data.FieldMetaData> metaDataMap;
static {
  java.util.Map<_Fields, org.apache.storm.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.storm.thrift.meta_data.FieldMetaData>(_Fields.class);
  tmpMap.put(_Fields.EMITTED, new org.apache.storm.thrift.meta_data.FieldMetaData("emitted", org.apache.storm.thrift.TFieldRequirementType.REQUIRED,
      new org.apache.storm.thrift.meta_data.MapMetaData(org.apache.storm.thrift.protocol.TType.MAP,
          new org.apache.storm.thrift.meta_data.FieldValueMetaData(org.apache.storm.thrift.protocol.TType.STRING),
          new org.apache.storm.thrift.meta_data.MapMetaData(org.apache.storm.thrift.protocol.TType.MAP,
              new org.apache.storm.thrift.meta_data.FieldValueMetaData(org.apache.storm.thrift.protocol.TType.STRING),
              new org.apache.storm.thrift.meta_data.FieldValueMetaData(org.apache.storm.thrift.protocol.TType.I64)))));
  tmpMap.put(_Fields.TRANSFERRED, new org.apache.storm.thrift.meta_data.FieldMetaData("transferred", org.apache.storm.thrift.TFieldRequirementType.REQUIRED,
      new org.apache.storm.thrift.meta_data.MapMetaData(org.apache.storm.thrift.protocol.TType.MAP,
          new org.apache.storm.thrift.meta_data.FieldValueMetaData(org.apache.storm.thrift.protocol.TType.STRING),
          new org.apache.storm.thrift.meta_data.MapMetaData(org.apache.storm.thrift.protocol.TType.MAP,
              new org.apache.storm.thrift.meta_data.FieldValueMetaData(org.apache.storm.thrift.protocol.TType.STRING),
              new org.apache.storm.thrift.meta_data.FieldValueMetaData(org.apache.storm.thrift.protocol.TType.I64)))));
  tmpMap.put(_Fields.SPECIFIC, new org.apache.storm.thrift.meta_data.FieldMetaData("specific", org.apache.storm.thrift.TFieldRequirementType.REQUIRED,
      new org.apache.storm.thrift.meta_data.StructMetaData(org.apache.storm.thrift.protocol.TType.STRUCT, ExecutorSpecificStats.class)));
  tmpMap.put(_Fields.RATE, new org.apache.storm.thrift.meta_data.FieldMetaData("rate", org.apache.storm.thrift.TFieldRequirementType.REQUIRED,
      new org.apache.storm.thrift.meta_data.FieldValueMetaData(org.apache.storm.thrift.protocol.TType.DOUBLE)));
  metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
  org.apache.storm.thrift.meta_data.FieldMetaData.addStructMetaDataMap(ExecutorStats.class, metaDataMap);
}
// No-arg constructor: all fields unset (required fields must be filled before
// write(); see validate()).
public ExecutorStats() {
}
// All-fields constructor; marks `rate` as set since primitives have no null.
public ExecutorStats(
  java.util.Map<java.lang.String,java.util.Map<java.lang.String,java.lang.Long>> emitted,
  java.util.Map<java.lang.String,java.util.Map<java.lang.String,java.lang.Long>> transferred,
  ExecutorSpecificStats specific,
  double rate)
{
  this();
  this.emitted = emitted;
  this.transferred = transferred;
  this.specific = specific;
  this.rate = rate;
  set_rate_isSet(true);
}
/**
 * Performs a deep copy on <i>other</i>.
 */
public ExecutorStats(ExecutorStats other) {
  __isset_bitfield = other.__isset_bitfield;
  if (other.is_set_emitted()) {
    // Copy the outer map and each inner map (keys/values are immutable).
    java.util.Map<java.lang.String,java.util.Map<java.lang.String,java.lang.Long>> __this__emitted = new java.util.HashMap<java.lang.String,java.util.Map<java.lang.String,java.lang.Long>>(other.emitted.size());
    for (java.util.Map.Entry<java.lang.String, java.util.Map<java.lang.String,java.lang.Long>> other_element : other.emitted.entrySet()) {
      java.lang.String other_element_key = other_element.getKey();
      java.util.Map<java.lang.String,java.lang.Long> other_element_value = other_element.getValue();
      java.lang.String __this__emitted_copy_key = other_element_key;
      java.util.Map<java.lang.String,java.lang.Long> __this__emitted_copy_value = new java.util.HashMap<java.lang.String,java.lang.Long>(other_element_value);
      __this__emitted.put(__this__emitted_copy_key, __this__emitted_copy_value);
    }
    this.emitted = __this__emitted;
  }
  if (other.is_set_transferred()) {
    java.util.Map<java.lang.String,java.util.Map<java.lang.String,java.lang.Long>> __this__transferred = new java.util.HashMap<java.lang.String,java.util.Map<java.lang.String,java.lang.Long>>(other.transferred.size());
    for (java.util.Map.Entry<java.lang.String, java.util.Map<java.lang.String,java.lang.Long>> other_element : other.transferred.entrySet()) {
      java.lang.String other_element_key = other_element.getKey();
      java.util.Map<java.lang.String,java.lang.Long> other_element_value = other_element.getValue();
      java.lang.String __this__transferred_copy_key = other_element_key;
      java.util.Map<java.lang.String,java.lang.Long> __this__transferred_copy_value = new java.util.HashMap<java.lang.String,java.lang.Long>(other_element_value);
      __this__transferred.put(__this__transferred_copy_key, __this__transferred_copy_value);
    }
    this.transferred = __this__transferred;
  }
  if (other.is_set_specific()) {
    this.specific = new ExecutorSpecificStats(other.specific);
  }
  this.rate = other.rate;
}
@Override
public ExecutorStats deepCopy() {
  return new ExecutorStats(this);
}
// Resets every field to its unset state.
@Override
public void clear() {
  this.emitted = null;
  this.transferred = null;
  this.specific = null;
  set_rate_isSet(false);
  this.rate = 0.0;
}
// --- emitted accessors (map field; unset is represented by null) ---
public int get_emitted_size() {
  return (this.emitted == null) ? 0 : this.emitted.size();
}
// Lazily creates the map on first put.
public void put_to_emitted(java.lang.String key, java.util.Map<java.lang.String,java.lang.Long> val) {
  if (this.emitted == null) {
    this.emitted = new java.util.HashMap<java.lang.String,java.util.Map<java.lang.String,java.lang.Long>>();
  }
  this.emitted.put(key, val);
}
@org.apache.storm.thrift.annotation.Nullable
public java.util.Map<java.lang.String,java.util.Map<java.lang.String,java.lang.Long>> get_emitted() {
  return this.emitted;
}
public void set_emitted(@org.apache.storm.thrift.annotation.Nullable java.util.Map<java.lang.String,java.util.Map<java.lang.String,java.lang.Long>> emitted) {
  this.emitted = emitted;
}
public void unset_emitted() {
  this.emitted = null;
}
/** Returns true if field emitted is set (has been assigned a value) and false otherwise */
public boolean is_set_emitted() {
  return this.emitted != null;
}
public void set_emitted_isSet(boolean value) {
  if (!value) {
    this.emitted = null;
  }
}
// --- transferred accessors (same shape as emitted) ---
public int get_transferred_size() {
  return (this.transferred == null) ? 0 : this.transferred.size();
}
public void put_to_transferred(java.lang.String key, java.util.Map<java.lang.String,java.lang.Long> val) {
  if (this.transferred == null) {
    this.transferred = new java.util.HashMap<java.lang.String,java.util.Map<java.lang.String,java.lang.Long>>();
  }
  this.transferred.put(key, val);
}
@org.apache.storm.thrift.annotation.Nullable
public java.util.Map<java.lang.String,java.util.Map<java.lang.String,java.lang.Long>> get_transferred() {
  return this.transferred;
}
public void set_transferred(@org.apache.storm.thrift.annotation.Nullable java.util.Map<java.lang.String,java.util.Map<java.lang.String,java.lang.Long>> transferred) {
  this.transferred = transferred;
}
public void unset_transferred() {
  this.transferred = null;
}
/** Returns true if field transferred is set (has been assigned a value) and false otherwise */
public boolean is_set_transferred() {
  return this.transferred != null;
}
public void set_transferred_isSet(boolean value) {
  if (!value) {
    this.transferred = null;
  }
}
// --- specific accessors (struct field) ---
@org.apache.storm.thrift.annotation.Nullable
public ExecutorSpecificStats get_specific() {
  return this.specific;
}
public void set_specific(@org.apache.storm.thrift.annotation.Nullable ExecutorSpecificStats specific) {
  this.specific = specific;
}
public void unset_specific() {
  this.specific = null;
}
/** Returns true if field specific is set (has been assigned a value) and false otherwise */
public boolean is_set_specific() {
  return this.specific != null;
}
public void set_specific_isSet(boolean value) {
  if (!value) {
    this.specific = null;
  }
}
// --- rate accessors (primitive; set-state tracked in __isset_bitfield) ---
public double get_rate() {
  return this.rate;
}
public void set_rate(double rate) {
  this.rate = rate;
  set_rate_isSet(true);
}
public void unset_rate() {
  __isset_bitfield = org.apache.storm.thrift.EncodingUtils.clearBit(__isset_bitfield, __RATE_ISSET_ID);
}
/** Returns true if field rate is set (has been assigned a value) and false otherwise */
public boolean is_set_rate() {
  return org.apache.storm.thrift.EncodingUtils.testBit(__isset_bitfield, __RATE_ISSET_ID);
}
public void set_rate_isSet(boolean value) {
  __isset_bitfield = org.apache.storm.thrift.EncodingUtils.setBit(__isset_bitfield, __RATE_ISSET_ID, value);
}
// Reflective field access by _Fields id; a null value means "unset".
@Override
public void setFieldValue(_Fields field, @org.apache.storm.thrift.annotation.Nullable java.lang.Object value) {
  switch (field) {
  case EMITTED:
    if (value == null) {
      unset_emitted();
    } else {
      set_emitted((java.util.Map<java.lang.String,java.util.Map<java.lang.String,java.lang.Long>>)value);
    }
    break;
  case TRANSFERRED:
    if (value == null) {
      unset_transferred();
    } else {
      set_transferred((java.util.Map<java.lang.String,java.util.Map<java.lang.String,java.lang.Long>>)value);
    }
    break;
  case SPECIFIC:
    if (value == null) {
      unset_specific();
    } else {
      set_specific((ExecutorSpecificStats)value);
    }
    break;
  case RATE:
    if (value == null) {
      unset_rate();
    } else {
      set_rate((java.lang.Double)value);
    }
    break;
  }
}
@org.apache.storm.thrift.annotation.Nullable
@Override
public java.lang.Object getFieldValue(_Fields field) {
  switch (field) {
  case EMITTED:
    return get_emitted();
  case TRANSFERRED:
    return get_transferred();
  case SPECIFIC:
    return get_specific();
  case RATE:
    return get_rate();
  }
  // Unreachable for valid enum values; guards against corrupted input.
  throw new java.lang.IllegalStateException();
}
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
@Override
public boolean isSet(_Fields field) {
  if (field == null) {
    throw new java.lang.IllegalArgumentException();
  }
  switch (field) {
  case EMITTED:
    return is_set_emitted();
  case TRANSFERRED:
    return is_set_transferred();
  case SPECIFIC:
    return is_set_specific();
  case RATE:
    return is_set_rate();
  }
  throw new java.lang.IllegalStateException();
}
@Override
public boolean equals(java.lang.Object that) {
  if (that instanceof ExecutorStats)
    return this.equals((ExecutorStats)that);
  return false;
}
// Field-by-field equality: two structs are equal when each field's set-state
// matches and, when set, the values are equal.
public boolean equals(ExecutorStats that) {
  if (that == null)
    return false;
  if (this == that)
    return true;
  boolean this_present_emitted = true && this.is_set_emitted();
  boolean that_present_emitted = true && that.is_set_emitted();
  if (this_present_emitted || that_present_emitted) {
    if (!(this_present_emitted && that_present_emitted))
      return false;
    if (!this.emitted.equals(that.emitted))
      return false;
  }
  boolean this_present_transferred = true && this.is_set_transferred();
  boolean that_present_transferred = true && that.is_set_transferred();
  if (this_present_transferred || that_present_transferred) {
    if (!(this_present_transferred && that_present_transferred))
      return false;
    if (!this.transferred.equals(that.transferred))
      return false;
  }
  boolean this_present_specific = true && this.is_set_specific();
  boolean that_present_specific = true && that.is_set_specific();
  if (this_present_specific || that_present_specific) {
    if (!(this_present_specific && that_present_specific))
      return false;
    if (!this.specific.equals(that.specific))
      return false;
  }
  // rate is primitive, so it is always "present"; compare values directly.
  boolean this_present_rate = true;
  boolean that_present_rate = true;
  if (this_present_rate || that_present_rate) {
    if (!(this_present_rate && that_present_rate))
      return false;
    if (this.rate != that.rate)
      return false;
  }
  return true;
}
// Hash combines set-state markers and set values, consistent with equals().
@Override
public int hashCode() {
  int hashCode = 1;
  hashCode = hashCode * 8191 + ((is_set_emitted()) ? 131071 : 524287);
  if (is_set_emitted())
    hashCode = hashCode * 8191 + emitted.hashCode();
  hashCode = hashCode * 8191 + ((is_set_transferred()) ? 131071 : 524287);
  if (is_set_transferred())
    hashCode = hashCode * 8191 + transferred.hashCode();
  hashCode = hashCode * 8191 + ((is_set_specific()) ? 131071 : 524287);
  if (is_set_specific())
    hashCode = hashCode * 8191 + specific.hashCode();
  hashCode = hashCode * 8191 + org.apache.storm.thrift.TBaseHelper.hashCode(rate);
  return hashCode;
}
// Orders structs field-by-field in declaration order; for each field an unset
// value sorts before a set one, then set values are compared via TBaseHelper.
@Override
public int compareTo(ExecutorStats other) {
  if (!getClass().equals(other.getClass())) {
    return getClass().getName().compareTo(other.getClass().getName());
  }
  int lastComparison = 0;
  lastComparison = java.lang.Boolean.compare(is_set_emitted(), other.is_set_emitted());
  if (lastComparison != 0) {
    return lastComparison;
  }
  if (is_set_emitted()) {
    lastComparison = org.apache.storm.thrift.TBaseHelper.compareTo(this.emitted, other.emitted);
    if (lastComparison != 0) {
      return lastComparison;
    }
  }
  lastComparison = java.lang.Boolean.compare(is_set_transferred(), other.is_set_transferred());
  if (lastComparison != 0) {
    return lastComparison;
  }
  if (is_set_transferred()) {
    lastComparison = org.apache.storm.thrift.TBaseHelper.compareTo(this.transferred, other.transferred);
    if (lastComparison != 0) {
      return lastComparison;
    }
  }
  lastComparison = java.lang.Boolean.compare(is_set_specific(), other.is_set_specific());
  if (lastComparison != 0) {
    return lastComparison;
  }
  if (is_set_specific()) {
    lastComparison = org.apache.storm.thrift.TBaseHelper.compareTo(this.specific, other.specific);
    if (lastComparison != 0) {
      return lastComparison;
    }
  }
  lastComparison = java.lang.Boolean.compare(is_set_rate(), other.is_set_rate());
  if (lastComparison != 0) {
    return lastComparison;
  }
  if (is_set_rate()) {
    lastComparison = org.apache.storm.thrift.TBaseHelper.compareTo(this.rate, other.rate);
    if (lastComparison != 0) {
      return lastComparison;
    }
  }
  return 0;
}
@org.apache.storm.thrift.annotation.Nullable
@Override
public _Fields fieldForId(int fieldId) {
  return _Fields.findByThriftId(fieldId);
}
// (De)serialization entry points; the concrete layout is chosen by the
// protocol's scheme (standard vs tuple).
@Override
public void read(org.apache.storm.thrift.protocol.TProtocol iprot) throws org.apache.storm.thrift.TException {
  scheme(iprot).read(iprot, this);
}
@Override
public void write(org.apache.storm.thrift.protocol.TProtocol oprot) throws org.apache.storm.thrift.TException {
  scheme(oprot).write(oprot, this);
}
@Override
public java.lang.String toString() {
  java.lang.StringBuilder sb = new java.lang.StringBuilder("ExecutorStats(");
  // NOTE: the `first` flag is generator boilerplate; since every field is
  // required it is effectively constant after the first field.
  boolean first = true;
  sb.append("emitted:");
  if (this.emitted == null) {
    sb.append("null");
  } else {
    sb.append(this.emitted);
  }
  first = false;
  if (!first) sb.append(", ");
  sb.append("transferred:");
  if (this.transferred == null) {
    sb.append("null");
  } else {
    sb.append(this.transferred);
  }
  first = false;
  if (!first) sb.append(", ");
  sb.append("specific:");
  if (this.specific == null) {
    sb.append("null");
  } else {
    sb.append(this.specific);
  }
  first = false;
  if (!first) sb.append(", ");
  sb.append("rate:");
  sb.append(this.rate);
  first = false;
  sb.append(")");
  return sb.toString();
}
// Throws if any required field is unset; called before write() and after read().
public void validate() throws org.apache.storm.thrift.TException {
  // check for required fields
  if (!is_set_emitted()) {
    throw new org.apache.storm.thrift.protocol.TProtocolException("Required field 'emitted' is unset! Struct:" + toString());
  }
  if (!is_set_transferred()) {
    throw new org.apache.storm.thrift.protocol.TProtocolException("Required field 'transferred' is unset! Struct:" + toString());
  }
  if (!is_set_specific()) {
    throw new org.apache.storm.thrift.protocol.TProtocolException("Required field 'specific' is unset! Struct:" + toString());
  }
  if (!is_set_rate()) {
    throw new org.apache.storm.thrift.protocol.TProtocolException("Required field 'rate' is unset! Struct:" + toString());
  }
  // check for sub-struct validity
}
// Java serialization is implemented by round-tripping through the Thrift
// compact protocol, so the wire format stays the single source of truth.
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
  try {
    write(new org.apache.storm.thrift.protocol.TCompactProtocol(new org.apache.storm.thrift.transport.TIOStreamTransport(out)));
  } catch (org.apache.storm.thrift.TException te) {
    throw new java.io.IOException(te);
  }
}
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException {
  try {
    // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
    __isset_bitfield = 0;
    read(new org.apache.storm.thrift.protocol.TCompactProtocol(new org.apache.storm.thrift.transport.TIOStreamTransport(in)));
  } catch (org.apache.storm.thrift.TException te) {
    throw new java.io.IOException(te);
  }
}
// Factory for the standard (field-tagged) serialization scheme.
private static class ExecutorStatsStandardSchemeFactory implements org.apache.storm.thrift.scheme.SchemeFactory {
  @Override
  public ExecutorStatsStandardScheme getScheme() {
    return new ExecutorStatsStandardScheme();
  }
}
private static class ExecutorStatsStandardScheme extends org.apache.storm.thrift.scheme.StandardScheme<ExecutorStats> {
// Reads an ExecutorStats from the standard protocol: loops over tagged fields
// until STOP, skipping fields whose wire type doesn't match the schema, then
// validates that all required fields were present.
@Override
public void read(org.apache.storm.thrift.protocol.TProtocol iprot, ExecutorStats struct) throws org.apache.storm.thrift.TException {
  org.apache.storm.thrift.protocol.TField schemeField;
  iprot.readStructBegin();
  while (true)
  {
    schemeField = iprot.readFieldBegin();
    if (schemeField.type == org.apache.storm.thrift.protocol.TType.STOP) {
      break;
    }
    switch (schemeField.id) {
      case 1: // EMITTED
        if (schemeField.type == org.apache.storm.thrift.protocol.TType.MAP) {
          {
            // Outer map<string, map<string, i64>>; presized to 2*size to
            // keep the HashMap below its load factor.
            org.apache.storm.thrift.protocol.TMap _map350 = iprot.readMapBegin();
            struct.emitted = new java.util.HashMap<java.lang.String,java.util.Map<java.lang.String,java.lang.Long>>(2*_map350.size);
            @org.apache.storm.thrift.annotation.Nullable java.lang.String _key351;
            @org.apache.storm.thrift.annotation.Nullable java.util.Map<java.lang.String,java.lang.Long> _val352;
            for (int _i353 = 0; _i353 < _map350.size; ++_i353)
            {
              _key351 = iprot.readString();
              {
                org.apache.storm.thrift.protocol.TMap _map354 = iprot.readMapBegin();
                _val352 = new java.util.HashMap<java.lang.String,java.lang.Long>(2*_map354.size);
                @org.apache.storm.thrift.annotation.Nullable java.lang.String _key355;
                long _val356;
                for (int _i357 = 0; _i357 < _map354.size; ++_i357)
                {
                  _key355 = iprot.readString();
                  _val356 = iprot.readI64();
                  _val352.put(_key355, _val356);
                }
                iprot.readMapEnd();
              }
              struct.emitted.put(_key351, _val352);
            }
            iprot.readMapEnd();
          }
          struct.set_emitted_isSet(true);
        } else {
          // Wire type mismatch: skip the value rather than failing the read.
          org.apache.storm.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        break;
      case 2: // TRANSFERRED
        if (schemeField.type == org.apache.storm.thrift.protocol.TType.MAP) {
          {
            org.apache.storm.thrift.protocol.TMap _map358 = iprot.readMapBegin();
            struct.transferred = new java.util.HashMap<java.lang.String,java.util.Map<java.lang.String,java.lang.Long>>(2*_map358.size);
            @org.apache.storm.thrift.annotation.Nullable java.lang.String _key359;
            @org.apache.storm.thrift.annotation.Nullable java.util.Map<java.lang.String,java.lang.Long> _val360;
            for (int _i361 = 0; _i361 < _map358.size; ++_i361)
            {
              _key359 = iprot.readString();
              {
                org.apache.storm.thrift.protocol.TMap _map362 = iprot.readMapBegin();
                _val360 = new java.util.HashMap<java.lang.String,java.lang.Long>(2*_map362.size);
                @org.apache.storm.thrift.annotation.Nullable java.lang.String _key363;
                long _val364;
                for (int _i365 = 0; _i365 < _map362.size; ++_i365)
                {
                  _key363 = iprot.readString();
                  _val364 = iprot.readI64();
                  _val360.put(_key363, _val364);
                }
                iprot.readMapEnd();
              }
              struct.transferred.put(_key359, _val360);
            }
            iprot.readMapEnd();
          }
          struct.set_transferred_isSet(true);
        } else {
          org.apache.storm.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        break;
      case 3: // SPECIFIC
        if (schemeField.type == org.apache.storm.thrift.protocol.TType.STRUCT) {
          struct.specific = new ExecutorSpecificStats();
          struct.specific.read(iprot);
          struct.set_specific_isSet(true);
        } else {
          org.apache.storm.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        break;
      case 4: // RATE
        if (schemeField.type == org.apache.storm.thrift.protocol.TType.DOUBLE) {
          struct.rate = iprot.readDouble();
          struct.set_rate_isSet(true);
        } else {
          org.apache.storm.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        break;
      default:
        // Unknown field id: skip for forward compatibility.
        org.apache.storm.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
    }
    iprot.readFieldEnd();
  }
  iprot.readStructEnd();
  struct.validate();
}
    @Override
    public void write(org.apache.storm.thrift.protocol.TProtocol oprot, ExecutorStats struct) throws org.apache.storm.thrift.TException {
      // Standard (field-tagged) encoding: each set field is written as a
      // field header (id + type) followed by its value, ending with a
      // field-stop marker so readers can skip unknown fields.
      struct.validate();
      oprot.writeStructBegin(STRUCT_DESC);
      if (struct.emitted != null) {
        // emitted: map<string, map<string, i64>>
        oprot.writeFieldBegin(EMITTED_FIELD_DESC);
        {
          oprot.writeMapBegin(new org.apache.storm.thrift.protocol.TMap(org.apache.storm.thrift.protocol.TType.STRING, org.apache.storm.thrift.protocol.TType.MAP, struct.emitted.size()));
          for (java.util.Map.Entry<java.lang.String, java.util.Map<java.lang.String,java.lang.Long>> _iter366 : struct.emitted.entrySet())
          {
            oprot.writeString(_iter366.getKey());
            {
              oprot.writeMapBegin(new org.apache.storm.thrift.protocol.TMap(org.apache.storm.thrift.protocol.TType.STRING, org.apache.storm.thrift.protocol.TType.I64, _iter366.getValue().size()));
              for (java.util.Map.Entry<java.lang.String, java.lang.Long> _iter367 : _iter366.getValue().entrySet())
              {
                oprot.writeString(_iter367.getKey());
                oprot.writeI64(_iter367.getValue());
              }
              oprot.writeMapEnd();
            }
          }
          oprot.writeMapEnd();
        }
        oprot.writeFieldEnd();
      }
      if (struct.transferred != null) {
        // transferred: same wire shape as emitted.
        oprot.writeFieldBegin(TRANSFERRED_FIELD_DESC);
        {
          oprot.writeMapBegin(new org.apache.storm.thrift.protocol.TMap(org.apache.storm.thrift.protocol.TType.STRING, org.apache.storm.thrift.protocol.TType.MAP, struct.transferred.size()));
          for (java.util.Map.Entry<java.lang.String, java.util.Map<java.lang.String,java.lang.Long>> _iter368 : struct.transferred.entrySet())
          {
            oprot.writeString(_iter368.getKey());
            {
              oprot.writeMapBegin(new org.apache.storm.thrift.protocol.TMap(org.apache.storm.thrift.protocol.TType.STRING, org.apache.storm.thrift.protocol.TType.I64, _iter368.getValue().size()));
              for (java.util.Map.Entry<java.lang.String, java.lang.Long> _iter369 : _iter368.getValue().entrySet())
              {
                oprot.writeString(_iter369.getKey());
                oprot.writeI64(_iter369.getValue());
              }
              oprot.writeMapEnd();
            }
          }
          oprot.writeMapEnd();
        }
        oprot.writeFieldEnd();
      }
      if (struct.specific != null) {
        // specific: nested struct delegates to its own scheme.
        oprot.writeFieldBegin(SPECIFIC_FIELD_DESC);
        struct.specific.write(oprot);
        oprot.writeFieldEnd();
      }
      // rate is a primitive double and is always written, even if never set.
      oprot.writeFieldBegin(RATE_FIELD_DESC);
      oprot.writeDouble(struct.rate);
      oprot.writeFieldEnd();
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }
}
  // Factory registered for the tuple protocol; produces the compact codec below.
  private static class ExecutorStatsTupleSchemeFactory implements org.apache.storm.thrift.scheme.SchemeFactory {
    @Override
    public ExecutorStatsTupleScheme getScheme() {
      return new ExecutorStatsTupleScheme();
    }
  }
  // Compact tuple-protocol codec. Unlike the standard scheme, no field ids or
  // type headers go on the wire, so read() must consume fields in exactly the
  // order write() produces them: emitted, transferred, specific, rate. All four
  // fields are serialized unconditionally (write() NPEs if a map field is unset).
  private static class ExecutorStatsTupleScheme extends org.apache.storm.thrift.scheme.TupleScheme<ExecutorStats> {

    @Override
    public void write(org.apache.storm.thrift.protocol.TProtocol prot, ExecutorStats struct) throws org.apache.storm.thrift.TException {
      org.apache.storm.thrift.protocol.TTupleProtocol oprot = (org.apache.storm.thrift.protocol.TTupleProtocol) prot;
      {
        // emitted: size-prefixed nested maps (no type tags).
        oprot.writeI32(struct.emitted.size());
        for (java.util.Map.Entry<java.lang.String, java.util.Map<java.lang.String,java.lang.Long>> _iter370 : struct.emitted.entrySet())
        {
          oprot.writeString(_iter370.getKey());
          {
            oprot.writeI32(_iter370.getValue().size());
            for (java.util.Map.Entry<java.lang.String, java.lang.Long> _iter371 : _iter370.getValue().entrySet())
            {
              oprot.writeString(_iter371.getKey());
              oprot.writeI64(_iter371.getValue());
            }
          }
        }
      }
      {
        // transferred: same wire shape as emitted.
        oprot.writeI32(struct.transferred.size());
        for (java.util.Map.Entry<java.lang.String, java.util.Map<java.lang.String,java.lang.Long>> _iter372 : struct.transferred.entrySet())
        {
          oprot.writeString(_iter372.getKey());
          {
            oprot.writeI32(_iter372.getValue().size());
            for (java.util.Map.Entry<java.lang.String, java.lang.Long> _iter373 : _iter372.getValue().entrySet())
            {
              oprot.writeString(_iter373.getKey());
              oprot.writeI64(_iter373.getValue());
            }
          }
        }
      }
      struct.specific.write(oprot);
      oprot.writeDouble(struct.rate);
    }

    @Override
    public void read(org.apache.storm.thrift.protocol.TProtocol prot, ExecutorStats struct) throws org.apache.storm.thrift.TException {
      org.apache.storm.thrift.protocol.TTupleProtocol iprot = (org.apache.storm.thrift.protocol.TTupleProtocol) prot;
      {
        // Key/value types are passed to readMapBegin because the tuple
        // protocol does not carry them on the wire.
        org.apache.storm.thrift.protocol.TMap _map374 = iprot.readMapBegin(org.apache.storm.thrift.protocol.TType.STRING, org.apache.storm.thrift.protocol.TType.MAP);
        struct.emitted = new java.util.HashMap<java.lang.String,java.util.Map<java.lang.String,java.lang.Long>>(2*_map374.size);
        @org.apache.storm.thrift.annotation.Nullable java.lang.String _key375;
        @org.apache.storm.thrift.annotation.Nullable java.util.Map<java.lang.String,java.lang.Long> _val376;
        for (int _i377 = 0; _i377 < _map374.size; ++_i377)
        {
          _key375 = iprot.readString();
          {
            org.apache.storm.thrift.protocol.TMap _map378 = iprot.readMapBegin(org.apache.storm.thrift.protocol.TType.STRING, org.apache.storm.thrift.protocol.TType.I64);
            _val376 = new java.util.HashMap<java.lang.String,java.lang.Long>(2*_map378.size);
            @org.apache.storm.thrift.annotation.Nullable java.lang.String _key379;
            long _val380;
            for (int _i381 = 0; _i381 < _map378.size; ++_i381)
            {
              _key379 = iprot.readString();
              _val380 = iprot.readI64();
              _val376.put(_key379, _val380);
            }
          }
          struct.emitted.put(_key375, _val376);
        }
      }
      struct.set_emitted_isSet(true);
      {
        org.apache.storm.thrift.protocol.TMap _map382 = iprot.readMapBegin(org.apache.storm.thrift.protocol.TType.STRING, org.apache.storm.thrift.protocol.TType.MAP);
        struct.transferred = new java.util.HashMap<java.lang.String,java.util.Map<java.lang.String,java.lang.Long>>(2*_map382.size);
        @org.apache.storm.thrift.annotation.Nullable java.lang.String _key383;
        @org.apache.storm.thrift.annotation.Nullable java.util.Map<java.lang.String,java.lang.Long> _val384;
        for (int _i385 = 0; _i385 < _map382.size; ++_i385)
        {
          _key383 = iprot.readString();
          {
            org.apache.storm.thrift.protocol.TMap _map386 = iprot.readMapBegin(org.apache.storm.thrift.protocol.TType.STRING, org.apache.storm.thrift.protocol.TType.I64);
            _val384 = new java.util.HashMap<java.lang.String,java.lang.Long>(2*_map386.size);
            @org.apache.storm.thrift.annotation.Nullable java.lang.String _key387;
            long _val388;
            for (int _i389 = 0; _i389 < _map386.size; ++_i389)
            {
              _key387 = iprot.readString();
              _val388 = iprot.readI64();
              _val384.put(_key387, _val388);
            }
          }
          struct.transferred.put(_key383, _val384);
        }
      }
      struct.set_transferred_isSet(true);
      struct.specific = new ExecutorSpecificStats();
      struct.specific.read(iprot);
      struct.set_specific_isSet(true);
      struct.rate = iprot.readDouble();
      struct.set_rate_isSet(true);
    }
  }
private static <S extends org.apache.storm.thrift.scheme.IScheme> S scheme(org.apache.storm.thrift.protocol.TProtocol proto) {
return (org.apache.storm.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY : TUPLE_SCHEME_FACTORY).getScheme();
}
}
|
apache/storm | 36,282 | storm-client/src/jvm/org/apache/storm/daemon/worker/WorkerState.java | /**
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version
* 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package org.apache.storm.daemon.worker;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.function.Supplier;
import org.apache.storm.Config;
import org.apache.storm.Constants;
import org.apache.storm.StormTimer;
import org.apache.storm.cluster.IStateStorage;
import org.apache.storm.cluster.IStormClusterState;
import org.apache.storm.cluster.VersionedData;
import org.apache.storm.daemon.StormCommon;
import org.apache.storm.daemon.supervisor.AdvancedFSOps;
import org.apache.storm.daemon.worker.BackPressureTracker.BackpressureState;
import org.apache.storm.executor.IRunningExecutor;
import org.apache.storm.generated.Assignment;
import org.apache.storm.generated.Credentials;
import org.apache.storm.generated.DebugOptions;
import org.apache.storm.generated.Grouping;
import org.apache.storm.generated.InvalidTopologyException;
import org.apache.storm.generated.NodeInfo;
import org.apache.storm.generated.StormBase;
import org.apache.storm.generated.StormTopology;
import org.apache.storm.generated.StreamInfo;
import org.apache.storm.generated.TopologyStatus;
import org.apache.storm.grouping.Load;
import org.apache.storm.grouping.LoadMapping;
import org.apache.storm.hooks.IWorkerHook;
import org.apache.storm.messaging.ConnectionWithStatus;
import org.apache.storm.messaging.DeserializingConnectionCallback;
import org.apache.storm.messaging.IConnection;
import org.apache.storm.messaging.IConnectionCallback;
import org.apache.storm.messaging.IContext;
import org.apache.storm.messaging.TransportFactory;
import org.apache.storm.messaging.netty.BackPressureStatus;
import org.apache.storm.metrics2.StormMetricRegistry;
import org.apache.storm.policy.IWaitStrategy;
import org.apache.storm.security.auth.IAutoCredentials;
import org.apache.storm.serialization.ITupleSerializer;
import org.apache.storm.serialization.KryoTupleSerializer;
import org.apache.storm.shade.com.google.common.collect.ImmutableMap;
import org.apache.storm.shade.com.google.common.collect.Sets;
import org.apache.storm.task.WorkerTopologyContext;
import org.apache.storm.task.WorkerUserContext;
import org.apache.storm.tuple.AddressedTuple;
import org.apache.storm.tuple.Fields;
import org.apache.storm.utils.ConfigUtils;
import org.apache.storm.utils.JCQueue;
import org.apache.storm.utils.ObjectReader;
import org.apache.storm.utils.SupervisorIfaceFactory;
import org.apache.storm.utils.ThriftTopologyUtils;
import org.apache.storm.utils.Utils;
import org.apache.storm.utils.Utils.SmartThread;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class WorkerState {
    private static final Logger LOG = LoggerFactory.getLogger(WorkerState.class);
    // How often (at most) local load metrics are pushed upstream; see refreshLoad().
    private static final long LOAD_REFRESH_INTERVAL_MS = 5000L;
    // Overflow growth between re-broadcasts of back-pressure status; see transferLocalBatch().
    private static final int RESEND_BACKPRESSURE_SIZE = 10000;
    // NOTE(review): shared across all WorkerState instances in this JVM and
    // incremented without synchronization (see dropMessage()); the count is approximate.
    private static long dropCount = 0;
    final Map<String, Object> conf;
    final IContext mqContext;
    // Server-side connection on which this worker receives tuples from peers.
    final IConnection receiver;
    final String topologyId;
    final String assignmentId;
    private final Supplier<SupervisorIfaceFactory> supervisorIfaceSupplier;
    final int port;
    final String workerId;
    final IStateStorage stateStorage;
    final IStormClusterState stormClusterState;
    // when worker bootup, worker will start to setup initial connections to
    // other workers. When all connection is ready, we will count down this latch
    // and spout and bolt will be activated, assuming the topology is not deactivated.
    // used in worker only, keep it as a latch
    final CountDownLatch isWorkerActive;
    final AtomicBoolean isTopologyActive;
    final AtomicReference<Map<String, DebugOptions>> stormComponentToDebug;
    // local executors and localTaskIds running in this worker
    final Set<List<Long>> localExecutors;
    final ArrayList<Integer> localTaskIds;
    // [taskId]-> JCQueue : initialized after local executors are initialized
    final Map<Integer, JCQueue> localReceiveQueues = new HashMap<>();
    final Map<String, Object> topologyConf;
    final StormTopology topology;
    // Topology augmented with system components (acker, metrics, etc.).
    final StormTopology systemTopology;
    final Map<Integer, String> taskToComponent;
    final Map<String, Map<String, Fields>> componentToStreamToFields;
    final Map<String, List<Integer>> componentToSortedTasks;
    final ConcurrentMap<String, Long> blobToLastKnownVersion;
    // Guards writes to cachedTaskToNodePort; see refreshConnections().
    final ReentrantReadWriteLock endpointSocketLock;
    final AtomicReference<Map<Integer, NodeInfo>> cachedTaskToNodePort;
    // cachedNodeToHost can be temporarily out of sync with cachedTaskToNodePort
    final AtomicReference<Map<String, String>> cachedNodeToHost;
    final AtomicReference<Map<NodeInfo, IConnection>> cachedNodeToPortSocket;
    // executor id is in form [start_task_id end_task_id]
    final Map<List<Long>, JCQueue> executorReceiveQueueMap;
    final Map<Integer, JCQueue> taskToExecutorQueue;
    // Kills this worker process; invoked when the assignment no longer matches.
    final Runnable suicideCallback;
    final Utils.UptimeComputer uptime;
    final Map<String, Object> defaultSharedResources;
    final Map<String, Object> userSharedResources;
    final LoadMapping loadMapping;
    final AtomicReference<Map<String, VersionedData<Assignment>>> assignmentVersions;
    // Timers
    final StormTimer heartbeatTimer = mkHaltingTimer("heartbeat-timer");
    final StormTimer refreshLoadTimer = mkHaltingTimer("refresh-load-timer");
    final StormTimer refreshConnectionsTimer = mkHaltingTimer("refresh-connections-timer");
    final StormTimer refreshCredentialsTimer = mkHaltingTimer("refresh-credentials-timer");
    final StormTimer checkForUpdatedBlobsTimer = mkHaltingTimer("check-for-updated-blobs-timer");
    final StormTimer resetLogLevelsTimer = mkHaltingTimer("reset-log-levels-timer");
    final StormTimer refreshActiveTimer = mkHaltingTimer("refresh-active-timer");
    final StormTimer executorHeartbeatTimer = mkHaltingTimer("executor-heartbeat-timer");
    final StormTimer flushTupleTimer = mkHaltingTimer("flush-tuple-timer");
    final StormTimer userTimer = mkHaltingTimer("user-timer");
    final StormTimer backPressureCheckTimer = mkHaltingTimer("backpressure-check-timer");
    private final WorkerTransfer workerTransfer;
    private final BackPressureTracker bpTracker;
    private final List<IWorkerHook> deserializedWorkerHooks;
    // global variables only used internally in class
    private final Set<Integer> outboundTasks;
    private final AtomicLong nextLoadUpdate = new AtomicLong(0);
    private final boolean trySerializeLocal;
    private final Collection<IAutoCredentials> autoCredentials;
    private final AtomicReference<Credentials> credentialsAtom;
    private final StormMetricRegistry metricRegistry;
public WorkerState(Map<String, Object> conf,
IContext mqContext,
String topologyId,
String assignmentId,
Supplier<SupervisorIfaceFactory> supervisorIfaceSupplier,
int port,
String workerId,
Map<String, Object> topologyConf,
IStateStorage stateStorage,
IStormClusterState stormClusterState,
Collection<IAutoCredentials> autoCredentials,
StormMetricRegistry metricRegistry,
Credentials initialCredentials) throws IOException,
InvalidTopologyException {
this.metricRegistry = metricRegistry;
this.autoCredentials = autoCredentials;
this.credentialsAtom = new AtomicReference(initialCredentials);
this.conf = conf;
this.supervisorIfaceSupplier = supervisorIfaceSupplier;
this.mqContext = (null != mqContext) ? mqContext :
TransportFactory.makeContext(topologyConf, metricRegistry);
this.topologyId = topologyId;
this.assignmentId = assignmentId;
this.port = port;
this.workerId = workerId;
this.stateStorage = stateStorage;
this.stormClusterState = stormClusterState;
this.localExecutors =
new HashSet<>(readWorkerExecutors(assignmentId, port, getLocalAssignment(this.stormClusterState, topologyId)));
this.isWorkerActive = new CountDownLatch(1);
this.isTopologyActive = new AtomicBoolean(false);
this.stormComponentToDebug = new AtomicReference<>();
this.topology = ConfigUtils.readSupervisorTopology(conf, topologyId, AdvancedFSOps.make(conf));
this.taskToComponent = StormCommon.stormTaskInfo(topology, topologyConf);
this.executorReceiveQueueMap = mkReceiveQueueMap(topologyConf, localExecutors, taskToComponent);
this.localTaskIds = new ArrayList<>();
this.taskToExecutorQueue = new HashMap<>();
this.blobToLastKnownVersion = new ConcurrentHashMap<>();
for (Map.Entry<List<Long>, JCQueue> entry : executorReceiveQueueMap.entrySet()) {
List<Integer> taskIds = StormCommon.executorIdToTasks(entry.getKey());
for (Integer taskId : taskIds) {
this.taskToExecutorQueue.put(taskId, entry.getValue());
}
this.localTaskIds.addAll(taskIds);
}
Collections.sort(localTaskIds);
this.topologyConf = topologyConf;
this.systemTopology = StormCommon.systemTopology(topologyConf, topology);
this.componentToStreamToFields = new HashMap<>();
for (String c : ThriftTopologyUtils.getComponentIds(systemTopology)) {
Map<String, Fields> streamToFields = new HashMap<>();
for (Map.Entry<String, StreamInfo> stream :
ThriftTopologyUtils.getComponentCommon(systemTopology, c).get_streams().entrySet()) {
streamToFields.put(stream.getKey(), new Fields(stream.getValue().get_output_fields()));
}
componentToStreamToFields.put(c, streamToFields);
}
this.componentToSortedTasks = Utils.reverseMap(taskToComponent);
this.componentToSortedTasks.values().forEach(Collections::sort);
this.endpointSocketLock = new ReentrantReadWriteLock();
this.cachedNodeToPortSocket = new AtomicReference<>(new HashMap<>());
this.cachedTaskToNodePort = new AtomicReference<>(new HashMap<>());
this.cachedNodeToHost = new AtomicReference<>(new HashMap<>());
this.suicideCallback = Utils.mkSuicideFn();
this.uptime = Utils.makeUptimeComputer();
this.defaultSharedResources = makeDefaultResources();
this.userSharedResources = makeUserResources();
this.loadMapping = new LoadMapping();
this.assignmentVersions = new AtomicReference<>(new HashMap<>());
this.outboundTasks = workerOutboundTasks();
this.trySerializeLocal = topologyConf.containsKey(Config.TOPOLOGY_TESTING_ALWAYS_TRY_SERIALIZE)
&& (Boolean) topologyConf.get(Config.TOPOLOGY_TESTING_ALWAYS_TRY_SERIALIZE);
if (trySerializeLocal) {
LOG.warn("WILL TRY TO SERIALIZE ALL TUPLES (Turn off {} for production", Config.TOPOLOGY_TESTING_ALWAYS_TRY_SERIALIZE);
}
int maxTaskId = getMaxTaskId(componentToSortedTasks);
this.workerTransfer = new WorkerTransfer(this, topologyConf, maxTaskId);
this.bpTracker = new BackPressureTracker(workerId, taskToExecutorQueue, metricRegistry, taskToComponent);
this.deserializedWorkerHooks = deserializeWorkerHooks();
LOG.info("Registering IConnectionCallbacks for {}:{}", assignmentId, port);
IConnectionCallback cb = new DeserializingConnectionCallback(topologyConf,
getWorkerTopologyContext(),
this::transferLocalBatch);
Supplier<Object> newConnectionResponse = () -> {
BackPressureStatus bpStatus = bpTracker.getCurrStatus();
LOG.info("Sending BackPressure status to new client. BPStatus: {}", bpStatus);
return bpStatus;
};
this.receiver = this.mqContext.bind(topologyId, port, cb, newConnectionResponse);
}
public static boolean isConnectionReady(IConnection connection) {
return !(connection instanceof ConnectionWithStatus)
|| ((ConnectionWithStatus) connection).status() == ConnectionWithStatus.Status.Ready;
}
private static int getMaxTaskId(Map<String, List<Integer>> componentToSortedTasks) {
int maxTaskId = -1;
for (List<Integer> integers : componentToSortedTasks.values()) {
if (!integers.isEmpty()) {
int tempMax = integers.stream().max(Integer::compareTo).get();
if (tempMax > maxTaskId) {
maxTaskId = tempMax;
}
}
}
return maxTaskId;
}
    // ---- Plain accessors for the worker's shared state. All return live
    // references to internal collections/objects, not defensive copies. ----

    public List<IWorkerHook> getDeserializedWorkerHooks() {
        return deserializedWorkerHooks;
    }
    public Map<String, Object> getConf() {
        return conf;
    }
    public IConnection getReceiver() {
        return receiver;
    }
    public String getTopologyId() {
        return topologyId;
    }
    public int getPort() {
        return port;
    }
    public String getWorkerId() {
        return workerId;
    }
    public IStateStorage getStateStorage() {
        return stateStorage;
    }
    public CountDownLatch getIsWorkerActive() {
        return isWorkerActive;
    }
    public AtomicBoolean getIsTopologyActive() {
        return isTopologyActive;
    }
    public AtomicReference<Map<String, DebugOptions>> getStormComponentToDebug() {
        return stormComponentToDebug;
    }
    public Set<List<Long>> getLocalExecutors() {
        return localExecutors;
    }
    public List<Integer> getLocalTaskIds() {
        return localTaskIds;
    }
    public Map<Integer, JCQueue> getLocalReceiveQueues() {
        return localReceiveQueues;
    }
    public Map<String, Object> getTopologyConf() {
        return topologyConf;
    }
    public StormTopology getTopology() {
        return topology;
    }
    public StormTopology getSystemTopology() {
        return systemTopology;
    }
    public Map<Integer, String> getTaskToComponent() {
        return taskToComponent;
    }
    public Map<String, Map<String, Fields>> getComponentToStreamToFields() {
        return componentToStreamToFields;
    }
    public Map<String, List<Integer>> getComponentToSortedTasks() {
        return componentToSortedTasks;
    }
    public Map<String, Long> getBlobToLastKnownVersion() {
        return blobToLastKnownVersion;
    }
    public AtomicReference<Map<NodeInfo, IConnection>> getCachedNodeToPortSocket() {
        return cachedNodeToPortSocket;
    }
    public Map<List<Long>, JCQueue> getExecutorReceiveQueueMap() {
        return executorReceiveQueueMap;
    }
    public Runnable getSuicideCallback() {
        return suicideCallback;
    }
    public Utils.UptimeComputer getUptime() {
        return uptime;
    }
    public Map<String, Object> getDefaultSharedResources() {
        return defaultSharedResources;
    }
    public Map<String, Object> getUserSharedResources() {
        return userSharedResources;
    }
    public LoadMapping getLoadMapping() {
        return loadMapping;
    }
    public AtomicReference<Map<String, VersionedData<Assignment>>> getAssignmentVersions() {
        return assignmentVersions;
    }
    public StormTimer getUserTimer() {
        return userTimer;
    }
    // Creates the thread that drains locally produced tuples to remote workers.
    public SmartThread makeTransferThread() {
        return workerTransfer.makeTransferThread();
    }
public void suicideIfLocalAssignmentsChanged(Assignment assignment) {
boolean shouldHalt = false;
if (assignment != null) {
Set<List<Long>> assignedExecutors = new HashSet<>(readWorkerExecutors(assignmentId, port, assignment));
if (!localExecutors.equals(assignedExecutors)) {
LOG.info("Found conflicting assignments. We shouldn't be alive!" + " Assigned: " + assignedExecutors
+ ", Current: " + localExecutors);
shouldHalt = true;
}
} else {
LOG.info("Assigment is null. We should not be alive!");
shouldHalt = true;
}
if (shouldHalt) {
if (!ConfigUtils.isLocalMode(conf)) {
suicideCallback.run();
} else {
LOG.info("Local worker tried to commit suicide!");
}
}
}
    /**
     * Re-reads the assignment and reconciles this worker's outbound connections
     * with it: opens connections to newly needed (node, port) pairs, updates the
     * task -> node/port routing table, and closes connections that are no longer
     * needed. Also triggers suicide if this worker's own executors changed.
     */
    public void refreshConnections() {
        Assignment assignment = null;
        try {
            assignment = getLocalAssignment(stormClusterState, topologyId);
        } catch (Exception e) {
            LOG.warn("Failed to read assignment. This should only happen when topology is shutting down.", e);
        }
        suicideIfLocalAssignmentsChanged(assignment);
        Set<NodeInfo> neededConnections = new HashSet<>();
        Map<Integer, NodeInfo> newTaskToNodePort = new HashMap<>();
        if (null != assignment) {
            Map<Integer, NodeInfo> taskToNodePort = StormCommon.taskToNodeport(assignment.get_executor_node_port());
            for (Map.Entry<Integer, NodeInfo> taskToNodePortEntry : taskToNodePort.entrySet()) {
                Integer task = taskToNodePortEntry.getKey();
                // Only tasks this worker sends to matter; local tasks need no socket.
                if (outboundTasks.contains(task)) {
                    newTaskToNodePort.put(task, taskToNodePortEntry.getValue());
                    if (!localTaskIds.contains(task)) {
                        neededConnections.add(taskToNodePortEntry.getValue());
                    }
                }
            }
        }
        final Set<NodeInfo> currentConnections = cachedNodeToPortSocket.get().keySet();
        final Set<NodeInfo> newConnections = Sets.difference(neededConnections, currentConnections);
        final Set<NodeInfo> removeConnections = Sets.difference(currentConnections, neededConnections);
        Map<String, String> nodeHost = assignment != null ? assignment.get_node_host() : null;
        // Add new connections atomically
        cachedNodeToPortSocket.getAndUpdate(prev -> {
            Map<NodeInfo, IConnection> next = new HashMap<>(prev);
            for (NodeInfo nodeInfo : newConnections) {
                next.put(nodeInfo,
                         mqContext.connect(
                             topologyId,
                             //nodeHost is not null here, as newConnections is only non-empty if assignment was not null above.
                             nodeHost.get(nodeInfo.get_node()), // Host
                             nodeInfo.get_port().iterator().next().intValue(), // Port
                             workerTransfer.getRemoteBackPressureStatus()));
            }
            return next;
        });
        // Writers take the write lock so readers of the routing table see a
        // consistent snapshot while it is being swapped.
        try {
            endpointSocketLock.writeLock().lock();
            cachedTaskToNodePort.set(newTaskToNodePort);
        } finally {
            endpointSocketLock.writeLock().unlock();
        }
        // It is okay that cachedNodeToHost can be temporarily out of sync with cachedTaskToNodePort
        if (nodeHost != null) {
            cachedNodeToHost.set(nodeHost);
        } else {
            cachedNodeToHost.set(new HashMap<>());
        }
        // Close stale sockets first, then drop them from the cache.
        for (NodeInfo nodeInfo : removeConnections) {
            cachedNodeToPortSocket.get().get(nodeInfo).close();
        }
        // Remove old connections atomically
        cachedNodeToPortSocket.getAndUpdate(prev -> {
            Map<NodeInfo, IConnection> next = new HashMap<>(prev);
            removeConnections.forEach(next::remove);
            return next;
        });
    }
    // Re-schedules itself via the refresh-active timer each time the storm base changes.
    public void refreshStormActive() {
        refreshStormActive(() -> refreshActiveTimer.schedule(0, this::refreshStormActive));
    }

    /**
     * Reads the topology's StormBase and updates the active flag and per-component
     * debug options accordingly.
     *
     * @param callback invoked by the cluster state layer when the base changes again
     */
    public void refreshStormActive(Runnable callback) {
        StormBase base = stormClusterState.stormBase(topologyId, callback);
        // A missing base (topology being killed) counts as inactive.
        isTopologyActive.set(
            (null != base)
            && (base.get_status() == TopologyStatus.ACTIVE));
        if (null != base) {
            Map<String, DebugOptions> debugOptionsMap = new HashMap<>(base.get_component_debug());
            for (DebugOptions debugOptions : debugOptionsMap.values()) {
                // Fill in defaults for unset debug options: 10% sampling, disabled.
                if (!debugOptions.is_set_samplingpct()) {
                    debugOptions.set_samplingpct(10);
                }
                if (!debugOptions.is_set_enable()) {
                    debugOptions.set_enable(false);
                }
            }
            stormComponentToDebug.set(debugOptionsMap);
            LOG.debug("Events debug options {}", stormComponentToDebug.get());
        }
    }
public void refreshLoad(List<IRunningExecutor> execs) {
Set<Integer> remoteTasks = Sets.difference(new HashSet<>(outboundTasks), new HashSet<>(localTaskIds));
Map<Integer, Double> localLoad = new HashMap<>();
for (IRunningExecutor exec : execs) {
double receiveLoad = exec.getReceiveQueue().getQueueLoad();
localLoad.put(exec.getExecutorId().get(0).intValue(), receiveLoad);
}
Map<Integer, Load> remoteLoad = new HashMap<>();
cachedNodeToPortSocket.get().values().stream().forEach(conn -> remoteLoad.putAll(conn.getLoad(remoteTasks)));
loadMapping.setLocal(localLoad);
loadMapping.setRemote(remoteLoad);
Long now = System.currentTimeMillis();
if (now > nextLoadUpdate.get()) {
receiver.sendLoadMetrics(localLoad);
nextLoadUpdate.set(now + LOAD_REFRESH_INTERVAL_MS);
}
}
// checks if the tasks which had back pressure are now free again. if so, sends an update to other workers
public void refreshBackPressureStatus() {
LOG.debug("Checking for change in Backpressure status on worker's tasks");
boolean bpSituationChanged = bpTracker.refreshBpTaskList();
if (bpSituationChanged) {
BackPressureStatus bpStatus = bpTracker.getCurrStatus();
receiver.sendBackPressureStatus(bpStatus);
}
}
/**
* we will wait all connections to be ready and then activate the spout/bolt when the worker bootup.
*/
public void activateWorkerWhenAllConnectionsReady() {
int delaySecs = 0;
int recurSecs = 1;
refreshActiveTimer.schedule(delaySecs,
() -> {
if (areAllConnectionsReady()) {
LOG.info("All connections are ready for worker {}:{} with id {}", assignmentId, port, workerId);
isWorkerActive.countDown();
} else {
refreshActiveTimer.schedule(recurSecs, () -> activateWorkerWhenAllConnectionsReady(), false, 0);
}
}
);
}
    /* Not a Blocking call. If cannot emit, will add 'tuple' to pendingEmits and return 'false'. 'pendingEmits' can be null */
    public boolean tryTransferRemote(AddressedTuple tuple, Queue<AddressedTuple> pendingEmits, ITupleSerializer serializer) {
        return workerTransfer.tryTransferRemote(tuple, pendingEmits, serializer);
    }

    // Blocking: hands all pending remote messages to the transport layer.
    public void flushRemotes() throws InterruptedException {
        workerTransfer.flushRemotes();
    }

    // Non-blocking variant of flushRemotes(); the boolean presumably indicates
    // whether the flush completed — see WorkerTransfer for the exact contract.
    public boolean tryFlushRemotes() {
        return workerTransfer.tryFlushRemotes();
    }
    // Receives msgs from remote workers and feeds them to local executors. If any receiving local executor is under Back Pressure,
    // informs other workers about back pressure situation. Runs in the NettyWorker thread.
    private void transferLocalBatch(ArrayList<AddressedTuple> tupleBatch) {
        for (int i = 0; i < tupleBatch.size(); i++) {
            AddressedTuple tuple = tupleBatch.get(i);
            JCQueue queue = taskToExecutorQueue.get(tuple.dest);
            // 1- try adding to main queue if its overflow is not empty
            if (queue.isEmptyOverflow()) {
                if (queue.tryPublish(tuple)) {
                    continue;
                }
            }
            // 2- BP detected (i.e MainQ is full). So try adding to overflow
            int currOverflowCount = queue.getOverflowCount();
            // get BP state object so only have to lookup once
            BackpressureState bpState = bpTracker.getBackpressureState(tuple.dest);
            if (bpTracker.recordBackPressure(bpState)) {
                // First time this task entered BP: notify peers immediately.
                receiver.sendBackPressureStatus(bpTracker.getCurrStatus());
                bpTracker.setLastOverflowCount(bpState, currOverflowCount);
            } else {
                // Task already in BP: only re-broadcast after the overflow has
                // grown by RESEND_BACKPRESSURE_SIZE since the last notification.
                if (currOverflowCount - bpTracker.getLastOverflowCount(bpState) > RESEND_BACKPRESSURE_SIZE) {
                    // resend BP status, in case prev notification was missed or reordered
                    BackPressureStatus bpStatus = bpTracker.getCurrStatus();
                    receiver.sendBackPressureStatus(bpStatus);
                    bpTracker.setLastOverflowCount(bpState, currOverflowCount);
                    LOG.debug("Re-sent BackPressure Status. OverflowCount = {}, BP Status ID = {}. ", currOverflowCount, bpStatus.id);
                }
            }
            // 3- overflow is bounded too; beyond its threshold the tuple is dropped.
            if (!queue.tryPublishToOverflow(tuple)) {
                dropMessage(tuple, queue);
            }
        }
    }
    // Drops a tuple whose destination queue and overflow are both full, recording
    // the drop in queue metrics and logging a warning.
    // NOTE(review): dropCount is a static field shared by all instances and is
    // incremented without synchronization from the Netty callback thread, so the
    // logged total is approximate under concurrency.
    private void dropMessage(AddressedTuple tuple, JCQueue queue) {
        ++dropCount;
        queue.recordMsgDrop();
        LOG.warn(
            "Dropping message as overflow threshold has reached for Q = {}. OverflowCount = {}. Total Drop Count= {}, Dropped Message : {}",
            queue.getQueueName(), queue.getOverflowCount(), dropCount, tuple);
    }
public void checkSerialize(KryoTupleSerializer serializer, AddressedTuple tuple) {
if (trySerializeLocal) {
serializer.serialize(tuple.getTuple());
}
}
    /**
     * Builds a topology context describing this worker for system components.
     * NOTE(review): nearly identical to getWorkerUserContext() below except for
     * the returned type; keep the two in sync when changing either.
     */
    public final WorkerTopologyContext getWorkerTopologyContext() {
        try {
            String codeDir = ConfigUtils.supervisorStormResourcesPath(ConfigUtils.supervisorStormDistRoot(conf, topologyId));
            String pidDir = ConfigUtils.workerPidsRoot(conf, topologyId);
            return new WorkerTopologyContext(systemTopology, topologyConf, taskToComponent, componentToSortedTasks,
                                             componentToStreamToFields, topologyId, codeDir, pidDir, port, localTaskIds,
                                             defaultSharedResources,
                                             userSharedResources, cachedTaskToNodePort, assignmentId, cachedNodeToHost);
        } catch (IOException e) {
            throw Utils.wrapInRuntime(e);
        }
    }

    /**
     * Builds the user-facing variant of the worker context (passed to worker hooks).
     */
    public final WorkerUserContext getWorkerUserContext() {
        try {
            String codeDir = ConfigUtils.supervisorStormResourcesPath(ConfigUtils.supervisorStormDistRoot(conf, topologyId));
            String pidDir = ConfigUtils.workerPidsRoot(conf, topologyId);
            return new WorkerUserContext(systemTopology, topologyConf, taskToComponent, componentToSortedTasks,
                                         componentToStreamToFields, topologyId, codeDir, pidDir, port, localTaskIds,
                                         defaultSharedResources,
                                         userSharedResources, cachedTaskToNodePort, assignmentId, cachedNodeToHost);
        } catch (IOException e) {
            throw Utils.wrapInRuntime(e);
        }
    }
/**
 * Materializes the topology's worker hooks, if any, by Java-deserializing each
 * serialized hook blob back into an {@link IWorkerHook} instance.
 *
 * @return the deserialized hooks in topology order; empty when none are declared
 */
private List<IWorkerHook> deserializeWorkerHooks() {
    List<IWorkerHook> hooks = new ArrayList<>();
    if (!topology.is_set_worker_hooks()) {
        // No hooks declared on this topology.
        return hooks;
    }
    for (ByteBuffer serializedHook : topology.get_worker_hooks()) {
        byte[] raw = Utils.toByteArray(serializedHook);
        hooks.add(Utils.javaDeserialize(raw, IWorkerHook.class));
    }
    return hooks;
}
/**
 * Invokes the start hook of every worker hook declared by the topology, passing
 * each the topology configuration and a {@link WorkerUserContext}.
 */
public void runWorkerStartHooks() {
WorkerUserContext workerUserContext = getWorkerUserContext();
for (IWorkerHook hook : getDeserializedWorkerHooks()) {
hook.start(topologyConf, workerUserContext);
}
}
/**
 * Invokes the shutdown hook of every worker hook declared by the topology.
 */
public void runWorkerShutdownHooks() {
for (IWorkerHook hook : getDeserializedWorkerHooks()) {
hook.shutdown();
}
}
/**
 * Shuts down the default shared resources — currently the shared executor
 * service — interrupting any tasks still running on it.
 */
public void closeResources() {
LOG.info("Shutting down default resources");
((ExecutorService) defaultSharedResources.get(WorkerTopologyContext.SHARED_EXECUTOR)).shutdownNow();
LOG.info("Shut down default resources");
}
/**
 * Checks whether every remote worker connection tracked by this worker is
 * ready for use.
 *
 * @return true if all connections are ready; vacuously true when there are none
 */
public boolean areAllConnectionsReady() {
    // allMatch is the idiomatic form of reduce((l, r) -> l && r).orElse(true):
    // it short-circuits on the first unready connection and is vacuously true
    // for an empty stream, so behavior for callers is unchanged.
    return cachedNodeToPortSocket.get().values()
        .stream()
        .allMatch(WorkerState::isConnectionReady);
}
/**
 * @return the auto-credentials plugins configured for this worker
 */
public Collection<IAutoCredentials> getAutoCredentials() {
return this.autoCredentials;
}
/**
 * @return the currently cached topology credentials; read atomically, as they
 *         may be replaced concurrently via {@code setCredentials}
 */
public Credentials getCredentials() {
return credentialsAtom.get();
}
/**
 * Atomically replaces the cached topology credentials.
 *
 * @param credentials the new credentials to publish to readers
 */
public void setCredentials(Credentials credentials) {
this.credentialsAtom.set(credentials);
}
/**
 * Derives the executor ids hosted by this worker — identified by supervisor id
 * and port — from the topology assignment, always including the system executor.
 *
 * @param assignmentId this worker's supervisor/node id
 * @param port this worker's port
 * @param assignment the topology-wide executor-to-node/port assignment
 * @return the executor ids assigned to this worker, system executor first
 */
private List<List<Long>> readWorkerExecutors(String assignmentId, int port, Assignment assignment) {
    List<List<Long>> localExecutors = new ArrayList<>();
    // Every worker hosts the system executor in addition to its assigned ones.
    localExecutors.add(Constants.SYSTEM_EXECUTOR_ID);
    Map<List<Long>, NodeInfo> executorToNodePort = assignment.get_executor_node_port();
    for (Map.Entry<List<Long>, NodeInfo> assigned : executorToNodePort.entrySet()) {
        NodeInfo location = assigned.getValue();
        if (location.get_node().equals(assignmentId)
                && location.get_port().iterator().next() == port) {
            localExecutors.add(assigned.getKey());
        }
    }
    return localExecutors;
}
/**
 * Fetches this topology's assignment, preferring a direct call to the local
 * supervisor; if that call fails for any reason, falls back to the assignment
 * stored in Zookeeper.
 *
 * @param stormClusterState cluster-state handle used for the Zookeeper fallback
 * @param topologyId the topology whose assignment is requested
 * @return the assignment for this topology
 * @throws RuntimeException if the supervisor call failed and Zookeeper also has
 *         no assignment (the supervisor failure is attached as the cause)
 */
private Assignment getLocalAssignment(IStormClusterState stormClusterState, String topologyId) {
try (SupervisorIfaceFactory fac = supervisorIfaceSupplier.get()) {
return fac.getIface().getLocalAssignmentForStorm(topologyId);
} catch (Throwable e) {
//if any error/exception thrown, fetch it from zookeeper
Assignment assignment = stormClusterState.remoteAssignmentInfo(topologyId, null);
if (assignment == null) {
throw new RuntimeException("Failed to read worker assignment."
+ " Supervisor client threw exception, and assignment in Zookeeper was null", e);
}
return assignment;
}
}
/**
 * Builds one receive {@link JCQueue} per executor assigned to this worker,
 * sized and batched from the topology configuration.
 *
 * @param topologyConf topology configuration supplying queue sizing
 * @param executors executor ids hosted by this worker
 * @param taskToComponent map from task id to component id
 * @return map from executor id to its dedicated receive queue
 * @throws IllegalArgumentException if the producer batch size exceeds half the
 *         receive buffer size
 */
private Map<List<Long>, JCQueue> mkReceiveQueueMap(Map<String, Object> topologyConf,
    Set<List<Long>> executors, Map<Integer, String> taskToComponent) {
    Integer queueSize = ObjectReader.getInt(topologyConf.get(Config.TOPOLOGY_EXECUTOR_RECEIVE_BUFFER_SIZE));
    Integer batchSize = ObjectReader.getInt(topologyConf.get(Config.TOPOLOGY_PRODUCER_BATCH_SIZE));
    Integer overflowLimit = ObjectReader.getInt(topologyConf.get(Config.TOPOLOGY_EXECUTOR_OVERFLOW_LIMIT));
    // Reject configurations where a single producer batch could occupy more
    // than half the queue.
    if (batchSize > queueSize / 2) {
        throw new IllegalArgumentException(Config.TOPOLOGY_PRODUCER_BATCH_SIZE + ":" + batchSize
            + " is greater than half of " + Config.TOPOLOGY_EXECUTOR_RECEIVE_BUFFER_SIZE + ":"
            + queueSize);
    }
    IWaitStrategy waitStrategy = IWaitStrategy.createBackPressureWaitStrategy(topologyConf);
    Map<List<Long>, JCQueue> queues = new HashMap<>();
    for (List<Long> executor : executors) {
        List<Integer> taskIds = StormCommon.executorIdToTasks(executor);
        int firstTask = taskIds.get(0);
        // The system task id maps to the reserved system component.
        String componentId = (firstTask == Constants.SYSTEM_TASK_ID)
            ? Constants.SYSTEM_COMPONENT_ID
            : taskToComponent.get(firstTask);
        queues.put(executor, new JCQueue("receive-queue" + executor.toString(), "receive-queue",
            queueSize, overflowLimit, batchSize, waitStrategy,
            this.getTopologyId(), componentId, taskIds, this.getPort(), metricRegistry));
    }
    return queues;
}
/**
 * Builds the default shared resources map: a fixed-size thread pool keyed by
 * {@link WorkerTopologyContext#SHARED_EXECUTOR}, sized from
 * {@code Config.TOPOLOGY_WORKER_SHARED_THREAD_POOL_SIZE}.
 */
private Map<String, Object> makeDefaultResources() {
int threadPoolSize = ObjectReader.getInt(conf.get(Config.TOPOLOGY_WORKER_SHARED_THREAD_POOL_SIZE));
return ImmutableMap.of(WorkerTopologyContext.SHARED_EXECUTOR, Executors.newFixedThreadPool(threadPoolSize));
}
/**
 * Builds the user shared resources map — initially empty and mutable.
 */
private Map<String, Object> makeUserResources() {
return new HashMap<>();
}
/**
 * Creates a {@link StormTimer} whose uncaught-exception handler logs the error
 * and exits the whole worker process, so a failed timer thread cannot linger
 * silently.
 *
 * @param name thread name for the timer
 */
private StormTimer mkHaltingTimer(String name) {
return new StormTimer(name, (thread, exception) -> {
LOG.error("Error when processing event", exception);
Utils.exitProcess(20, "Error when processing an event");
});
}
/**
 * Get worker outbound tasks.
 *
 * <p>Walks every local task's outgoing streams to collect the set of
 * downstream components, then expands those components back into task ids.
 *
 * @return seq of task ids that receive messages from this worker
 */
private Set<Integer> workerOutboundTasks() {
    WorkerTopologyContext context = getWorkerTopologyContext();
    // Every component that any local task sends a stream to.
    Set<String> downstreamComponents = new HashSet<>();
    for (Integer taskId : localTaskIds) {
        String componentId = context.getComponentId(taskId);
        for (Map<String, Grouping> targets : context.getTargets(componentId).values()) {
            downstreamComponents.addAll(targets.keySet());
        }
    }
    // Expand downstream components into the task ids they own.
    Set<Integer> receivingTasks = new HashSet<>();
    for (Map.Entry<String, List<Integer>> componentTasks : Utils.reverseMap(taskToComponent).entrySet()) {
        if (downstreamComponents.contains(componentTasks.getKey())) {
            receivingTasks.addAll(componentTasks.getValue());
        }
    }
    return receivingTasks;
}
/**
 * @return the cached set of task ids that receive messages from this worker
 */
public Set<Integer> getOutboundTasks() {
return this.outboundTasks;
}
/**
 * Check if this worker has remote outbound tasks.
 *
 * <p>There are remote outbound tasks exactly when some outbound task is not
 * among the local task ids. This avoids the old pattern of copying
 * {@code outboundTasks} (already a Set) and building a Guava difference view
 * only to test it for emptiness.
 *
 * @return true if this worker has remote outbound tasks; false otherwise.
 */
public boolean hasRemoteOutboundTasks() {
    // HashSet gives O(1) membership checks for the containsAll scan.
    return !new HashSet<>(localTaskIds).containsAll(outboundTasks);
}
/**
 * If all the tasks are local tasks, the topology has only one worker.
 *
 * <p>{@code Sets.difference} returns a lazy view, so no copy of the full task
 * set is made; only the local task ids are copied into a HashSet for fast
 * membership tests.
 *
 * @return true if this worker is the single worker; false otherwise.
 */
public boolean isSingleWorker() {
Set<Integer> nonLocalTasks = Sets.difference(getTaskToComponent().keySet(),
new HashSet<>(localTaskIds));
return nonLocalTasks.isEmpty();
}
/**
 * Halts the worker transfer thread (delegates to {@code WorkerTransfer}).
 */
public void haltWorkerTransfer() {
workerTransfer.haltTransferThd();
}
/**
 * @return the queue used to hand tuples to the worker transfer thread
 */
public JCQueue getTransferQueue() {
return workerTransfer.getTransferQueue();
}
/**
 * @return this worker's metric registry
 */
public StormMetricRegistry getMetricRegistry() {
return metricRegistry;
}
/**
 * Callback used to hand a batch of tuples addressed to local tasks over to
 * the local transfer machinery.
 */
public interface ILocalTransferCallback {
/**
 * Deliver a batch of locally-addressed tuples.
 *
 * @param tupleBatch the tuples to transfer to local executors
 */
void transfer(ArrayList<AddressedTuple> tupleBatch);
}
}
|
googleads/google-media-framework-android | 35,998 | googlemediaframework/src/main/java/com/google/android/libraries/mediaframework/layeredvideo/PlaybackControlLayer.java | /**
Copyright 2014 Google Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.google.android.libraries.mediaframework.layeredvideo;
import android.animation.Animator;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.pm.ActivityInfo;
import android.graphics.Color;
import android.graphics.PorterDuff;
import android.graphics.drawable.Drawable;
import android.os.Handler;
import android.os.Message;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.FrameLayout;
import android.widget.ImageButton;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.RelativeLayout;
import android.widget.SeekBar;
import android.widget.TextView;
import com.google.android.exoplayer.util.PlayerControl;
import com.google.android.libraries.mediaframework.R;
import com.google.android.libraries.mediaframework.exoplayerextensions.PlayerControlCallback;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.Formatter;
import java.util.List;
import java.util.Locale;
/**
* A {@link Layer} that creates a customizable view for controlling video playback.
*
* <p>The view consists of:
*
* <p> 1) a top chrome which contains a logo, title, and set of action buttons.
*
* <p> 2) a bottom chrome which contains a seek bar, fullscreen button, and text views indicating
* the current time and total duration of the video.
*
* <p> 3) a translucent middle section which displays a pause/play button.
*
* <p>The view appears when the container containing the {@link PlaybackControlLayer} is tapped. It
* automatically disappears after a given time.
*
* <p>The view can be customized by:
*
* <p> 1) Setting the color of the top chrome, bottom chrome, and background - this is called
* the chrome tint color.
*
* <p> 2) Setting the color of the text - this is called the text color.
*
* <p> 3) Setting the color of the buttons and seek bar - this is called the control tint color.
*
* <p> 4) Setting the logo image displayed in the left of the top chrome.
*
* <p> 5) Setting the title of the video displayed in the left of the top chrome
* (and to the right of the logo).
*
* <p> 6) Adding an action button by providing an image, a content description, and a click handler. If
* there is enough room, the action buttons will be displayed on the right of the top chrome. If
* there is NOT enough room, an overflow button will be displayed. When the overflow button is
* clicked, a dialog box listing the content descriptions for the action buttons is displayed. The
* action is then triggered by selecting it from the dialog box.
*
* <p>The view is defined in the layout file: res/layout/playback_control_layer.xml.
*/
public class PlaybackControlLayer implements Layer, PlayerControlCallback {
/**
 * In order to imbue the {@link PlaybackControlLayer} with the ability make the player fullscreen,
 * a {@link PlaybackControlLayer.FullscreenCallback} must be assigned to it. The
 * {@link PlaybackControlLayer.FullscreenCallback} implementation is responsible for
 * hiding/showing the other views on the screen when the player enters/leaves fullscreen
 * mode. When no callback is set, the fullscreen button is hidden.
 */
public interface FullscreenCallback {
/**
 * When triggered, the activity should hide any additional views.
 */
public void onGoToFullscreen();
/**
 * When triggered, the activity should show any views that were hidden when the player
 * went to fullscreen.
 */
public void onReturnFromFullscreen();
}
/**
 * The {@link PlaybackControlLayer.PlayCallback} implementation will be called when the player
 * plays the video (e.g. to request IMA ads) upon user taps on the play button.
 */
public interface PlayCallback {
/**
 * Called when playback starts.
 */
public void onPlay();
}
/**
 * Message handler which allows us to send delayed messages to the {@link PlaybackControlLayer}
 * This is useful for fading out the view after a certain time.
 */
private static class MessageHandler extends Handler {
/**
 * A reference to the {@link PlaybackControlLayer} that we are handling messages for.
 * Held weakly so a pending message cannot leak the layer (and its Activity).
 */
private final WeakReference<PlaybackControlLayer> playbackControlLayer;
/**
 * @param playbackControlLayer The {@link PlaybackControlLayer} we should handle messages for.
 */
private MessageHandler(PlaybackControlLayer playbackControlLayer) {
this.playbackControlLayer = new WeakReference<PlaybackControlLayer>(playbackControlLayer);
}
/**
 * Receives either a {@link PlaybackControlLayer#FADE_OUT} message (which hides the playback
 * control layer) or a {@link PlaybackControlLayer#SHOW_PROGRESS} message (which updates the
 * seek bar to reflect the progress in the video).
 * @param msg Either a {@link PlaybackControlLayer#FADE_OUT} or
 * {@link PlaybackControlLayer#SHOW_PROGRESS} message.
 */
@Override
public void handleMessage(Message msg) {
// The layer may have been garbage collected or not yet bound to a player.
PlaybackControlLayer layer = playbackControlLayer.get();
if (layer == null || layer.getLayerManager().getControl() == null) {
return;
}
int pos;
switch (msg.what) {
case FADE_OUT:
layer.hide();
break;
case SHOW_PROGRESS:
pos = layer.updateProgress();
// Keep updating only while visible, playing, and not mid-drag; re-post so
// the next update lands on the next whole second of playback.
if (!layer.isSeekbarDragging
&& layer.isVisible
&& layer.getLayerManager().getControl().isPlaying()) {
msg = obtainMessage(SHOW_PROGRESS);
sendMessageDelayed(msg, 1000 - (pos % 1000));
}
break;
}
}
}
/**
* The chrome (the top chrome, bottom chrome, and background) is by default a slightly
* transparent black.
*/
public static final int DEFAULT_CHROME_COLOR = Color.argb(140, 0, 0, 0);
/**
* By default, there is no tint to the controls.
*/
public static final int DEFAULT_CONTROL_TINT_COLOR = Color.TRANSPARENT;
/**
* By default, the text is white.
*/
public static final int DEFAULT_TEXT_COLOR = Color.WHITE;
/**
* When the playback controls are shown, hide them after DEFAULT_TIMEOUT_MS milliseconds.
*/
private static final int DEFAULT_TIMEOUT_MS = 2000;
/**
* When the controls are hidden, they fade out in FADE_OUT_DURATION_MS milliseconds.
*/
private static final int FADE_OUT_DURATION_MS = 400;
/**
* Used by the {@link MessageHandler} to indicate that media controls should fade out.
*/
private static final int FADE_OUT = 1;
/**
* Used by the {@link MessageHandler} to indicate that media controls should update progress bar.
*/
private static final int SHOW_PROGRESS = 2;
/**
* List of image buttons which are displayed in the right side of the top chrome.
*/
private List<ImageButton> actionButtons;
/**
* Whether the playback control layer is visible.
*/
private boolean isVisible;
/**
* Whether the playback control layer is currently in the process of fading out.
*/
private boolean isFadingOut;
/**
* Whether the user can drag the seek bar thumb to seek.
*/
private boolean canSeek;
/**
* <p> Derived from the Color class (ex. {@link Color#RED}), the chrome consists of three
* views, which are tinted with the chrome color.
*
* <p> The views are:
*
* <p> 1) The top chrome which contains the logo, title, and action buttons.
*
* <p> 2) The bottom chrome which contains the play/pause button, seek bar, and fullscreen
* buttons.
*
* <p> 3) The translucent middle section of the PlaybackControlLayer.
*
* <p> The chromeColor changes the color of each of these elements.
*/
private int chromeColor;
/**
* Derived from the {@link Color} class (ex {@link Color#RED}), this is the color of the
* play/pause button, fullscreen button, seek bar, and action buttons.
*/
private int controlColor;
/**
* Derived from the {@link Color} class (ex {@link Color#RED}), this is the color of the text
* views.
*/
private int textColor;
/**
* Derived from the {@link Color} class (ex {@link Color#RED}), this is the color of the seekbar
* track and thumb.
*/
private int seekbarColor;
/**
* Displays the elapsed time into video.
*/
private TextView currentTime;
/**
* Displays the duration of the video.
*/
private TextView endTime;
/**
* Makes player enter or leave fullscreen. This button is not displayed unless there is a
* {@link FullscreenCallback} associated with this object.
*/
private ImageButton fullscreenButton;
/**
* This callback is triggered when going to fullscreen and returning from fullscreen.
*/
private FullscreenCallback fullscreenCallback;
private PlayCallback playCallback;
/**
* The message handler which deals with displaying progress and fading out the media controls
* We use it so that we can make the view fade out after a timeout (by sending a delayed message).
*/
private Handler handler = new MessageHandler(this);
/**
* Whether the player is currently in fullscreen mode.
*/
private boolean isFullscreen;
/**
* Whether the seekbar is currently being dragged.
*/
private boolean isSeekbarDragging;
/**
* The {@link LayerManager} which is responsible for adding this layer to the container and
* displaying it on top of the video player.
*/
private LayerManager layerManager;
/**
* The drawable that will be displayed in the {@link PlaybackControlLayer#logoImageView}.
*/
private Drawable logoDrawable;
/**
* Displayed in the left of the top chrome - shows a logo. This is optional; if no image
* is provided, then no logo will be displayed.
*/
private ImageView logoImageView;
/**
* This is the layout of the container before fullscreen mode has been entered.
* When we leave fullscreen mode, we restore the layout of the container to this layout.
*/
private ViewGroup.LayoutParams originalContainerLayoutParams;
/**
* Contains the actions buttons (displayed in right of the top chrome).
*/
private LinearLayout actionButtonsContainer;
/**
* Displays the pause icon when the video is playing, or the play icon when the video is paused.
*/
private ImageButton pausePlayButton;
/**
* Displays a track and a thumb which can be used to seek to different time points in the video.
*/
private SeekBar seekBar;
/**
* Whether the play button has been pressed and the video should be playing.
* We include this variable because the video may pause when buffering must occur. Although
* the video will usually resume automatically when the buffering is complete, there are instances
* (i.e. ad playback), where it will not resume automatically. So, if we detect that the video is
* paused after buffering and should be playing, we can resume it programmatically.
*/
private boolean shouldBePlaying;
/**
* Encodes the HH:MM:SS or MM:SS time format.
*/
private StringBuilder timeFormat;
/**
* Formats times to HH:MM:SS or MM:SS form.
*/
private Formatter timeFormatter;
/**
* Contains the logo, video title, and other actions button. It can be tinted with a color for
* branding.
*/
private RelativeLayout topChrome;
/**
* This is the root view which contains all other views that make up the playback control layer.
* It can be tinted by setting the chrome color.
*/
private FrameLayout playbackControlRootView;
/**
* Contains the seek bar, current time, end time, and fullscreen button. The background can
* be tinted with a color for branding.
*/
private LinearLayout bottomChrome;
/**
* The title displayed in the {@link PlaybackControlLayer#videoTitleView}.
*/
private String videoTitle;
/**
* Video title displayed in the left of the top chrome.
*/
private TextView videoTitleView;
/**
* The view created by this {@link PlaybackControlLayer}
*/
private FrameLayout view;
/**
* Saved orientation for coming back from fullscreen.
*/
private int savedOrientation;
/**
 * Creates a playback control layer with no fullscreen support.
 *
 * @param videoTitle title shown in the left of the top chrome
 */
public PlaybackControlLayer(String videoTitle) {
this(videoTitle, null);
}
/**
 * Creates a playback control layer.
 *
 * @param videoTitle title shown in the left of the top chrome
 * @param fullscreenCallback invoked on fullscreen transitions; may be null, in
 *        which case the fullscreen button is not shown
 */
public PlaybackControlLayer(String videoTitle, FullscreenCallback fullscreenCallback) {
    this.videoTitle = videoTitle;
    this.fullscreenCallback = fullscreenCallback;
    // Seeking is allowed until disableSeeking() is called.
    this.canSeek = true;
    // Nothing should be playing until the user taps play.
    this.shouldBePlaying = false;
    this.actionButtons = new ArrayList<ImageButton>();
}
/**
 * Creates a button to put in the set of action buttons at the right of the top chrome.
 * @param activity The activity that contains the video player.
 * @param icon The image of the action (ex. trash can).
 * @param contentDescription The text description this action. This is used in case the
 *                           action buttons do not fit in the video player. If so, an overflow
 *                           button will appear and, when clicked, it will display a list of the
 *                           content descriptions for each action.
 * @param onClickListener The handler for when the action is triggered.
 */
public void addActionButton(Activity activity,
    Drawable icon,
    String contentDescription,
    View.OnClickListener onClickListener) {
    ImageButton button = new ImageButton(activity);
    button.setContentDescription(contentDescription);
    button.setImageDrawable(icon);
    button.setOnClickListener(onClickListener);
    FrameLayout.LayoutParams layoutParams = new FrameLayout.LayoutParams(
        ViewGroup.LayoutParams.WRAP_CONTENT,
        ViewGroup.LayoutParams.WRAP_CONTENT
    );
    // Convert 5dp to pixels. The dp->px scale factor is DisplayMetrics.density;
    // the previous code multiplied by densityDpi (dots per inch, e.g. 480),
    // which produced margins roughly 160x too large.
    int margin = (int) (5 * activity.getResources().getDisplayMetrics().density);
    layoutParams.setMargins(margin, 0, margin, 0);
    button.setBackgroundColor(Color.TRANSPARENT);
    button.setLayoutParams(layoutParams);
    // NOTE(review): a stray "isFullscreen = false;" was removed here — adding an
    // action button must not clobber the layer's fullscreen state.
    actionButtons.add(button);
    if (playbackControlRootView != null) {
        // The view already exists; re-render the buttons with current tints.
        updateActionButtons();
        updateColors();
    }
}
/**
 * Inflates the playback-control view, binds it to the layer manager, records the
 * container's original layout params (restored when leaving fullscreen), applies
 * default colors, and wires the tap-to-toggle visibility handler. The view
 * starts hidden; {@code show(int)} makes it visible.
 *
 * @param layerManager owner of the container and player control
 * @return the inflated root view for this layer
 */
@Override
public FrameLayout createView(LayerManager layerManager) {
this.layerManager = layerManager;
LayoutInflater inflater = layerManager.getActivity().getLayoutInflater();
view = (FrameLayout) inflater.inflate(R.layout.playback_control_layer, null);
setupView();
// Saved so the container can be restored when returning from fullscreen.
originalContainerLayoutParams = layerManager
.getContainer()
.getLayoutParams();
layerManager.getControl().addCallback(this);
savedOrientation = layerManager.getActivity().getResources().getConfiguration().orientation;
textColor = DEFAULT_TEXT_COLOR;
chromeColor = DEFAULT_CHROME_COLOR;
controlColor = DEFAULT_CONTROL_TINT_COLOR;
// Since the seek bar doesn't use image assets, we can't use TRANSPARENT as the default tint
// because that would make it invisible, so instead we use the default text tint (White).
seekbarColor = DEFAULT_TEXT_COLOR;
if (logoDrawable != null) {
logoImageView.setImageDrawable(logoDrawable);
}
// Tapping the container toggles the controls' visibility.
getLayerManager().getContainer().setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
if (isVisible) {
hide();
} else {
show();
}
}
});
// Make the view hidden initially. It will be made visible again in the show(timeout) method.
playbackControlRootView.setVisibility(View.INVISIBLE);
return view;
}
/**
 * Hides the seek bar thumb and prevents the user from seeking to different time points in the
 * video. Takes effect immediately if the view has already been created.
 */
public void disableSeeking() {
this.canSeek = false;
if (playbackControlRootView != null) {
updateColors();
}
}
/**
 * Fullscreen mode will rotate to landscape mode, hide the action bar, hide the navigation bar,
 * hide the system tray, and make the video player take up the full size of the display.
 * The developer who is using this function must ensure the following:
 *
 * <p>1) Inside the android manifest, the activity that uses the video player has the attribute
 * android:configChanges="orientation".
 *
 * <p>2) Other views in the activity (or fragment) are
 * hidden (or made visible) when this method is called.
 *
 * <p>No-op unless both a {@link FullscreenCallback} and a player control are
 * available. The pre-fullscreen orientation and container layout params are
 * saved and restored on exit.
 */
public void doToggleFullscreen() {
// If there is no callback for handling fullscreen, don't do anything.
if (fullscreenCallback == null) {
return;
}
PlayerControl playerControl = getLayerManager().getControl();
if (playerControl == null) {
return;
}
Activity activity = getLayerManager().getActivity();
FrameLayout container = getLayerManager().getContainer();
if (isFullscreen) {
fullscreenCallback.onReturnFromFullscreen();
activity.setRequestedOrientation(savedOrientation);
// Make the status bar and navigation bar visible again.
activity.getWindow().getDecorView().setSystemUiVisibility(0);
container.setLayoutParams(originalContainerLayoutParams);
fullscreenButton.setImageResource(R.drawable.ic_action_full_screen);
isFullscreen = false;
} else {
fullscreenCallback.onGoToFullscreen();
savedOrientation = activity.getResources().getConfiguration().orientation;
activity.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
activity.getWindow().getDecorView().setSystemUiVisibility(
View.SYSTEM_UI_FLAG_HIDE_NAVIGATION | View.SYSTEM_UI_FLAG_FULLSCREEN);
// Whenever the status bar and navigation bar appear, we want the playback controls to
// appear as well.
activity.getWindow().getDecorView().setOnSystemUiVisibilityChangeListener(
new View.OnSystemUiVisibilityChangeListener() {
@Override
public void onSystemUiVisibilityChange(int i) {
// By doing a logical AND, we check if the fullscreen option is triggered (i.e. the
// status bar is hidden). If the result of the logical AND is 0, that means that the
// fullscreen flag is NOT triggered. This means that the status bar is showing. If
// this is the case, then we show the playback controls as well (by calling show()).
if ((i & View.SYSTEM_UI_FLAG_FULLSCREEN) == 0) {
show();
}
}
}
);
container.setLayoutParams(Util.getLayoutParamsBasedOnParent(container,
ViewGroup.LayoutParams.MATCH_PARENT,
ViewGroup.LayoutParams.MATCH_PARENT));
fullscreenButton.setImageResource(R.drawable.ic_action_return_from_full_screen);
isFullscreen = true;
}
}
/**
 * Makes the seek bar thumb visible and allows the user to seek to different time points in the
 * video. Takes effect immediately if the view has already been created.
 */
public void enableSeeking() {
this.canSeek = true;
if (playbackControlRootView != null) {
updateColors();
}
}
/**
 * Returns the {@link LayerManager} which is responsible for displaying this layer's view.
 *
 * @return the layer manager assigned in {@code createView}; null before the
 *         view has been created
 */
public LayerManager getLayerManager() {
return layerManager;
}
/**
 * Fades the playback control layer out and then removes it from the {@link LayerManager}'s
 * container. No-op when a fade-out is already in progress, when there is no
 * container, or when the controls are not visible.
 */
public void hide() {
// Guard against re-entrant hide() while the fade-out animation is running.
if (isFadingOut) {
return;
}
final FrameLayout container = getLayerManager().getContainer();
if (container == null) {
return;
}
if (isVisible) {
isFadingOut = true;
playbackControlRootView.animate()
.alpha(0.0f)
.setDuration(FADE_OUT_DURATION_MS)
.setListener(new Animator.AnimatorListener() {
@Override
public void onAnimationStart(Animator animation) {}
@Override
public void onAnimationEnd(Animator animation) {
isFadingOut = false;
playbackControlRootView.setVisibility(View.INVISIBLE);
container.removeView(view);
// Make sure that the status bar and navigation bar are hidden when the playback
// controls are hidden.
if (isFullscreen) {
getLayerManager().getActivity().getWindow().getDecorView().setSystemUiVisibility(
View.SYSTEM_UI_FLAG_HIDE_NAVIGATION | View.SYSTEM_UI_FLAG_FULLSCREEN);
}
// Stop posting progress updates while hidden.
handler.removeMessages(SHOW_PROGRESS);
isVisible = false;
}
@Override
public void onAnimationCancel(Animator animation) {}
@Override
public void onAnimationRepeat(Animator animation) {}
});
}
}
/**
 * Add the playback control layer back to the container.
 * The playback controls disappear after timeout milliseconds.
 * @param timeout Hide the view after timeout milliseconds. If timeout == 0, then the playback
 *                controls will not disappear unless their container is tapped again.
 */
public void show(int timeout) {
if (!isVisible && getLayerManager().getContainer() != null) {
// Undo any fade-out alpha before re-showing.
playbackControlRootView.setAlpha(1.0f);
// Make the view visible.
playbackControlRootView.setVisibility(View.VISIBLE);
updateProgress();
// Add the view to the container again.
FrameLayout.LayoutParams layoutParams = new FrameLayout.LayoutParams(
ViewGroup.LayoutParams.MATCH_PARENT,
ViewGroup.LayoutParams.MATCH_PARENT,
Gravity.CENTER
);
// Remove first so the view is never attached twice.
getLayerManager().getContainer().removeView(view);
getLayerManager().getContainer().addView(view, layoutParams);
setupView();
isVisible = true;
}
updatePlayPauseButton();
handler.sendEmptyMessage(SHOW_PROGRESS);
// Replace any pending fade-out with a fresh one (or none when timeout == 0).
Message msg = handler.obtainMessage(FADE_OUT);
handler.removeMessages(FADE_OUT);
if (timeout > 0) {
handler.sendMessageDelayed(msg, timeout);
}
}
/**
 * Add the playback control layer back to the container. It will disappear when the user taps
 * the screen, or after {@link #DEFAULT_TIMEOUT_MS} milliseconds.
 */
public void show() {
show(DEFAULT_TIMEOUT_MS);
}
/**
 * Hides the top chrome (which displays the logo, title, and action buttons).
 */
public void hideTopChrome() {
topChrome.setVisibility(View.GONE);
}
/**
 * Shows the top chrome (which displays the logo, title, and action buttons),
 * re-rendering the action buttons and re-applying colors.
 */
public void showTopChrome() {
topChrome.setVisibility(View.VISIBLE);
updateActionButtons();
updateColors();
}
/**
 * Returns whether the player is currently in fullscreen mode.
 *
 * @return true when in fullscreen mode
 */
public boolean isFullscreen() {
return isFullscreen;
}
/**
 * Make the player enter or leave fullscreen mode.
 * @param shouldBeFullscreen If true, the player is put into fullscreen mode. If false, the player
 *                           leaves fullscreen mode. No-op when already in the requested state.
 */
public void setFullscreen(boolean shouldBeFullscreen) {
if (shouldBeFullscreen != isFullscreen) {
doToggleFullscreen();
}
}
/** No-op; this layer needs no extra work when first displayed. */
@Override
public void onLayerDisplayed(LayerManager layerManager) {}
/**
 * Updates the play/pause button to the play icon.
 */
@Override
public void onPause() {
updatePlayPauseButton();
}
/**
 * Updates the play/pause button to the pause icon and notifies the optional
 * {@link PlayCallback}, if one is registered.
 */
@Override
public void onPlay() {
updatePlayPauseButton();
if (playCallback != null) {
playCallback.onPlay();
}
}
/**
 * Sets the color of the top chrome, bottom chrome, and background. Takes effect
 * immediately if the view has been created.
 * @param color a color derived from the @{link Color} class (ex. {@link Color#RED}).
 */
public void setChromeColor(int color) {
chromeColor = color;
if (playbackControlRootView != null) {
updateColors();
}
}
/**
 * Sets the color of the buttons and seek bar. Takes effect immediately if the
 * view has been created.
 * @param color a color derived from the @{link Color} class (ex. {@link Color#RED}).
 */
public void setControlColor(int color) {
this.controlColor = color;
if (playbackControlRootView != null) {
updateColors();
updateActionButtons();
}
}
/**
 * Sets the color of the seekbar track and thumb. Takes effect immediately if
 * the view has been created.
 * @param color a color derived from the @{link Color} class (ex. {@link Color#RED}).
 */
public void setSeekbarColor(int color) {
this.seekbarColor = color;
if (playbackControlRootView != null) {
updateColors();
}
}
/**
 * Sets the color of the text views. Takes effect immediately if the view has
 * been created.
 * @param color a color derived from the @{link Color} class (ex. {@link Color#RED}).
 */
public void setTextColor(int color) {
this.textColor = color;
if (playbackControlRootView != null) {
updateColors();
}
}
/**
 * Set the callback which will be called when the player enters and leaves fullscreen mode.
 * @param fullscreenCallback The callback should hide other views in the activity when the player
 *                           enters fullscreen mode and show other views when the player leaves
 *                           fullscreen mode.
 */
public void setFullscreenCallback(FullscreenCallback fullscreenCallback) {
    this.fullscreenCallback = fullscreenCallback;
    // The fullscreen button is only meaningful when a callback is installed;
    // before the view exists there is no button to toggle.
    if (fullscreenButton != null) {
        fullscreenButton.setVisibility(
            fullscreenCallback != null ? View.VISIBLE : View.INVISIBLE);
    }
}
/**
 * Set the logo which appears in the left of the top chrome.
 * @param logo The drawable which will be the logo.
 */
public void setLogoImageView(Drawable logo) {
// Remember the drawable so it can be applied if the view is (re)created later.
logoDrawable = logo;
if (logoImageView != null) {
logoImageView.setImageDrawable(logo);
}
}
/**
 * Play or pause the player. No-op when no player control is available.
 * @param shouldPlay If true, then the player starts playing. If false, the player pauses.
 */
public void setPlayPause(boolean shouldPlay) {
PlayerControl playerControl = getLayerManager().getControl();
if (playerControl == null) {
return;
}
if (shouldPlay) {
playerControl.start();
} else {
playerControl.pause();
}
updatePlayPauseButton();
}
/**
 * Set the title of the video in the left of the top chrome (to the right of the logo).
 * @param title The video title. If it is too long, it will be ellipsized.
 */
public void setVideoTitle(String title) {
videoTitle = title;
if (videoTitleView != null) {
videoTitleView.setText(title);
}
}
/**
 * Perform binding to UI, setup of event handlers and initialization of values.
 * Called from {@code createView} and again each time the view is re-attached in
 * {@code show(int)}.
 */
private void setupView() {
// Bind fields to UI elements.
pausePlayButton = (ImageButton) view.findViewById(R.id.pause);
fullscreenButton = (ImageButton) view.findViewById((R.id.fullscreen));
seekBar = (SeekBar) view.findViewById(R.id.mediacontroller_progress);
videoTitleView = (TextView) view.findViewById(R.id.video_title);
endTime = (TextView) view.findViewById(R.id.time_duration);
currentTime = (TextView) view.findViewById(R.id.time_current);
logoImageView = (ImageView) view.findViewById(R.id.logo_image);
playbackControlRootView = (FrameLayout) view.findViewById(R.id.middle_section);
topChrome = (RelativeLayout) view.findViewById(R.id.top_chrome);
bottomChrome = (LinearLayout) view.findViewById(R.id.bottom_chrome);
actionButtonsContainer = (LinearLayout) view.findViewById(R.id.actions_container);
// The play button should toggle play/pause when the play/pause button is clicked.
pausePlayButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
togglePlayPause();
show(DEFAULT_TIMEOUT_MS);
}
});
// Without a fullscreen callback there is nothing for the button to do.
if (fullscreenCallback == null) {
fullscreenButton.setVisibility(View.INVISIBLE);
}
// Go into fullscreen when the fullscreen button is clicked.
fullscreenButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
doToggleFullscreen();
show(DEFAULT_TIMEOUT_MS);
updateActionButtons();
updateColors();
}
});
// The seek bar works in permille (0-1000) of the video duration.
seekBar.setMax(1000);
seekBar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
@Override
public void onProgressChanged(SeekBar seekBar, int progress, boolean fromuser) {
if (!fromuser || !canSeek) {
// Ignore programmatic changes to seek bar position.
// Ignore changes to seek bar position if seeking is not enabled.
return;
}
PlayerControl playerControl = getLayerManager().getControl();
long duration = playerControl.getDuration();
long newposition = (duration * progress) / 1000L;
playerControl.seekTo((int) newposition);
if (currentTime != null) {
currentTime.setText(stringForTime((int) newposition));
}
}
@Override
public void onStartTrackingTouch(SeekBar seekBar) {
// Pin the controls on screen and stop auto progress updates while dragging.
show(0);
isSeekbarDragging = true;
handler.removeMessages(SHOW_PROGRESS);
}
@Override
public void onStopTrackingTouch(SeekBar seekBar) {
isSeekbarDragging = false;
updateProgress();
updatePlayPauseButton();
show(DEFAULT_TIMEOUT_MS);
handler.sendEmptyMessage(SHOW_PROGRESS);
}
});
videoTitleView.setText(videoTitle);
timeFormat = new StringBuilder();
timeFormatter = new Formatter(timeFormat, Locale.getDefault());
}
/**
* Returns whether the player should be playing (based on whether the user has
* tapped pause or play). This can be used by other classes to look at the playback control
* layer's play/pause state and force the player to play or pause accordingly.
*/
public boolean shouldBePlaying() {
return shouldBePlaying;
}
/**
* Format the milliseconds to HH:MM:SS or MM:SS format.
*/
public String stringForTime(int timeMs) {
int totalSeconds = timeMs / 1000;
int seconds = totalSeconds % 60;
int minutes = (totalSeconds / 60) % 60;
int hours = totalSeconds / 3600;
timeFormat.setLength(0);
if (hours > 0) {
return timeFormatter.format("%d:%02d:%02d", hours, minutes, seconds).toString();
} else {
return timeFormatter.format("%02d:%02d", minutes, seconds).toString();
}
}
/**
* If the player is paused, play it and if the player is playing, pause it.
*/
public void togglePlayPause() {
this.shouldBePlaying = !getLayerManager().getControl().isPlaying();
setPlayPause(shouldBePlaying);
}
/**
* The action buttons are displayed in the top right of the video player. If the player is in
* portrait mode, then display an overflow button which displays a dialog window containing the
* possible actions. If the player is in landscape, then display the images for the actions in the
* top right of the video player.
*/
public void updateActionButtons() {
actionButtonsContainer.removeAllViews();
if (isFullscreen) {
for (ImageButton imageButton : actionButtons) {
actionButtonsContainer.addView(imageButton);
}
} else {
ImageButton overflowButton = new ImageButton(getLayerManager().getActivity());
overflowButton.setContentDescription(getLayerManager()
.getActivity()
.getString(R.string.overflow));
overflowButton.setImageDrawable(getLayerManager()
.getActivity()
.getResources()
.getDrawable(R.drawable.ic_action_overflow));
AlertDialog.Builder builder = new AlertDialog.Builder(getLayerManager().getActivity());
builder.setTitle(getLayerManager().getActivity().getString(R.string.select_an_action));
final CharSequence[] actions = new CharSequence[actionButtons.size()];
for (int i = 0; i < actionButtons.size(); i++) {
actions[i] = actionButtons.get(i).getContentDescription();
}
builder.setItems(actions, new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialogInterface, int i) {
actionButtons.get(i).performClick();
}
});
final AlertDialog alertDialog = builder.create();
overflowButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
alertDialog.show();
}
});
FrameLayout.LayoutParams layoutParams = new FrameLayout.LayoutParams(
ViewGroup.LayoutParams.WRAP_CONTENT,
ViewGroup.LayoutParams.WRAP_CONTENT
);
int margin = 5 * getLayerManager()
.getActivity()
.getResources()
.getDisplayMetrics()
.densityDpi;
layoutParams.setMargins(margin, 0, margin, 0);
overflowButton.setBackgroundColor(Color.TRANSPARENT);
overflowButton.setLayoutParams(layoutParams);
overflowButton.setColorFilter(controlColor);
actionButtonsContainer.addView(overflowButton);
}
}
/**
* Ensure that the chrome, control, and text colors displayed on the screen are correct.
*/
public void updateColors() {
currentTime.setTextColor(textColor);
endTime.setTextColor(textColor);
videoTitleView.setTextColor(textColor);
fullscreenButton.setColorFilter(controlColor);
pausePlayButton.setColorFilter(controlColor);
seekBar.getProgressDrawable().setColorFilter(seekbarColor, PorterDuff.Mode.SRC_ATOP);
seekBar.getThumb().setColorFilter(seekbarColor, PorterDuff.Mode.SRC_ATOP);
// Hide the thumb drawable if the SeekBar is disabled
if (canSeek) {
seekBar.getThumb().mutate().setAlpha(255);
} else {
seekBar.getThumb().mutate().setAlpha(0);
}
for (ImageButton imageButton : actionButtons) {
imageButton.setColorFilter(controlColor);
}
topChrome.setBackgroundColor(chromeColor);
bottomChrome.setBackgroundColor(chromeColor);
}
/**
* Change the icon of the play/pause button to indicate play or pause based on the state of the
* video player.
*/
public void updatePlayPauseButton() {
PlayerControl playerControl = getLayerManager().getControl();
if (view == null || pausePlayButton == null || playerControl == null) {
return;
}
if (playerControl.isPlaying()) {
pausePlayButton.setImageResource(R.drawable.ic_action_pause_large);
} else {
pausePlayButton.setImageResource(R.drawable.ic_action_play_large);
}
}
/**
* Adjust the position of the action bar to reflect the progress of the video.
*/
public int updateProgress() {
PlayerControl playerControl = getLayerManager().getControl();
if (playerControl == null || isSeekbarDragging) {
return 0;
}
int position = playerControl.getCurrentPosition();
int duration = playerControl.getDuration();
if (seekBar != null) {
if (duration > 0) {
long pos = 1000L * position / duration;
seekBar.setProgress((int) pos);
}
int percent = playerControl.getBufferPercentage();
seekBar.setSecondaryProgress(percent * 10);
}
if (endTime != null) {
endTime.setText(stringForTime(duration));
}
if (currentTime != null) {
currentTime.setText(stringForTime(position));
}
return position;
}
/**
* Set play callback
*/
public void setPlayCallback(PlayCallback playCallback) {
this.playCallback = playCallback;
}
} |
googleapis/google-cloud-java | 35,947 | java-datastream/proto-google-cloud-datastream-v1/src/main/java/com/google/cloud/datastream/v1/PostgresqlSchema.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/datastream/v1/datastream_resources.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.datastream.v1;
/**
*
*
* <pre>
* PostgreSQL schema.
* </pre>
*
* Protobuf type {@code google.cloud.datastream.v1.PostgresqlSchema}
*/
public final class PostgresqlSchema extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.datastream.v1.PostgresqlSchema)
PostgresqlSchemaOrBuilder {
private static final long serialVersionUID = 0L;
// Use PostgresqlSchema.newBuilder() to construct.
private PostgresqlSchema(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private PostgresqlSchema() {
schema_ = "";
postgresqlTables_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new PostgresqlSchema();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.datastream.v1.DatastreamResourcesProto
.internal_static_google_cloud_datastream_v1_PostgresqlSchema_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.datastream.v1.DatastreamResourcesProto
.internal_static_google_cloud_datastream_v1_PostgresqlSchema_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.datastream.v1.PostgresqlSchema.class,
com.google.cloud.datastream.v1.PostgresqlSchema.Builder.class);
}
public static final int SCHEMA_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object schema_ = "";
/**
*
*
* <pre>
* Schema name.
* </pre>
*
* <code>string schema = 1;</code>
*
* @return The schema.
*/
@java.lang.Override
public java.lang.String getSchema() {
java.lang.Object ref = schema_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
schema_ = s;
return s;
}
}
/**
*
*
* <pre>
* Schema name.
* </pre>
*
* <code>string schema = 1;</code>
*
* @return The bytes for schema.
*/
@java.lang.Override
public com.google.protobuf.ByteString getSchemaBytes() {
java.lang.Object ref = schema_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
schema_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int POSTGRESQL_TABLES_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.datastream.v1.PostgresqlTable> postgresqlTables_;
/**
*
*
* <pre>
* Tables in the schema.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.PostgresqlTable postgresql_tables = 2;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.datastream.v1.PostgresqlTable> getPostgresqlTablesList() {
return postgresqlTables_;
}
/**
*
*
* <pre>
* Tables in the schema.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.PostgresqlTable postgresql_tables = 2;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.datastream.v1.PostgresqlTableOrBuilder>
getPostgresqlTablesOrBuilderList() {
return postgresqlTables_;
}
/**
*
*
* <pre>
* Tables in the schema.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.PostgresqlTable postgresql_tables = 2;</code>
*/
@java.lang.Override
public int getPostgresqlTablesCount() {
return postgresqlTables_.size();
}
/**
*
*
* <pre>
* Tables in the schema.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.PostgresqlTable postgresql_tables = 2;</code>
*/
@java.lang.Override
public com.google.cloud.datastream.v1.PostgresqlTable getPostgresqlTables(int index) {
return postgresqlTables_.get(index);
}
/**
*
*
* <pre>
* Tables in the schema.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.PostgresqlTable postgresql_tables = 2;</code>
*/
@java.lang.Override
public com.google.cloud.datastream.v1.PostgresqlTableOrBuilder getPostgresqlTablesOrBuilder(
int index) {
return postgresqlTables_.get(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(schema_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, schema_);
}
for (int i = 0; i < postgresqlTables_.size(); i++) {
output.writeMessage(2, postgresqlTables_.get(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(schema_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, schema_);
}
for (int i = 0; i < postgresqlTables_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, postgresqlTables_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.datastream.v1.PostgresqlSchema)) {
return super.equals(obj);
}
com.google.cloud.datastream.v1.PostgresqlSchema other =
(com.google.cloud.datastream.v1.PostgresqlSchema) obj;
if (!getSchema().equals(other.getSchema())) return false;
if (!getPostgresqlTablesList().equals(other.getPostgresqlTablesList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + SCHEMA_FIELD_NUMBER;
hash = (53 * hash) + getSchema().hashCode();
if (getPostgresqlTablesCount() > 0) {
hash = (37 * hash) + POSTGRESQL_TABLES_FIELD_NUMBER;
hash = (53 * hash) + getPostgresqlTablesList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.datastream.v1.PostgresqlSchema parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datastream.v1.PostgresqlSchema parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datastream.v1.PostgresqlSchema parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datastream.v1.PostgresqlSchema parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datastream.v1.PostgresqlSchema parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datastream.v1.PostgresqlSchema parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datastream.v1.PostgresqlSchema parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.datastream.v1.PostgresqlSchema parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.datastream.v1.PostgresqlSchema parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.datastream.v1.PostgresqlSchema parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.datastream.v1.PostgresqlSchema parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.datastream.v1.PostgresqlSchema parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.datastream.v1.PostgresqlSchema prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* PostgreSQL schema.
* </pre>
*
* Protobuf type {@code google.cloud.datastream.v1.PostgresqlSchema}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.datastream.v1.PostgresqlSchema)
com.google.cloud.datastream.v1.PostgresqlSchemaOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.datastream.v1.DatastreamResourcesProto
.internal_static_google_cloud_datastream_v1_PostgresqlSchema_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.datastream.v1.DatastreamResourcesProto
.internal_static_google_cloud_datastream_v1_PostgresqlSchema_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.datastream.v1.PostgresqlSchema.class,
com.google.cloud.datastream.v1.PostgresqlSchema.Builder.class);
}
// Construct using com.google.cloud.datastream.v1.PostgresqlSchema.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
schema_ = "";
if (postgresqlTablesBuilder_ == null) {
postgresqlTables_ = java.util.Collections.emptyList();
} else {
postgresqlTables_ = null;
postgresqlTablesBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.datastream.v1.DatastreamResourcesProto
.internal_static_google_cloud_datastream_v1_PostgresqlSchema_descriptor;
}
@java.lang.Override
public com.google.cloud.datastream.v1.PostgresqlSchema getDefaultInstanceForType() {
return com.google.cloud.datastream.v1.PostgresqlSchema.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.datastream.v1.PostgresqlSchema build() {
com.google.cloud.datastream.v1.PostgresqlSchema result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.datastream.v1.PostgresqlSchema buildPartial() {
com.google.cloud.datastream.v1.PostgresqlSchema result =
new com.google.cloud.datastream.v1.PostgresqlSchema(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.datastream.v1.PostgresqlSchema result) {
if (postgresqlTablesBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)) {
postgresqlTables_ = java.util.Collections.unmodifiableList(postgresqlTables_);
bitField0_ = (bitField0_ & ~0x00000002);
}
result.postgresqlTables_ = postgresqlTables_;
} else {
result.postgresqlTables_ = postgresqlTablesBuilder_.build();
}
}
private void buildPartial0(com.google.cloud.datastream.v1.PostgresqlSchema result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.schema_ = schema_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.datastream.v1.PostgresqlSchema) {
return mergeFrom((com.google.cloud.datastream.v1.PostgresqlSchema) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.datastream.v1.PostgresqlSchema other) {
if (other == com.google.cloud.datastream.v1.PostgresqlSchema.getDefaultInstance())
return this;
if (!other.getSchema().isEmpty()) {
schema_ = other.schema_;
bitField0_ |= 0x00000001;
onChanged();
}
if (postgresqlTablesBuilder_ == null) {
if (!other.postgresqlTables_.isEmpty()) {
if (postgresqlTables_.isEmpty()) {
postgresqlTables_ = other.postgresqlTables_;
bitField0_ = (bitField0_ & ~0x00000002);
} else {
ensurePostgresqlTablesIsMutable();
postgresqlTables_.addAll(other.postgresqlTables_);
}
onChanged();
}
} else {
if (!other.postgresqlTables_.isEmpty()) {
if (postgresqlTablesBuilder_.isEmpty()) {
postgresqlTablesBuilder_.dispose();
postgresqlTablesBuilder_ = null;
postgresqlTables_ = other.postgresqlTables_;
bitField0_ = (bitField0_ & ~0x00000002);
postgresqlTablesBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getPostgresqlTablesFieldBuilder()
: null;
} else {
postgresqlTablesBuilder_.addAllMessages(other.postgresqlTables_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
schema_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
com.google.cloud.datastream.v1.PostgresqlTable m =
input.readMessage(
com.google.cloud.datastream.v1.PostgresqlTable.parser(), extensionRegistry);
if (postgresqlTablesBuilder_ == null) {
ensurePostgresqlTablesIsMutable();
postgresqlTables_.add(m);
} else {
postgresqlTablesBuilder_.addMessage(m);
}
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object schema_ = "";
/**
*
*
* <pre>
* Schema name.
* </pre>
*
* <code>string schema = 1;</code>
*
* @return The schema.
*/
public java.lang.String getSchema() {
java.lang.Object ref = schema_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
schema_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Schema name.
* </pre>
*
* <code>string schema = 1;</code>
*
* @return The bytes for schema.
*/
public com.google.protobuf.ByteString getSchemaBytes() {
java.lang.Object ref = schema_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
schema_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Schema name.
* </pre>
*
* <code>string schema = 1;</code>
*
* @param value The schema to set.
* @return This builder for chaining.
*/
public Builder setSchema(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
schema_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Schema name.
* </pre>
*
* <code>string schema = 1;</code>
*
* @return This builder for chaining.
*/
public Builder clearSchema() {
schema_ = getDefaultInstance().getSchema();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Schema name.
* </pre>
*
* <code>string schema = 1;</code>
*
* @param value The bytes for schema to set.
* @return This builder for chaining.
*/
public Builder setSchemaBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
schema_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.util.List<com.google.cloud.datastream.v1.PostgresqlTable> postgresqlTables_ =
java.util.Collections.emptyList();
private void ensurePostgresqlTablesIsMutable() {
if (!((bitField0_ & 0x00000002) != 0)) {
postgresqlTables_ =
new java.util.ArrayList<com.google.cloud.datastream.v1.PostgresqlTable>(
postgresqlTables_);
bitField0_ |= 0x00000002;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.datastream.v1.PostgresqlTable,
com.google.cloud.datastream.v1.PostgresqlTable.Builder,
com.google.cloud.datastream.v1.PostgresqlTableOrBuilder>
postgresqlTablesBuilder_;
/**
*
*
* <pre>
* Tables in the schema.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.PostgresqlTable postgresql_tables = 2;</code>
*/
public java.util.List<com.google.cloud.datastream.v1.PostgresqlTable>
getPostgresqlTablesList() {
if (postgresqlTablesBuilder_ == null) {
return java.util.Collections.unmodifiableList(postgresqlTables_);
} else {
return postgresqlTablesBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* Tables in the schema.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.PostgresqlTable postgresql_tables = 2;</code>
*/
public int getPostgresqlTablesCount() {
if (postgresqlTablesBuilder_ == null) {
return postgresqlTables_.size();
} else {
return postgresqlTablesBuilder_.getCount();
}
}
/**
*
*
* <pre>
* Tables in the schema.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.PostgresqlTable postgresql_tables = 2;</code>
*/
public com.google.cloud.datastream.v1.PostgresqlTable getPostgresqlTables(int index) {
if (postgresqlTablesBuilder_ == null) {
return postgresqlTables_.get(index);
} else {
return postgresqlTablesBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* Tables in the schema.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.PostgresqlTable postgresql_tables = 2;</code>
*/
public Builder setPostgresqlTables(
int index, com.google.cloud.datastream.v1.PostgresqlTable value) {
if (postgresqlTablesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensurePostgresqlTablesIsMutable();
postgresqlTables_.set(index, value);
onChanged();
} else {
postgresqlTablesBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Tables in the schema.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.PostgresqlTable postgresql_tables = 2;</code>
*/
public Builder setPostgresqlTables(
int index, com.google.cloud.datastream.v1.PostgresqlTable.Builder builderForValue) {
if (postgresqlTablesBuilder_ == null) {
ensurePostgresqlTablesIsMutable();
postgresqlTables_.set(index, builderForValue.build());
onChanged();
} else {
postgresqlTablesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Tables in the schema.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.PostgresqlTable postgresql_tables = 2;</code>
*/
public Builder addPostgresqlTables(com.google.cloud.datastream.v1.PostgresqlTable value) {
if (postgresqlTablesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensurePostgresqlTablesIsMutable();
postgresqlTables_.add(value);
onChanged();
} else {
postgresqlTablesBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* Tables in the schema.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.PostgresqlTable postgresql_tables = 2;</code>
*/
public Builder addPostgresqlTables(
int index, com.google.cloud.datastream.v1.PostgresqlTable value) {
if (postgresqlTablesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensurePostgresqlTablesIsMutable();
postgresqlTables_.add(index, value);
onChanged();
} else {
postgresqlTablesBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Tables in the schema.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.PostgresqlTable postgresql_tables = 2;</code>
*/
public Builder addPostgresqlTables(
com.google.cloud.datastream.v1.PostgresqlTable.Builder builderForValue) {
if (postgresqlTablesBuilder_ == null) {
ensurePostgresqlTablesIsMutable();
postgresqlTables_.add(builderForValue.build());
onChanged();
} else {
postgresqlTablesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Tables in the schema.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.PostgresqlTable postgresql_tables = 2;</code>
*/
public Builder addPostgresqlTables(
int index, com.google.cloud.datastream.v1.PostgresqlTable.Builder builderForValue) {
if (postgresqlTablesBuilder_ == null) {
ensurePostgresqlTablesIsMutable();
postgresqlTables_.add(index, builderForValue.build());
onChanged();
} else {
postgresqlTablesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Tables in the schema.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.PostgresqlTable postgresql_tables = 2;</code>
*/
public Builder addAllPostgresqlTables(
java.lang.Iterable<? extends com.google.cloud.datastream.v1.PostgresqlTable> values) {
if (postgresqlTablesBuilder_ == null) {
ensurePostgresqlTablesIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, postgresqlTables_);
onChanged();
} else {
postgresqlTablesBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* Tables in the schema.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.PostgresqlTable postgresql_tables = 2;</code>
*/
public Builder clearPostgresqlTables() {
if (postgresqlTablesBuilder_ == null) {
postgresqlTables_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
} else {
postgresqlTablesBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* Tables in the schema.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.PostgresqlTable postgresql_tables = 2;</code>
*/
public Builder removePostgresqlTables(int index) {
if (postgresqlTablesBuilder_ == null) {
ensurePostgresqlTablesIsMutable();
postgresqlTables_.remove(index);
onChanged();
} else {
postgresqlTablesBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* Tables in the schema.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.PostgresqlTable postgresql_tables = 2;</code>
*/
public com.google.cloud.datastream.v1.PostgresqlTable.Builder getPostgresqlTablesBuilder(
int index) {
return getPostgresqlTablesFieldBuilder().getBuilder(index);
}
    /**
     *
     *
     * <pre>
     * Tables in the schema.
     * </pre>
     *
     * <code>repeated .google.cloud.datastream.v1.PostgresqlTable postgresql_tables = 2;</code>
     */
    public com.google.cloud.datastream.v1.PostgresqlTableOrBuilder getPostgresqlTablesOrBuilder(
        int index) {
      if (postgresqlTablesBuilder_ == null) {
        // Read-only access never forces creation of the nested builder.
        return postgresqlTables_.get(index);
      } else {
        return postgresqlTablesBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     *
     *
     * <pre>
     * Tables in the schema.
     * </pre>
     *
     * <code>repeated .google.cloud.datastream.v1.PostgresqlTable postgresql_tables = 2;</code>
     */
    public java.util.List<? extends com.google.cloud.datastream.v1.PostgresqlTableOrBuilder>
        getPostgresqlTablesOrBuilderList() {
      if (postgresqlTablesBuilder_ != null) {
        return postgresqlTablesBuilder_.getMessageOrBuilderList();
      } else {
        // Wrap the inline list so callers cannot mutate builder state through the view.
        return java.util.Collections.unmodifiableList(postgresqlTables_);
      }
    }
    /**
     *
     *
     * <pre>
     * Tables in the schema.
     * </pre>
     *
     * <code>repeated .google.cloud.datastream.v1.PostgresqlTable postgresql_tables = 2;</code>
     */
    public com.google.cloud.datastream.v1.PostgresqlTable.Builder addPostgresqlTablesBuilder() {
      // Appends a builder seeded from the element default instance.
      return getPostgresqlTablesFieldBuilder()
          .addBuilder(com.google.cloud.datastream.v1.PostgresqlTable.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * Tables in the schema.
     * </pre>
     *
     * <code>repeated .google.cloud.datastream.v1.PostgresqlTable postgresql_tables = 2;</code>
     */
    public com.google.cloud.datastream.v1.PostgresqlTable.Builder addPostgresqlTablesBuilder(
        int index) {
      // Inserts a default-seeded builder at the given position.
      return getPostgresqlTablesFieldBuilder()
          .addBuilder(index, com.google.cloud.datastream.v1.PostgresqlTable.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * Tables in the schema.
     * </pre>
     *
     * <code>repeated .google.cloud.datastream.v1.PostgresqlTable postgresql_tables = 2;</code>
     */
    public java.util.List<com.google.cloud.datastream.v1.PostgresqlTable.Builder>
        getPostgresqlTablesBuilderList() {
      // Forces the nested-builder representation and exposes one builder per element.
      return getPostgresqlTablesFieldBuilder().getBuilderList();
    }
    // Lazily-created support object that tracks per-element builders for this repeated field.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.datastream.v1.PostgresqlTable,
            com.google.cloud.datastream.v1.PostgresqlTable.Builder,
            com.google.cloud.datastream.v1.PostgresqlTableOrBuilder>
        getPostgresqlTablesFieldBuilder() {
      if (postgresqlTablesBuilder_ == null) {
        // Migrate from the plain list to the builder; afterwards the builder owns the
        // elements, so the list reference is cleared to avoid dual sources of truth.
        postgresqlTablesBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.datastream.v1.PostgresqlTable,
                com.google.cloud.datastream.v1.PostgresqlTable.Builder,
                com.google.cloud.datastream.v1.PostgresqlTableOrBuilder>(
                postgresqlTables_,
                ((bitField0_ & 0x00000002) != 0),
                getParentForChildren(),
                isClean());
        postgresqlTables_ = null;
      }
      return postgresqlTablesBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      // Generated passthrough: unknown fields are stored verbatim by the base builder.
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      // Generated passthrough: merges unrecognized wire data into the base builder.
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.datastream.v1.PostgresqlSchema)
}
// @@protoc_insertion_point(class_scope:google.cloud.datastream.v1.PostgresqlSchema)
  // Singleton default instance, created eagerly when the class is initialized.
  private static final com.google.cloud.datastream.v1.PostgresqlSchema DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.datastream.v1.PostgresqlSchema();
  }
  public static com.google.cloud.datastream.v1.PostgresqlSchema getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser; builds a message from a CodedInputStream, preserving partial state.
  private static final com.google.protobuf.Parser<PostgresqlSchema> PARSER =
      new com.google.protobuf.AbstractParser<PostgresqlSchema>() {
        @java.lang.Override
        public PostgresqlSchema parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach whatever was parsed so far so callers can inspect the partial message.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures in the protobuf exception type expected by callers.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  // Static accessor for the shared parser instance.
  public static com.google.protobuf.Parser<PostgresqlSchema> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<PostgresqlSchema> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.datastream.v1.PostgresqlSchema getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/hop | 36,152 | plugins/actions/copymoveresultfilenames/src/main/java/org/apache/hop/workflow/actions/copymoveresultfilenames/ActionCopyMoveResultFilenamesDialog.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hop.workflow.actions.copymoveresultfilenames;
import org.apache.hop.core.Const;
import org.apache.hop.core.util.Utils;
import org.apache.hop.core.variables.IVariables;
import org.apache.hop.i18n.BaseMessages;
import org.apache.hop.ui.core.PropsUi;
import org.apache.hop.ui.core.dialog.BaseDialog;
import org.apache.hop.ui.core.dialog.MessageBox;
import org.apache.hop.ui.core.widget.TextVar;
import org.apache.hop.ui.pipeline.transform.BaseTransformDialog;
import org.apache.hop.ui.workflow.action.ActionDialog;
import org.apache.hop.ui.workflow.dialog.WorkflowDialog;
import org.apache.hop.workflow.WorkflowMeta;
import org.apache.hop.workflow.action.IAction;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.CCombo;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.layout.FormAttachment;
import org.eclipse.swt.layout.FormData;
import org.eclipse.swt.layout.FormLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Group;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.Text;
/** This dialog allows you to edit the Copy/Move result filenames action settings. */
public class ActionCopyMoveResultFilenamesDialog extends ActionDialog {
  private static final Class<?> PKG = ActionCopyMoveResultFilenames.class; // i18n bundle anchor
  // Action name entry.
  private Text wName;
  // "Limit to" wildcard controls (include / exclude patterns).
  private Button wSpecifyWildcard;
  private Label wlWildcard;
  private TextVar wWildcard;
  private Label wlWildcardExclude;
  private TextVar wWildcardExclude;
  // Copy / move / delete selector.
  private CCombo wAction;
  // The action being edited and its dirty flag as it was when the dialog opened.
  private ActionCopyMoveResultFilenames action;
  private boolean changed;
  // Destination folder selection.
  private Label wlFoldername;
  private Button wbFoldername;
  private TextVar wFoldername;
  // Date/time suffix options for destination file names.
  private Label wlAddDate;
  private Button wAddDate;
  private Label wlAddTime;
  private Button wAddTime;
  private Label wlSpecifyFormat;
  private Button wSpecifyFormat;
  private Label wlDateTimeFormat;
  private CCombo wDateTimeFormat;
  // Success-condition selection and its error threshold.
  private CCombo wSuccessCondition;
  private Label wlAddDateBeforeExtension;
  private Button wAddDateBeforeExtension;
  private Label wlNrErrorsLessThan;
  private TextVar wNrErrorsLessThan;
  // Behavioral toggles for the copy/move operation.
  private Label wlOverwriteFile;
  private Button wOverwriteFile;
  private Label wlCreateDestinationFolder;
  private Button wCreateDestinationFolder;
  private Label wlRemovedSourceFilename;
  private Button wRemovedSourceFilename;
  private Label wlAddDestinationFilename;
  private Button wAddDestinationFilename;
  /**
   * Creates the dialog for a Copy/Move result filenames action.
   *
   * @param parent parent shell
   * @param action the action whose settings are edited
   * @param workflowMeta the owning workflow metadata
   * @param variables variable space used to resolve expressions in the fields
   */
  public ActionCopyMoveResultFilenamesDialog(
      Shell parent,
      ActionCopyMoveResultFilenames action,
      WorkflowMeta workflowMeta,
      IVariables variables) {
    super(parent, workflowMeta, variables);
    this.action = action;
    if (this.action.getName() == null) {
      // NOTE(review): the default name reuses the "ActionDeleteResultFilenames" message key,
      // which looks like a copy-paste from the Delete action — confirm the bundle key or
      // introduce a dedicated "ActionCopyMoveResultFilenames.Name.Default" entry.
      this.action.setName(BaseMessages.getString(PKG, "ActionDeleteResultFilenames.Name.Default"));
    }
  }
  /**
   * Builds, lays out and opens the settings dialog, loads the current action state into the
   * widgets, and blocks until the dialog is dismissed.
   *
   * @return the edited action (OK) or the action with its changed flag restored (cancel)
   */
  @Override
  public IAction open() {
    Shell parent = getParent();
    shell = new Shell(parent, SWT.DIALOG_TRIM | SWT.MIN | SWT.MAX | SWT.RESIZE);
    PropsUi.setLook(shell);
    WorkflowDialog.setShellImage(shell, action);
    // Any edit in a text widget marks the action as changed.
    ModifyListener lsMod = e -> action.setChanged();
    changed = action.hasChanged();
    FormLayout formLayout = new FormLayout();
    formLayout.marginWidth = PropsUi.getFormMargin();
    formLayout.marginHeight = PropsUi.getFormMargin();
    shell.setLayout(formLayout);
    shell.setText(BaseMessages.getString(PKG, "ActionCopyMoveResultFilenames.Title"));
    int middle = props.getMiddlePct();
    int margin = PropsUi.getMargin();
    // Name line
    Label wlName = new Label(shell, SWT.RIGHT);
    wlName.setText(BaseMessages.getString(PKG, "ActionCopyMoveResultFilenames.Name.Label"));
    PropsUi.setLook(wlName);
    FormData fdlName = new FormData();
    fdlName.left = new FormAttachment(0, 0);
    fdlName.right = new FormAttachment(middle, -margin);
    fdlName.top = new FormAttachment(0, margin);
    wlName.setLayoutData(fdlName);
    wName = new Text(shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
    PropsUi.setLook(wName);
    wName.addModifyListener(lsMod);
    FormData fdName = new FormData();
    fdName.left = new FormAttachment(middle, 0);
    fdName.top = new FormAttachment(0, margin);
    fdName.right = new FormAttachment(100, 0);
    wName.setLayoutData(fdName);
    // Copy or Move
    Label wlAction = new Label(shell, SWT.RIGHT);
    wlAction.setText(BaseMessages.getString(PKG, "ActionCopyMoveResultFilenames.Action.Label"));
    PropsUi.setLook(wlAction);
    FormData fdlAction = new FormData();
    fdlAction.left = new FormAttachment(0, 0);
    fdlAction.right = new FormAttachment(middle, -margin);
    fdlAction.top = new FormAttachment(wName, 2 * margin);
    wlAction.setLayoutData(fdlAction);
    wAction = new CCombo(shell, SWT.SINGLE | SWT.READ_ONLY | SWT.BORDER);
    wAction.add(BaseMessages.getString(PKG, "ActionCopyMoveResultFilenames.Copy.Label"));
    wAction.add(BaseMessages.getString(PKG, "ActionCopyMoveResultFilenames.Move.Label"));
    wAction.add(BaseMessages.getString(PKG, "ActionCopyMoveResultFilenames.Delete.Label"));
    wAction.select(0); // +1: starts at -1
    PropsUi.setLook(wAction);
    FormData fdAction = new FormData();
    fdAction.left = new FormAttachment(middle, 0);
    fdAction.top = new FormAttachment(wName, 2 * margin);
    fdAction.right = new FormAttachment(100, 0);
    wAction.setLayoutData(fdAction);
    wAction.addSelectionListener(
        new SelectionAdapter() {
          @Override
          public void widgetSelected(SelectionEvent e) {
            // Selected operation determines which widgets are relevant.
            enableAction();
            action.setChanged();
          }
        });
    // Foldername line
    wlFoldername = new Label(shell, SWT.RIGHT);
    wlFoldername.setText(
        BaseMessages.getString(PKG, "ActionCopyMoveResultFilenames.Foldername.Label"));
    PropsUi.setLook(wlFoldername);
    FormData fdlFoldername = new FormData();
    fdlFoldername.left = new FormAttachment(0, 0);
    fdlFoldername.top = new FormAttachment(wAction, margin);
    fdlFoldername.right = new FormAttachment(middle, -margin);
    wlFoldername.setLayoutData(fdlFoldername);
    wbFoldername = new Button(shell, SWT.PUSH | SWT.CENTER);
    PropsUi.setLook(wbFoldername);
    wbFoldername.setText(BaseMessages.getString(PKG, "System.Button.Browse"));
    FormData fdbFoldername = new FormData();
    fdbFoldername.right = new FormAttachment(100, 0);
    fdbFoldername.top = new FormAttachment(wAction, 0);
    wbFoldername.setLayoutData(fdbFoldername);
    wFoldername = new TextVar(variables, shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
    PropsUi.setLook(wFoldername);
    wFoldername.addModifyListener(lsMod);
    FormData fdFoldername = new FormData();
    fdFoldername.left = new FormAttachment(middle, 0);
    fdFoldername.top = new FormAttachment(wAction, margin);
    fdFoldername.right = new FormAttachment(wbFoldername, -margin);
    wFoldername.setLayoutData(fdFoldername);
    // Whenever something changes, set the tooltip to the expanded version:
    wFoldername.addModifyListener(
        e -> wFoldername.setToolTipText(variables.resolve(wFoldername.getText())));
    wbFoldername.addListener(
        SWT.Selection, e -> BaseDialog.presentDirectoryDialog(shell, wFoldername, variables));
    // Create destination folder
    wlCreateDestinationFolder = new Label(shell, SWT.RIGHT);
    wlCreateDestinationFolder.setText(
        BaseMessages.getString(PKG, "ActionCopyMoveResultFilenames.CreateDestinationFolder.Label"));
    PropsUi.setLook(wlCreateDestinationFolder);
    FormData fdlCreateDestinationFolder = new FormData();
    fdlCreateDestinationFolder.left = new FormAttachment(0, 0);
    fdlCreateDestinationFolder.top = new FormAttachment(wFoldername, margin);
    fdlCreateDestinationFolder.right = new FormAttachment(middle, -margin);
    wlCreateDestinationFolder.setLayoutData(fdlCreateDestinationFolder);
    wCreateDestinationFolder = new Button(shell, SWT.CHECK);
    PropsUi.setLook(wCreateDestinationFolder);
    wCreateDestinationFolder.setToolTipText(
        BaseMessages.getString(
            PKG, "ActionCopyMoveResultFilenames.CreateDestinationFolder.Tooltip"));
    FormData fdCreateDestinationFolder = new FormData();
    fdCreateDestinationFolder.left = new FormAttachment(middle, 0);
    fdCreateDestinationFolder.top = new FormAttachment(wlCreateDestinationFolder, 0, SWT.CENTER);
    fdCreateDestinationFolder.right = new FormAttachment(100, 0);
    wCreateDestinationFolder.setLayoutData(fdCreateDestinationFolder);
    wCreateDestinationFolder.addSelectionListener(
        new SelectionAdapter() {
          @Override
          public void widgetSelected(SelectionEvent e) {
            action.setChanged();
          }
        });
    // Overwrite files
    wlOverwriteFile = new Label(shell, SWT.RIGHT);
    wlOverwriteFile.setText(
        BaseMessages.getString(PKG, "ActionCopyMoveResultFilenames.OverwriteFile.Label"));
    PropsUi.setLook(wlOverwriteFile);
    FormData fdlOverwriteFile = new FormData();
    fdlOverwriteFile.left = new FormAttachment(0, 0);
    fdlOverwriteFile.top = new FormAttachment(wlCreateDestinationFolder, 2 * margin);
    fdlOverwriteFile.right = new FormAttachment(middle, -margin);
    wlOverwriteFile.setLayoutData(fdlOverwriteFile);
    wOverwriteFile = new Button(shell, SWT.CHECK);
    PropsUi.setLook(wOverwriteFile);
    wOverwriteFile.setToolTipText(
        BaseMessages.getString(PKG, "ActionCopyMoveResultFilenames.OverwriteFile.Tooltip"));
    FormData fdOverwriteFile = new FormData();
    fdOverwriteFile.left = new FormAttachment(middle, 0);
    fdOverwriteFile.top = new FormAttachment(wlOverwriteFile, 0, SWT.CENTER);
    fdOverwriteFile.right = new FormAttachment(100, 0);
    wOverwriteFile.setLayoutData(fdOverwriteFile);
    wOverwriteFile.addSelectionListener(
        new SelectionAdapter() {
          @Override
          public void widgetSelected(SelectionEvent e) {
            action.setChanged();
          }
        });
    // Remove source filename from result filenames
    wlRemovedSourceFilename = new Label(shell, SWT.RIGHT);
    wlRemovedSourceFilename.setText(
        BaseMessages.getString(PKG, "ActionCopyMoveResultFilenames.RemovedSourceFilename.Label"));
    PropsUi.setLook(wlRemovedSourceFilename);
    FormData fdlRemovedSourceFilename = new FormData();
    fdlRemovedSourceFilename.left = new FormAttachment(0, 0);
    fdlRemovedSourceFilename.top = new FormAttachment(wlOverwriteFile, 2 * margin);
    fdlRemovedSourceFilename.right = new FormAttachment(middle, -margin);
    wlRemovedSourceFilename.setLayoutData(fdlRemovedSourceFilename);
    wRemovedSourceFilename = new Button(shell, SWT.CHECK);
    PropsUi.setLook(wRemovedSourceFilename);
    wRemovedSourceFilename.setToolTipText(
        BaseMessages.getString(PKG, "ActionCopyMoveResultFilenames.RemovedSourceFilename.Tooltip"));
    FormData fdRemovedSourceFilename = new FormData();
    fdRemovedSourceFilename.left = new FormAttachment(middle, 0);
    fdRemovedSourceFilename.top = new FormAttachment(wlRemovedSourceFilename, 0, SWT.CENTER);
    fdRemovedSourceFilename.right = new FormAttachment(100, 0);
    wRemovedSourceFilename.setLayoutData(fdRemovedSourceFilename);
    wRemovedSourceFilename.addSelectionListener(
        new SelectionAdapter() {
          @Override
          public void widgetSelected(SelectionEvent e) {
            action.setChanged();
          }
        });
    // Add destination filename to result filenames
    wlAddDestinationFilename = new Label(shell, SWT.RIGHT);
    wlAddDestinationFilename.setText(
        BaseMessages.getString(PKG, "ActionCopyMoveResultFilenames.AddDestinationFilename.Label"));
    PropsUi.setLook(wlAddDestinationFilename);
    FormData fdlAddDestinationFilename = new FormData();
    fdlAddDestinationFilename.left = new FormAttachment(0, 0);
    fdlAddDestinationFilename.top = new FormAttachment(wlRemovedSourceFilename, 2 * margin);
    fdlAddDestinationFilename.right = new FormAttachment(middle, -margin);
    wlAddDestinationFilename.setLayoutData(fdlAddDestinationFilename);
    wAddDestinationFilename = new Button(shell, SWT.CHECK);
    PropsUi.setLook(wAddDestinationFilename);
    wAddDestinationFilename.setToolTipText(
        BaseMessages.getString(
            PKG, "ActionCopyMoveResultFilenames.AddDestinationFilename.Tooltip"));
    FormData fdAddDestinationFilename = new FormData();
    fdAddDestinationFilename.left = new FormAttachment(middle, 0);
    fdAddDestinationFilename.top = new FormAttachment(wlAddDestinationFilename, 0, SWT.CENTER);
    fdAddDestinationFilename.right = new FormAttachment(100, 0);
    wAddDestinationFilename.setLayoutData(fdAddDestinationFilename);
    wAddDestinationFilename.addSelectionListener(
        new SelectionAdapter() {
          @Override
          public void widgetSelected(SelectionEvent e) {
            action.setChanged();
          }
        });
    // Create multi-part file?
    wlAddDate = new Label(shell, SWT.RIGHT);
    wlAddDate.setText(BaseMessages.getString(PKG, "ActionCopyMoveResultFilenames.AddDate.Label"));
    PropsUi.setLook(wlAddDate);
    FormData fdlAddDate = new FormData();
    fdlAddDate.left = new FormAttachment(0, 0);
    fdlAddDate.top = new FormAttachment(wlAddDestinationFilename, 2 * margin);
    fdlAddDate.right = new FormAttachment(middle, -margin);
    wlAddDate.setLayoutData(fdlAddDate);
    wAddDate = new Button(shell, SWT.CHECK);
    PropsUi.setLook(wAddDate);
    wAddDate.setToolTipText(
        BaseMessages.getString(PKG, "ActionCopyMoveResultFilenames.AddDate.Tooltip"));
    FormData fdAddDate = new FormData();
    fdAddDate.left = new FormAttachment(middle, 0);
    fdAddDate.top = new FormAttachment(wlAddDate, 0, SWT.CENTER);
    fdAddDate.right = new FormAttachment(100, 0);
    wAddDate.setLayoutData(fdAddDate);
    wAddDate.addSelectionListener(
        new SelectionAdapter() {
          @Override
          public void widgetSelected(SelectionEvent e) {
            action.setChanged();
            setAddDateBeforeExtension();
          }
        });
    // Create multi-part file?
    wlAddTime = new Label(shell, SWT.RIGHT);
    wlAddTime.setText(BaseMessages.getString(PKG, "ActionCopyMoveResultFilenames.AddTime.Label"));
    PropsUi.setLook(wlAddTime);
    FormData fdlAddTime = new FormData();
    fdlAddTime.left = new FormAttachment(0, 0);
    fdlAddTime.top = new FormAttachment(wlAddDate, 2 * margin);
    fdlAddTime.right = new FormAttachment(middle, -margin);
    wlAddTime.setLayoutData(fdlAddTime);
    wAddTime = new Button(shell, SWT.CHECK);
    PropsUi.setLook(wAddTime);
    wAddTime.setToolTipText(
        BaseMessages.getString(PKG, "ActionCopyMoveResultFilenames.AddTime.Tooltip"));
    FormData fdAddTime = new FormData();
    fdAddTime.left = new FormAttachment(middle, 0);
    fdAddTime.top = new FormAttachment(wlAddTime, 0, SWT.CENTER);
    fdAddTime.right = new FormAttachment(100, 0);
    wAddTime.setLayoutData(fdAddTime);
    wAddTime.addSelectionListener(
        new SelectionAdapter() {
          @Override
          public void widgetSelected(SelectionEvent e) {
            action.setChanged();
            setAddDateBeforeExtension();
          }
        });
    // Specify date time format?
    wlSpecifyFormat = new Label(shell, SWT.RIGHT);
    wlSpecifyFormat.setText(
        BaseMessages.getString(PKG, "ActionCopyMoveResultFilenames.SpecifyFormat.Label"));
    PropsUi.setLook(wlSpecifyFormat);
    FormData fdlSpecifyFormat = new FormData();
    fdlSpecifyFormat.left = new FormAttachment(0, 0);
    fdlSpecifyFormat.top = new FormAttachment(wlAddTime, 2 * margin);
    fdlSpecifyFormat.right = new FormAttachment(middle, -margin);
    wlSpecifyFormat.setLayoutData(fdlSpecifyFormat);
    wSpecifyFormat = new Button(shell, SWT.CHECK);
    PropsUi.setLook(wSpecifyFormat);
    wSpecifyFormat.setToolTipText(
        BaseMessages.getString(PKG, "ActionCopyMoveResultFilenames.SpecifyFormat.Tooltip"));
    FormData fdSpecifyFormat = new FormData();
    fdSpecifyFormat.left = new FormAttachment(middle, 0);
    fdSpecifyFormat.top = new FormAttachment(wlSpecifyFormat, 0, SWT.CENTER);
    fdSpecifyFormat.right = new FormAttachment(100, 0);
    wSpecifyFormat.setLayoutData(fdSpecifyFormat);
    wSpecifyFormat.addSelectionListener(
        new SelectionAdapter() {
          @Override
          public void widgetSelected(SelectionEvent e) {
            action.setChanged();
            setDateTimeFormat();
            setAddDateBeforeExtension();
          }
        });
    // Prepare a list of possible DateTimeFormats...
    String[] dats = Const.getDateFormats();
    // DateTimeFormat
    wlDateTimeFormat = new Label(shell, SWT.RIGHT);
    wlDateTimeFormat.setText(
        BaseMessages.getString(PKG, "ActionCopyMoveResultFilenames.DateTimeFormat.Label"));
    PropsUi.setLook(wlDateTimeFormat);
    FormData fdlDateTimeFormat = new FormData();
    fdlDateTimeFormat.left = new FormAttachment(0, 0);
    fdlDateTimeFormat.top = new FormAttachment(wlSpecifyFormat, 2 * margin);
    fdlDateTimeFormat.right = new FormAttachment(middle, -margin);
    wlDateTimeFormat.setLayoutData(fdlDateTimeFormat);
    wDateTimeFormat = new CCombo(shell, SWT.BORDER | SWT.READ_ONLY);
    // NOTE(review): created READ_ONLY but then made editable — confirm this matches intent.
    wDateTimeFormat.setEditable(true);
    PropsUi.setLook(wDateTimeFormat);
    wDateTimeFormat.addModifyListener(lsMod);
    FormData fdDateTimeFormat = new FormData();
    fdDateTimeFormat.left = new FormAttachment(middle, 0);
    fdDateTimeFormat.top = new FormAttachment(wlSpecifyFormat, 2 * margin);
    fdDateTimeFormat.right = new FormAttachment(100, 0);
    wDateTimeFormat.setLayoutData(fdDateTimeFormat);
    for (String dat : dats) {
      wDateTimeFormat.add(dat);
    }
    // Add Date before extension?
    wlAddDateBeforeExtension = new Label(shell, SWT.RIGHT);
    wlAddDateBeforeExtension.setText(
        BaseMessages.getString(PKG, "ActionCopyMoveResultFilenames.AddDateBeforeExtension.Label"));
    PropsUi.setLook(wlAddDateBeforeExtension);
    FormData fdlAddDateBeforeExtension = new FormData();
    fdlAddDateBeforeExtension.left = new FormAttachment(0, 0);
    fdlAddDateBeforeExtension.top = new FormAttachment(wDateTimeFormat, margin);
    fdlAddDateBeforeExtension.right = new FormAttachment(middle, -margin);
    wlAddDateBeforeExtension.setLayoutData(fdlAddDateBeforeExtension);
    wAddDateBeforeExtension = new Button(shell, SWT.CHECK);
    PropsUi.setLook(wAddDateBeforeExtension);
    wAddDateBeforeExtension.setToolTipText(
        BaseMessages.getString(
            PKG, "ActionCopyMoveResultFilenames.AddDateBeforeExtension.Tooltip"));
    FormData fdAddDateBeforeExtension = new FormData();
    fdAddDateBeforeExtension.left = new FormAttachment(middle, 0);
    fdAddDateBeforeExtension.top = new FormAttachment(wlAddDateBeforeExtension, 0, SWT.CENTER);
    fdAddDateBeforeExtension.right = new FormAttachment(100, 0);
    wAddDateBeforeExtension.setLayoutData(fdAddDateBeforeExtension);
    wAddDateBeforeExtension.addSelectionListener(
        new SelectionAdapter() {
          @Override
          public void widgetSelected(SelectionEvent e) {
            action.setChanged();
            checkLimit();
          }
        });
    // LimitTo grouping?
    // ////////////////////////
    // START OF LimitTo GROUP
    //
    Group wLimitTo = new Group(shell, SWT.SHADOW_NONE);
    PropsUi.setLook(wLimitTo);
    wLimitTo.setText(
        BaseMessages.getString(PKG, "ActionCopyMoveResultFilenames.Group.LimitTo.Label"));
    FormLayout groupLayout = new FormLayout();
    groupLayout.marginWidth = 10;
    groupLayout.marginHeight = 10;
    wLimitTo.setLayout(groupLayout);
    // Specify wildcard?
    Label wlSpecifyWildcard = new Label(wLimitTo, SWT.RIGHT);
    wlSpecifyWildcard.setText(
        BaseMessages.getString(PKG, "ActionCopyMoveResultFilenames.SpecifyWildcard.Label"));
    PropsUi.setLook(wlSpecifyWildcard);
    FormData fdlSpecifyWildcard = new FormData();
    fdlSpecifyWildcard.left = new FormAttachment(0, 0);
    fdlSpecifyWildcard.top = new FormAttachment(wAddDateBeforeExtension, margin);
    fdlSpecifyWildcard.right = new FormAttachment(middle, -margin);
    wlSpecifyWildcard.setLayoutData(fdlSpecifyWildcard);
    wSpecifyWildcard = new Button(wLimitTo, SWT.CHECK);
    PropsUi.setLook(wSpecifyWildcard);
    wSpecifyWildcard.setToolTipText(
        BaseMessages.getString(PKG, "ActionCopyMoveResultFilenames.SpecifyWildcard.Tooltip"));
    FormData fdSpecifyWildcard = new FormData();
    fdSpecifyWildcard.left = new FormAttachment(middle, 0);
    fdSpecifyWildcard.top = new FormAttachment(wlSpecifyWildcard, 0, SWT.CENTER);
    fdSpecifyWildcard.right = new FormAttachment(100, 0);
    wSpecifyWildcard.setLayoutData(fdSpecifyWildcard);
    wSpecifyWildcard.addSelectionListener(
        new SelectionAdapter() {
          @Override
          public void widgetSelected(SelectionEvent e) {
            action.setChanged();
            checkLimit();
          }
        });
    // Wildcard line
    wlWildcard = new Label(wLimitTo, SWT.RIGHT);
    wlWildcard.setText(BaseMessages.getString(PKG, "ActionCopyMoveResultFilenames.Wildcard.Label"));
    PropsUi.setLook(wlWildcard);
    FormData fdlWildcard = new FormData();
    fdlWildcard.left = new FormAttachment(0, 0);
    fdlWildcard.top = new FormAttachment(wlSpecifyWildcard, 2 * margin);
    fdlWildcard.right = new FormAttachment(middle, -margin);
    wlWildcard.setLayoutData(fdlWildcard);
    wWildcard = new TextVar(variables, wLimitTo, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
    wWildcard.setToolTipText(
        BaseMessages.getString(PKG, "ActionCopyMoveResultFilenames.Wildcard.Tooltip"));
    PropsUi.setLook(wWildcard);
    wWildcard.addModifyListener(lsMod);
    FormData fdWildcard = new FormData();
    fdWildcard.left = new FormAttachment(middle, 0);
    fdWildcard.top = new FormAttachment(wlSpecifyWildcard, 2 * margin);
    fdWildcard.right = new FormAttachment(100, -margin);
    wWildcard.setLayoutData(fdWildcard);
    // Whenever something changes, set the tooltip to the expanded version:
    wWildcard.addModifyListener(
        e -> wWildcard.setToolTipText(variables.resolve(wWildcard.getText())));
    // wWildcardExclude
    wlWildcardExclude = new Label(wLimitTo, SWT.RIGHT);
    wlWildcardExclude.setText(
        BaseMessages.getString(PKG, "ActionCopyMoveResultFilenames.WildcardExclude.Label"));
    PropsUi.setLook(wlWildcardExclude);
    FormData fdlWildcardExclude = new FormData();
    fdlWildcardExclude.left = new FormAttachment(0, 0);
    fdlWildcardExclude.top = new FormAttachment(wWildcard, margin);
    fdlWildcardExclude.right = new FormAttachment(middle, -margin);
    wlWildcardExclude.setLayoutData(fdlWildcardExclude);
    wWildcardExclude = new TextVar(variables, wLimitTo, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
    wWildcardExclude.setToolTipText(
        BaseMessages.getString(PKG, "ActionCopyMoveResultFilenames.WildcardExclude.Tooltip"));
    PropsUi.setLook(wWildcardExclude);
    wWildcardExclude.addModifyListener(lsMod);
    FormData fdWildcardExclude = new FormData();
    fdWildcardExclude.left = new FormAttachment(middle, 0);
    fdWildcardExclude.top = new FormAttachment(wWildcard, margin);
    fdWildcardExclude.right = new FormAttachment(100, -margin);
    wWildcardExclude.setLayoutData(fdWildcardExclude);
    // Whenever something changes, set the tooltip to the expanded version:
    wWildcardExclude.addModifyListener(
        e -> wWildcardExclude.setToolTipText(variables.resolve(wWildcardExclude.getText())));
    FormData fdLimitTo = new FormData();
    fdLimitTo.left = new FormAttachment(0, margin);
    fdLimitTo.top = new FormAttachment(wAddDateBeforeExtension, margin);
    fdLimitTo.right = new FormAttachment(100, -margin);
    wLimitTo.setLayoutData(fdLimitTo);
    // ///////////////////////////////////////////////////////////
    // / END OF LimitTo GROUP
    // ///////////////////////////////////////////////////////////
    // SuccessOngrouping?
    // ////////////////////////
    // START OF SUCCESS ON GROUP///
    // /
    Group wSuccessOn = new Group(shell, SWT.SHADOW_NONE);
    PropsUi.setLook(wSuccessOn);
    wSuccessOn.setText(
        BaseMessages.getString(PKG, "ActionCopyMoveResultFilenames.SuccessOn.Group.Label"));
    FormLayout successongroupLayout = new FormLayout();
    successongroupLayout.marginWidth = 10;
    successongroupLayout.marginHeight = 10;
    wSuccessOn.setLayout(successongroupLayout);
    // Success Condition
    Label wlSuccessCondition = new Label(wSuccessOn, SWT.RIGHT);
    wlSuccessCondition.setText(
        BaseMessages.getString(PKG, "ActionCopyMoveResultFilenames.SuccessCondition.Label"));
    PropsUi.setLook(wlSuccessCondition);
    FormData fdlSuccessCondition = new FormData();
    fdlSuccessCondition.left = new FormAttachment(0, 0);
    fdlSuccessCondition.right = new FormAttachment(middle, 0);
    fdlSuccessCondition.top = new FormAttachment(wLimitTo, 2 * margin);
    wlSuccessCondition.setLayoutData(fdlSuccessCondition);
    wSuccessCondition = new CCombo(wSuccessOn, SWT.SINGLE | SWT.READ_ONLY | SWT.BORDER);
    wSuccessCondition.add(
        BaseMessages.getString(PKG, "ActionCopyMoveResultFilenames.SuccessWhenAllWorksFine.Label"));
    wSuccessCondition.add(
        BaseMessages.getString(PKG, "ActionCopyMoveResultFilenames.SuccessWhenAtLeat.Label"));
    wSuccessCondition.add(
        BaseMessages.getString(
            PKG, "ActionCopyMoveResultFilenames.SuccessWhenErrorsLessThan.Label"));
    wSuccessCondition.select(0); // +1: starts at -1
    PropsUi.setLook(wSuccessCondition);
    FormData fdSuccessCondition = new FormData();
    fdSuccessCondition.left = new FormAttachment(middle, 0);
    fdSuccessCondition.top = new FormAttachment(wLimitTo, 2 * margin);
    fdSuccessCondition.right = new FormAttachment(100, 0);
    wSuccessCondition.setLayoutData(fdSuccessCondition);
    wSuccessCondition.addSelectionListener(
        new SelectionAdapter() {
          @Override
          public void widgetSelected(SelectionEvent e) {
            activeSuccessCondition();
          }
        });
    // Success when number of errors less than
    wlNrErrorsLessThan = new Label(wSuccessOn, SWT.RIGHT);
    wlNrErrorsLessThan.setText(
        BaseMessages.getString(PKG, "ActionCopyMoveResultFilenames.NrErrorsLessThan.Label"));
    PropsUi.setLook(wlNrErrorsLessThan);
    FormData fdlNrErrorsLessThan = new FormData();
    fdlNrErrorsLessThan.left = new FormAttachment(0, 0);
    fdlNrErrorsLessThan.top = new FormAttachment(wSuccessCondition, margin);
    fdlNrErrorsLessThan.right = new FormAttachment(middle, -margin);
    wlNrErrorsLessThan.setLayoutData(fdlNrErrorsLessThan);
    wNrErrorsLessThan =
        new TextVar(
            variables,
            wSuccessOn,
            SWT.SINGLE | SWT.LEFT | SWT.BORDER,
            BaseMessages.getString(PKG, "ActionCopyMoveResultFilenames.NrErrorsLessThan.Tooltip"));
    PropsUi.setLook(wNrErrorsLessThan);
    wNrErrorsLessThan.addModifyListener(lsMod);
    FormData fdNrErrorsLessThan = new FormData();
    fdNrErrorsLessThan.left = new FormAttachment(middle, 0);
    fdNrErrorsLessThan.top = new FormAttachment(wSuccessCondition, margin);
    fdNrErrorsLessThan.right = new FormAttachment(100, -margin);
    wNrErrorsLessThan.setLayoutData(fdNrErrorsLessThan);
    FormData fdSuccessOn = new FormData();
    fdSuccessOn.left = new FormAttachment(0, margin);
    fdSuccessOn.top = new FormAttachment(wLimitTo, margin);
    fdSuccessOn.right = new FormAttachment(100, -margin);
    wSuccessOn.setLayoutData(fdSuccessOn);
    // ///////////////////////////////////////////////////////////
    // / END OF Success ON GROUP
    // ///////////////////////////////////////////////////////////
    Button wOk = new Button(shell, SWT.PUSH);
    wOk.setText(BaseMessages.getString(PKG, "System.Button.OK"));
    wOk.addListener(SWT.Selection, e -> ok());
    Button wCancel = new Button(shell, SWT.PUSH);
    wCancel.setText(BaseMessages.getString(PKG, "System.Button.Cancel"));
    wCancel.addListener(SWT.Selection, e -> cancel());
    BaseTransformDialog.positionBottomButtons(
        shell, new Button[] {wOk, wCancel}, margin, wSuccessOn);
    // Load the action state into the widgets and synchronize enablement before showing.
    getData();
    checkLimit();
    setDateTimeFormat();
    activeSuccessCondition();
    setAddDateBeforeExtension();
    enableAction();
    BaseDialog.defaultShellHandling(shell, c -> ok(), c -> cancel());
    return action;
  }
private void setAddDateBeforeExtension() {
wlAddDateBeforeExtension.setEnabled(
wAddDate.getSelection() || wAddTime.getSelection() || wSpecifyFormat.getSelection());
wAddDateBeforeExtension.setEnabled(
wAddDate.getSelection() || wAddTime.getSelection() || wSpecifyFormat.getSelection());
if (!wAddDate.getSelection() && !wAddTime.getSelection() && !wSpecifyFormat.getSelection()) {
wAddDateBeforeExtension.setSelection(false);
}
}
private void activeSuccessCondition() {
wlNrErrorsLessThan.setEnabled(wSuccessCondition.getSelectionIndex() != 0);
wNrErrorsLessThan.setEnabled(wSuccessCondition.getSelectionIndex() != 0);
}
private void checkLimit() {
wlWildcard.setEnabled(wSpecifyWildcard.getSelection());
wWildcard.setEnabled(wSpecifyWildcard.getSelection());
wlWildcardExclude.setEnabled(wSpecifyWildcard.getSelection());
wWildcardExclude.setEnabled(wSpecifyWildcard.getSelection());
}
private void setDateTimeFormat() {
  // A custom date/time mask is mutually exclusive with the simple
  // add-date / add-time checkboxes: selecting the mask clears them.
  boolean useMask = wSpecifyFormat.getSelection();

  if (useMask) {
    wAddDate.setSelection(false);
    wAddTime.setSelection(false);
  }

  wDateTimeFormat.setEnabled(useMask);
  wlDateTimeFormat.setEnabled(useMask);
  wAddDate.setEnabled(!useMask);
  wlAddDate.setEnabled(!useMask);
  wAddTime.setEnabled(!useMask);
  wlAddTime.setEnabled(!useMask);
}
/** Copy information from the meta-data input to the dialog fields. */
public void getData() {
  if (action.getName() != null) {
    wName.setText(action.getName());
  }

  wSpecifyWildcard.setSelection(action.isSpecifyWildcard());
  if (action.getWildcard() != null) {
    wWildcard.setText(action.getWildcard());
  }
  if (action.getWildcardExclude() != null) {
    wWildcardExclude.setText(action.getWildcardExclude());
  }
  if (action.getDestinationFolder() != null) {
    wFoldername.setText(action.getDestinationFolder());
  }

  // Default the error threshold to 10 when nothing was stored yet.
  String nrErrors = action.getNrErrorsLessThan();
  wNrErrorsLessThan.setText(nrErrors != null ? nrErrors : "10");

  // Map the stored success condition onto the combo index (0 = no errors).
  int successIndex = 0;
  String successCondition = action.getSuccessCondition();
  if (ActionCopyMoveResultFilenames.SUCCESS_IF_AT_LEAST_X_FILES.equals(successCondition)) {
    successIndex = 1;
  } else if (ActionCopyMoveResultFilenames.SUCCESS_IF_ERRORS_LESS.equals(successCondition)) {
    successIndex = 2;
  }
  wSuccessCondition.select(successIndex);

  // Map the stored operation name onto the combo index (0 = copy).
  int actionIndex = 0;
  if ("move".equals(action.getAction())) {
    actionIndex = 1;
  } else if ("delete".equals(action.getAction())) {
    actionIndex = 2;
  }
  wAction.select(actionIndex);

  if (action.getDateTimeFormat() != null) {
    wDateTimeFormat.setText(action.getDateTimeFormat());
  }
  wAddDate.setSelection(action.isAddDate());
  wAddTime.setSelection(action.isAddTime());
  wSpecifyFormat.setSelection(action.isSpecifyFormat());
  wAddDateBeforeExtension.setSelection(action.isAddDateBeforeExtension());
  wOverwriteFile.setSelection(action.isOverwriteFile());
  wCreateDestinationFolder.setSelection(action.isCreateDestinationFolder());
  wRemovedSourceFilename.setSelection(action.isRemovedSourceFilename());
  wAddDestinationFilename.setSelection(action.isAddDestinationFilename());

  wName.selectAll();
  wName.setFocus();
}
private void cancel() {
  // Restore the original "changed" flag and clear the action reference
  // (the visible open() path returns this field, so callers observe null
  // after a cancel) before closing the dialog.
  action.setChanged(changed);
  action = null;
  dispose();
}
private void ok() {
  // A name is mandatory: warn the user and keep the dialog open.
  if (Utils.isEmpty(wName.getText())) {
    MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_ERROR);
    mb.setText(BaseMessages.getString(PKG, "System.TransformActionNameMissing.Title"));
    mb.setMessage(BaseMessages.getString(PKG, "System.ActionNameMissing.Msg"));
    mb.open();
    return;
  }

  action.setName(wName.getText());
  action.setSpecifyWildcard(wSpecifyWildcard.getSelection());
  action.setWildcard(wWildcard.getText());
  action.setWildcardExclude(wWildcardExclude.getText());
  action.setDestinationFolder(wFoldername.getText());
  action.setNrErrorsLessThan(wNrErrorsLessThan.getText());

  // Translate the combo index back into the stored success condition.
  switch (wSuccessCondition.getSelectionIndex()) {
    case 1:
      action.setSuccessCondition(ActionCopyMoveResultFilenames.SUCCESS_IF_AT_LEAST_X_FILES);
      break;
    case 2:
      action.setSuccessCondition(ActionCopyMoveResultFilenames.SUCCESS_IF_ERRORS_LESS);
      break;
    default:
      action.setSuccessCondition(ActionCopyMoveResultFilenames.SUCCESS_IF_NO_ERRORS);
      break;
  }

  // Translate the combo index back into the stored operation name.
  switch (wAction.getSelectionIndex()) {
    case 1:
      action.setAction("move");
      break;
    case 2:
      action.setAction("delete");
      break;
    default:
      action.setAction("copy");
      break;
  }

  action.setAddDate(wAddDate.getSelection());
  action.setAddTime(wAddTime.getSelection());
  action.setSpecifyFormat(wSpecifyFormat.getSelection());
  action.setDateTimeFormat(wDateTimeFormat.getText());
  action.setAddDateBeforeExtension(wAddDateBeforeExtension.getSelection());
  action.setOverwriteFile(wOverwriteFile.getSelection());
  action.setCreateDestinationFolder(wCreateDestinationFolder.getSelection());
  action.setRemovedSourceFilename(wRemovedSourceFilename.getSelection());
  action.setAddDestinationFilename(wAddDestinationFilename.getSelection());

  dispose();
}
private void enableAction() {
  // Index 2 is "delete": it needs no destination/date options, so everything
  // below is only enabled for "copy" (0) and "move" (1).
  boolean enable = wAction.getSelectionIndex() != 2;

  wlCreateDestinationFolder.setEnabled(enable);
  wCreateDestinationFolder.setEnabled(enable);
  wlOverwriteFile.setEnabled(enable);
  wOverwriteFile.setEnabled(enable);
  wlRemovedSourceFilename.setEnabled(enable);
  wRemovedSourceFilename.setEnabled(enable);
  wlAddDestinationFilename.setEnabled(enable);
  wAddDestinationFilename.setEnabled(enable);
  wlAddDate.setEnabled(enable);
  wAddDate.setEnabled(enable);
  wlAddTime.setEnabled(enable);
  wAddTime.setEnabled(enable);
  wlSpecifyFormat.setEnabled(enable);
  wSpecifyFormat.setEnabled(enable);
  wlDateTimeFormat.setEnabled(enable);
  wDateTimeFormat.setEnabled(enable);
  wAddDateBeforeExtension.setEnabled(enable);
  wlAddDateBeforeExtension.setEnabled(enable);
  wlFoldername.setEnabled(enable);
  wFoldername.setEnabled(enable);
  wbFoldername.setEnabled(enable);
}
}
|
apache/directory-studio | 36,129 | plugins/openldap.config.editor/src/main/java/org/apache/directory/studio/openldap/config/model/io/ConfigurationReader.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.directory.studio.openldap.config.model.io;
import java.io.File;
import java.lang.reflect.Array;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.directory.api.ldap.model.constants.LdapConstants;
import org.apache.directory.api.ldap.model.constants.SchemaConstants;
import org.apache.directory.api.ldap.model.entry.Attribute;
import org.apache.directory.api.ldap.model.entry.Entry;
import org.apache.directory.api.ldap.model.entry.Value;
import org.apache.directory.api.ldap.model.exception.LdapInvalidDnException;
import org.apache.directory.api.ldap.model.exception.LdapNoSuchObjectException;
import org.apache.directory.api.ldap.model.message.SearchScope;
import org.apache.directory.api.ldap.model.name.Dn;
import org.apache.directory.api.ldap.model.schema.ObjectClass;
import org.apache.directory.api.ldap.model.schema.SchemaManager;
import org.apache.directory.api.ldap.util.tree.DnNode;
import org.apache.directory.studio.common.core.jobs.StudioProgressMonitor;
import org.apache.directory.studio.connection.core.Connection;
import org.apache.directory.studio.connection.core.io.api.StudioSearchResult;
import org.apache.directory.studio.connection.core.io.api.StudioSearchResultEnumeration;
import org.apache.directory.studio.ldapbrowser.core.BrowserCorePlugin;
import org.apache.directory.studio.ldapbrowser.core.jobs.SearchRunnable;
import org.apache.directory.studio.ldapbrowser.core.model.IBrowserConnection;
import org.apache.directory.studio.ldapbrowser.core.model.SearchParameter;
import org.apache.directory.studio.openldap.config.ExpandedLdifUtils;
import org.apache.directory.studio.openldap.config.OpenLdapConfigurationPlugin;
import org.apache.directory.studio.openldap.config.editor.ConnectionServerConfigurationInput;
import org.apache.directory.studio.openldap.config.editor.DirectoryServerConfigurationInput;
import org.apache.directory.studio.openldap.config.editor.OpenLdapServerConfigurationEditorUtils;
import org.apache.directory.studio.openldap.config.jobs.EntryBasedConfigurationPartition;
import org.apache.directory.studio.openldap.config.model.AuxiliaryObjectClass;
import org.apache.directory.studio.openldap.config.model.ConfigurationElement;
import org.apache.directory.studio.openldap.config.model.OlcConfig;
import org.apache.directory.studio.openldap.config.model.OlcGlobal;
import org.apache.directory.studio.openldap.config.model.OlcModuleList;
import org.apache.directory.studio.openldap.config.model.OlcOverlayConfig;
import org.apache.directory.studio.openldap.config.model.OpenLdapConfiguration;
import org.apache.directory.studio.openldap.config.model.database.OlcDatabaseConfig;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.NullProgressMonitor;
import org.eclipse.osgi.util.NLS;
/**
* This class implements a configuration reader for OpenLDAP.
*
* @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
*/
public class ConfigurationReader
{
/**
 * Private constructor: this class only exposes static utility methods and
 * must not be instantiated.
 */
private ConfigurationReader()
{
    // Nothing to do
}
/** The package name where the model classes are stored */
private static final String MODEL_PACKAGE_NAME = "org.apache.directory.studio.openldap.config.model";
/** The package name where the database model classes are stored */
private static final String DATABASE_PACKAGE_NAME = "org.apache.directory.studio.openldap.config.model.database";
/** The package name where the overlay model classes are stored */
private static final String OVERLAY_PACKAGE_NAME = "org.apache.directory.studio.openldap.config.model.overlay";
/**
 * Reads the configuration from a server connection.
 *
 * @param input the connection based editor input
 * @return the OpenLDAP configuration
 * @throws Exception if the configuration DN cannot be located or the entries cannot be read
 */
public static OpenLdapConfiguration readConfiguration( ConnectionServerConfigurationInput input ) throws Exception
{
    // Creating a new OpenLDAP configuration
    OpenLdapConfiguration configuration = new OpenLdapConfiguration();

    // Saving the connection to the configuration
    configuration.setConnection( input.getConnection() );

    // Getting the browser connection associated with the connection in the input
    IBrowserConnection browserConnection = BrowserCorePlugin.getDefault().getConnectionManager()
        .getBrowserConnection( input.getConnection() );

    // Find the location of the configuration
    Dn configurationDn = ConfigurationUtils.getConfigurationDn( browserConnection );

    // Reading the configuration entries on the server
    List<Entry> configurationEntries = readEntries( configurationDn, input, browserConnection );

    // Creating a map to store object created based on their DN
    Map<Dn, OlcConfig> dnToConfigObjectMap = new HashMap<>();

    // For each configuration entries we create an associated configuration
    // object and store it in the OpenLDAP configuration
    for ( Entry entry : configurationEntries )
    {
        // Converting the entry into a configuration object
        OlcConfig configurationObject = createConfigurationObject( entry );

        if ( configurationObject != null )
        {
            // Storing the object in the configuration objects map
            dnToConfigObjectMap.put( entry.getDn(), configurationObject );

            if ( configurationObject instanceof OlcOverlayConfig )
            {
                OlcOverlayConfig overlayConfig = ( OlcOverlayConfig ) configurationObject;

                // Overlays attach to the database found under their parent DN;
                // parents appear before children in the read list, so a lookup
                // in the map is sufficient
                OlcDatabaseConfig databaseConfig = ( OlcDatabaseConfig ) dnToConfigObjectMap.get( entry.getDn()
                    .getParent() );

                if ( databaseConfig != null )
                {
                    databaseConfig.addOverlay( overlayConfig );
                }
                else
                {
                    // No parent database found: keep the overlay at the configuration level
                    configuration.add( overlayConfig );
                }
            }
            // The unique global configuration object
            else if ( configurationObject instanceof OlcGlobal )
            {
                configuration.setGlobal( ( OlcGlobal ) configurationObject );
            }
            else if ( configurationObject instanceof OlcModuleList )
            {
                configuration.add( (OlcModuleList)configurationObject );
            }
            else if ( configurationObject instanceof OlcDatabaseConfig )
            {
                configuration.add( ( OlcDatabaseConfig ) configurationObject );
            }
            else
            {
                configuration.add( configurationObject );
            }
        }
    }

    return configuration;
}
/**
 * Reads the configuration.
 *
 * @param input the directory based editor input
 * @return the OpenLDAP configuration
 * @throws Exception if the configuration cannot be read from the directory
 */
public static OpenLdapConfiguration readConfiguration( DirectoryServerConfigurationInput input ) throws Exception
{
    // Delegate to the File based reader, using the directory from the input
    return readConfiguration( input.getDirectory() );
}
/**
 * Reads the configuration from a directory on disk.
 *
 * @param directory the directory holding the expanded LDIF configuration
 * @return the OpenLDAP configuration
 * @throws Exception if no entries can be read from the directory
 */
public static OpenLdapConfiguration readConfiguration( File directory ) throws Exception
{
    // Creating a new OpenLDAP configuration
    OpenLdapConfiguration configuration = new OpenLdapConfiguration();

    // Reading the configuration entries from disk
    DnNode<Entry> tree = readEntries( directory );

    // Creating configuration objects
    createConfigurationObjects( tree, configuration );

    return configuration;
}
/**
 * Creates the configuration objects for all entries of the tree.
 * <p>
 * (The previous duplicated javadoc, which documented a parameter list that
 * did not match this method's signature, has been removed.)
 *
 * @param tree the tree of configuration entries
 * @param configuration the configuration to populate
 * @throws ConfigurationException if an entry cannot be converted to a configuration object
 */
private static void createConfigurationObjects( DnNode<Entry> tree, OpenLdapConfiguration configuration )
    throws ConfigurationException
{
    // Creating a map to store object created based on their DN
    Map<Dn, OlcConfig> dnToConfigObjectMap = new HashMap<>();

    createConfigurationObjects( tree, configuration, dnToConfigObjectMap );
}
/**
 * Creates the configuration objects for a node of the tree and, recursively,
 * for all of its children.
 *
 * @param node the node
 * @param configuration the configuration to populate
 * @param dnToConfigObjectMap the map associating DNs to configuration objects,
 *        used to attach overlays to the database found under their parent DN
 * @throws ConfigurationException if an entry cannot be converted to a configuration object
 */
private static void createConfigurationObjects( DnNode<Entry> node, OpenLdapConfiguration configuration,
    Map<Dn, OlcConfig> dnToConfigObjectMap ) throws ConfigurationException
{
    if ( node != null )
    {
        // Checking if the node as an element
        if ( node.hasElement() )
        {
            // Getting the entry for the node
            Entry entry = node.getElement();

            // Converting the entry into a configuration object
            OlcConfig configurationObject = createConfigurationObject( entry );

            if ( configurationObject != null )
            {
                // Storing the object in the configuration objects map
                dnToConfigObjectMap.put( entry.getDn(), configurationObject );

                // Checking if it's an overlay
                if ( configurationObject instanceof OlcOverlayConfig )
                {
                    OlcOverlayConfig overlayConfig = ( OlcOverlayConfig ) configurationObject;

                    // Getting the associated database configuration object
                    // (a node's element is processed before its children, so the
                    // parent database is already in the map when it exists)
                    OlcDatabaseConfig databaseConfig = ( OlcDatabaseConfig ) dnToConfigObjectMap.get( entry.getDn()
                        .getParent() );

                    if ( databaseConfig != null )
                    {
                        databaseConfig.addOverlay( overlayConfig );
                    }
                    else
                    {
                        // No parent database found: keep the overlay at the configuration level
                        configuration.add( overlayConfig );
                    }
                }
                // Checking if it's the "global' configuration object
                else if ( configurationObject instanceof OlcGlobal )
                {
                    configuration.setGlobal( ( OlcGlobal ) configurationObject );
                }
                // Checking if it's a database
                else if ( configurationObject instanceof OlcDatabaseConfig )
                {
                    configuration.add( (OlcDatabaseConfig)configurationObject );
                }
                // Any other object type
                else
                {
                    configuration.add( configurationObject );
                }
            }
        }

        // Checking the node has some children
        if ( node.hasChildren() )
        {
            Collection<DnNode<Entry>> children = node.getChildren().values();

            for ( DnNode<Entry> child : children )
            {
                createConfigurationObjects( child, configuration, dnToConfigObjectMap );
            }
        }
    }
}
/**
 * Reads the configuration entries from a directory on disk.
 *
 * @param directory the directory holding the expanded LDIF configuration
 * @return the (non-empty) tree of configuration entries found
 * @throws Exception if no entries could be read from the directory
 */
private static DnNode<Entry> readEntries( File directory )
    throws Exception
{
    // Read the expanded LDIF tree from disk
    DnNode<Entry> tree = ExpandedLdifUtils.read( directory );

    // An absent or empty tree means there is no configuration to load
    if ( ( tree == null ) || ( tree.size() == 0 ) )
    {
        throw new Exception( "No entries found" );
    }

    return tree;
}
/**
 * Gets the highest (most specific) structural object class found in the attribute.
 *
 * @param objectClassAttribute the 'objectClass' attribute
 * @return the highest structural object class found in the attribute
 * @throws ConfigurationException if the schema cannot be accessed or if no
 *         structural object class could be resolved from the attribute
 */
public static ObjectClass getHighestStructuralObjectClass( Attribute objectClassAttribute )
    throws ConfigurationException
{
    Set<ObjectClass> candidates = new HashSet<>();

    try
    {
        SchemaManager schemaManager = OpenLdapConfigurationPlugin.getDefault().getSchemaManager();

        if ( ( objectClassAttribute != null ) && ( schemaManager != null ) )
        {
            // Create the set of structural object class candidates
            for ( Value objectClassValue : objectClassAttribute )
            {
                ObjectClass oc = OpenLdapServerConfigurationEditorUtils.getObjectClass( schemaManager,
                    objectClassValue.getString() );

                if ( ( oc != null ) && ( oc.isStructural() ) )
                {
                    candidates.add( oc );
                }
            }

            // Remove every candidate that is the superior of another one:
            // the survivor is the most specific structural class
            for ( Value objectClassValue : objectClassAttribute )
            {
                ObjectClass oc = OpenLdapServerConfigurationEditorUtils.getObjectClass( schemaManager,
                    objectClassValue.getString() );

                if ( oc != null )
                {
                    for ( String superiorName : oc.getSuperiorOids() )
                    {
                        ObjectClass superior = OpenLdapServerConfigurationEditorUtils.getObjectClass( schemaManager,
                            superiorName );

                        if ( ( superior != null ) && ( superior.isStructural() )
                            && ( candidates.contains( superior ) ) )
                        {
                            candidates.remove( superior );
                        }
                    }
                }
            }
        }
    }
    catch ( Exception e )
    {
        throw new ConfigurationException( e );
    }

    // Fail with a clear message instead of an ArrayIndexOutOfBoundsException
    // when the attribute contains no resolvable structural object class
    if ( candidates.isEmpty() )
    {
        throw new ConfigurationException( "No structural object class found in the 'objectClass' attribute" );
    }

    // The remaining OC in the candidates set is the one we are looking for
    return candidates.iterator().next();
}
/**
 * Gets the auxiliary object classes found in the attribute.
 *
 * @param objectClassAttribute the 'objectClass' attribute
 * @return the auxiliary object classes found in the attribute (possibly empty)
 * @throws ConfigurationException if the schema cannot be accessed
 */
public static ObjectClass[] getAuxiliaryObjectClasses( Attribute objectClassAttribute )
    throws ConfigurationException
{
    List<ObjectClass> auxiliaryClasses = new ArrayList<>();

    try
    {
        SchemaManager schemaManager = OpenLdapConfigurationPlugin.getDefault().getSchemaManager();

        if ( ( schemaManager != null ) && ( objectClassAttribute != null ) )
        {
            // Keep every value that resolves to an auxiliary object class
            for ( Value objectClassValue : objectClassAttribute )
            {
                ObjectClass objectClass = OpenLdapServerConfigurationEditorUtils.getObjectClass( schemaManager,
                    objectClassValue.getString() );

                if ( ( objectClass != null ) && objectClass.isAuxiliary() )
                {
                    auxiliaryClasses.add( objectClass );
                }
            }
        }
    }
    catch ( Exception e )
    {
        throw new ConfigurationException( e );
    }

    return auxiliaryClasses.toArray( new ObjectClass[0] );
}
/**
 * Reads the configuration entries from the server, starting at the
 * configuration DN and walking the tree one level at a time.
 *
 * @param configurationDn the configuration DN
 * @param input the editor input (receives the created configuration partition)
 * @param browserConnection the connection
 * @return the list of configuration entries found
 * @throws Exception if an error occurred
 */
public static List<Entry> readEntries( Dn configurationDn, ConnectionServerConfigurationInput input,
    IBrowserConnection browserConnection ) throws Exception
{
    List<Entry> foundEntries = new ArrayList<>();

    IProgressMonitor progressMonitor = new NullProgressMonitor();
    StudioProgressMonitor monitor = new StudioProgressMonitor( progressMonitor );

    Connection connection = input.getConnection();

    // Creating the schema manager
    SchemaManager schemaManager = OpenLdapConfigurationPlugin.getDefault().getSchemaManager();

    // Creating the configuration partition rooted at the configuration DN
    EntryBasedConfigurationPartition configurationPartition = OpenLdapServerConfigurationEditorUtils
        .createConfigurationPartition( schemaManager, configurationDn );

    // Opening the connection (if needed)
    ConfigurationUtils.openConnection( connection, monitor );

    // Creating the search parameter: an object-scope search on the base entry
    SearchParameter configSearchParameter = new SearchParameter();
    configSearchParameter.setSearchBase( configurationDn );
    configSearchParameter.setFilter( LdapConstants.OBJECT_CLASS_STAR );
    configSearchParameter.setScope( SearchScope.OBJECT );
    configSearchParameter.setReturningAttributes( SchemaConstants.ALL_USER_ATTRIBUTES_ARRAY );

    // Looking for the 'ou=config' base entry
    Entry configEntry = null;
    StudioSearchResultEnumeration enumeration = SearchRunnable.search( browserConnection, configSearchParameter,
        monitor );

    // Checking if an error occurred
    if ( monitor.errorsReported() )
    {
        throw monitor.getException();
    }

    // Getting the entry
    if ( enumeration.hasMore() )
    {
        // Creating the base entry
        StudioSearchResult searchResult = enumeration.next();
        configEntry = searchResult.getEntry();
    }

    enumeration.close();

    // Verifying we found the base entry
    if ( configEntry == null )
    {
        throw new LdapNoSuchObjectException( NLS.bind( "Unable to find the ''{0}'' base entry.", configurationDn ) );
    }

    // Creating a list to hold the entries that needs to be checked
    // for children and added to the partition (breadth-first traversal:
    // each entry is dequeued, stored, and its one-level children enqueued)
    List<Entry> entries = new ArrayList<>();
    entries.add( configEntry );

    // Looping on the entries list until it's empty
    while ( !entries.isEmpty() )
    {
        // Removing the first entry from the list
        Entry entry = entries.remove( 0 );

        // Adding the entry to the partition and the found entries list
        configurationPartition.addEntry( entry );
        foundEntries.add( entry );

        SearchParameter searchParameter = new SearchParameter();
        searchParameter.setSearchBase( entry.getDn() );
        searchParameter.setFilter( LdapConstants.OBJECT_CLASS_STAR );
        searchParameter.setScope( SearchScope.ONELEVEL );
        searchParameter.setReturningAttributes( SchemaConstants.ALL_USER_ATTRIBUTES_ARRAY );

        // Looking for the children of the entry
        StudioSearchResultEnumeration childrenEnumeration = SearchRunnable.search( browserConnection,
            searchParameter, monitor );

        // Checking if an error occurred
        if ( monitor.errorsReported() )
        {
            throw monitor.getException();
        }

        while ( childrenEnumeration.hasMore() )
        {
            // Creating the child entry
            StudioSearchResult searchResult = childrenEnumeration.next();
            Entry childEntry = searchResult.getEntry();

            // Adding the children to the list of entries
            entries.add( childEntry );
        }

        childrenEnumeration.close();
    }

    // Setting the created partition to the input
    input.setOriginalPartition( configurationPartition );

    return foundEntries;
}
/**
 * Creates the configuration object associated with an entry, based on its
 * structural and auxiliary object classes.
 *
 * @param entry the configuration entry
 * @return the configuration object, or null when the entry has no 'objectClass' attribute
 * @throws ConfigurationException if a bean cannot be instantiated or populated
 */
private static OlcConfig createConfigurationObject( Entry entry )
    throws ConfigurationException
{
    // Getting the 'objectClass' attribute
    Attribute objectClassAttribute = entry.get( SchemaConstants.OBJECT_CLASS_AT );

    if ( objectClassAttribute == null )
    {
        return null;
    }

    // Getting the highest structural object class based on schema
    ObjectClass highestStructuralObjectClass = getHighestStructuralObjectClass( objectClassAttribute );

    // Databases and overlays have their beans in dedicated sub-packages
    String packageName;

    if ( objectClassAttribute.contains( "olcDatabaseConfig" ) )
    {
        packageName = DATABASE_PACKAGE_NAME;
    }
    else if ( objectClassAttribute.contains( "olcOverlayConfig" ) )
    {
        packageName = OVERLAY_PACKAGE_NAME;
    }
    else
    {
        packageName = MODEL_PACKAGE_NAME;
    }

    // Instantiating the bean corresponding to the structural object class
    OlcConfig bean = ( OlcConfig ) createBeanInstance(
        computeBeanClassName( packageName, highestStructuralObjectClass.getName() ) );

    // Checking auxiliary object classes
    ObjectClass[] auxiliaryObjectClasses = getAuxiliaryObjectClasses( objectClassAttribute );

    if ( auxiliaryObjectClasses != null )
    {
        for ( ObjectClass auxiliaryObjectClass : auxiliaryObjectClasses )
        {
            // Auxiliary object class beans always live in the base model package
            AuxiliaryObjectClass auxiliaryObjectClassBean = ( AuxiliaryObjectClass ) createBeanInstance(
                computeBeanClassName( MODEL_PACKAGE_NAME, auxiliaryObjectClass.getName() ) );

            // Reading all values
            readValues( entry, auxiliaryObjectClassBean );

            // Adding the auxiliary object class bean to the bean
            bean.addAuxiliaryObjectClasses( auxiliaryObjectClassBean );
        }
    }

    // Reading all values
    readValues( entry, bean );

    // Storing the parent DN
    bean.setParentDn( entry.getDn().getParent() );

    return bean;
}


/**
 * Computes the fully qualified bean class name for an object class: the
 * package name plus the object class name with a capitalized first letter.
 *
 * @param packageName the package holding the bean
 * @param objectClassName the object class name
 * @return the fully qualified class name
 */
private static String computeBeanClassName( String packageName, String objectClassName )
{
    return packageName + "." + Character.toUpperCase( objectClassName.charAt( 0 ) )
        + objectClassName.substring( 1 );
}


/**
 * Instantiates a bean through its public no-argument constructor.
 *
 * @param className the fully qualified class name
 * @return the new instance (never null, since Constructor.newInstance() never returns null)
 * @throws ConfigurationException if the class cannot be loaded or instantiated
 */
private static Object createBeanInstance( String className ) throws ConfigurationException
{
    try
    {
        return Class.forName( className ).getConstructor().newInstance();
    }
    catch ( Exception e )
    {
        throw new ConfigurationException( e );
    }
}
/**
 * Reads the values of the entry and saves them to the bean.
 * <p>
 * Every field of the bean (including inherited fields) annotated with
 * {@link ConfigurationElement} is looked up in the entry by attribute type,
 * and each value found is injected into the field.
 *
 * @param entry the entry
 * @param bean the bean
 * @throws ConfigurationException if a value cannot be injected into a field
 */
private static void readValues( Entry entry, Object bean ) throws ConfigurationException
{
    // Checking all fields of the bean (including super class fields)
    Class<?> clazz = bean.getClass();

    while ( clazz != null )
    {
        // Looping on all fields of the class
        Field[] fields = clazz.getDeclaredFields();

        for ( Field field : fields )
        {
            // Looking for the @ConfigurationElement annotation
            ConfigurationElement configurationElement = field.getAnnotation( ConfigurationElement.class );

            if ( configurationElement != null )
            {
                // Checking if we have a value for the attribute type
                String attributeType = configurationElement.attributeType();

                if ( ( attributeType != null ) && ( !"".equals( attributeType ) ) )
                {
                    Attribute attribute = entry.get( attributeType );

                    if ( ( attribute != null ) && ( attribute.size() > 0 ) )
                    {
                        // Making the field accessible (we get an exception if we don't do that)
                        field.setAccessible( true );

                        // Loop on the values and inject them in the bean
                        for ( Value value : attribute )
                        {
                            readAttributeValue( bean, field, attribute, value );
                        }
                    }
                }
            }
        }

        // Switching to the super class
        clazz = clazz.getSuperclass();
    }
}
/**
 * Reads one attribute value and injects it into the given bean field.
 * <p>
 * Scalar types are set directly through reflection; Set and List fields are
 * populated by wrapping the value in a one-element array of the element type
 * and invoking the bean's matching 'addXyz( T[] )' method.
 *
 * @param bean the bean to populate
 * @param field the target field
 * @param attribute the source attribute (used for typing and error reporting)
 * @param value the value to inject
 * @throws ConfigurationException if the value cannot be converted or stored
 */
private static void readAttributeValue( Object bean, Field field, Attribute attribute, Value value )
    throws ConfigurationException
{
    Class<?> type = field.getType();
    String addMethodName = "add" + Character.toUpperCase( field.getName().charAt( 0 ) )
        + field.getName().substring( 1 );
    String valueStr = value.getString();

    try
    {
        // String class
        if ( type == String.class )
        {
            Object stringValue = readSingleValue( type, attribute, valueStr );

            if ( stringValue != null )
            {
                field.set( bean, stringValue );
            }
        }
        // Int primitive type
        else if ( type == int.class )
        {
            Object integerValue = readSingleValue( type, attribute, valueStr );

            if ( integerValue != null )
            {
                field.setInt( bean, ( ( Integer ) integerValue ).intValue() );
            }
        }
        // Integer class
        else if ( type == Integer.class )
        {
            Object integerValue = readSingleValue( type, attribute, valueStr );

            if ( integerValue != null )
            {
                field.set( bean, ( Integer ) integerValue );
            }
        }
        // Long primitive type
        else if ( type == long.class )
        {
            Object longValue = readSingleValue( type, attribute, valueStr );

            if ( longValue != null )
            {
                field.setLong( bean, ( ( Long ) longValue ).longValue() );
            }
        }
        // Long class: the field is of the wrapper type, so Field.set() must be
        // used here (Field.setLong() only accepts fields of the primitive long
        // type and would throw an IllegalArgumentException on a Long field)
        else if ( type == Long.class )
        {
            Object longValue = readSingleValue( type, attribute, valueStr );

            if ( longValue != null )
            {
                field.set( bean, longValue );
            }
        }
        // Boolean primitive type
        else if ( type == boolean.class )
        {
            Object booleanValue = readSingleValue( type, attribute, valueStr );

            if ( booleanValue != null )
            {
                field.setBoolean( bean, ( ( Boolean ) booleanValue ).booleanValue() );
            }
        }
        // Boolean class
        else if ( type == Boolean.class )
        {
            Object booleanValue = readSingleValue( type, attribute, valueStr );

            if ( booleanValue != null )
            {
                field.set( bean, ( Boolean ) booleanValue );
            }
        }
        // Dn class
        else if ( type == Dn.class )
        {
            Object dnValue = readSingleValue( type, attribute, valueStr );

            if ( dnValue != null )
            {
                field.set( bean, dnValue );
            }
        }
        // Set and List classes share the exact same injection logic (the two
        // previously duplicated branches have been merged)
        else if ( ( type == Set.class ) || ( type == List.class ) )
        {
            Type genericFieldType = field.getGenericType();

            if ( genericFieldType instanceof ParameterizedType )
            {
                ParameterizedType parameterizedType = ( ParameterizedType ) genericFieldType;
                Type[] fieldArgTypes = parameterizedType.getActualTypeArguments();

                if ( ( fieldArgTypes != null ) && ( fieldArgTypes.length > 0 ) )
                {
                    Class<?> fieldArgClass = ( Class<?> ) fieldArgTypes[0];

                    // Wrap the converted value in a one-element array of the element type
                    Object methodParameter = Array.newInstance( fieldArgClass, 1 );
                    Array.set( methodParameter, 0, readSingleValue( fieldArgClass, attribute, valueStr ) );

                    Method method = bean.getClass().getMethod( addMethodName, methodParameter.getClass() );
                    method.invoke( bean, methodParameter );
                }
            }
        }
    }
    catch ( IllegalArgumentException | IllegalAccessException iae )
    {
        throw new ConfigurationException( "Cannot store '" + valueStr + "' into attribute "
            + attribute.getId() );
    }
    catch ( SecurityException se )
    {
        throw new ConfigurationException( "Cannot access to the class "
            + bean.getClass().getName() );
    }
    catch ( NoSuchMethodException nsme )
    {
        throw new ConfigurationException( "Cannot find a method " + addMethodName
            + " in the class "
            + bean.getClass().getName() );
    }
    catch ( InvocationTargetException ite )
    {
        throw new ConfigurationException( "Cannot invoke the class "
            + bean.getClass().getName() + ", "
            + ite.getMessage() );
    }
}
/**
 * Reads a single attribute value and converts it to the requested type.
 *
 * @param type the target type (String, int/Integer, long/Long, boolean/Boolean or Dn)
 * @param attribute the attribute (used for error reporting)
 * @param value the value as a String
 * @return the converted value, or null when the type is not supported
 * @throws ConfigurationException if the value cannot be converted to the requested type
 */
private static Object readSingleValue( Class<?> type, Attribute attribute, String value )
    throws ConfigurationException
{
    try
    {
        // String class
        if ( type == String.class )
        {
            return value;
        }
        // Int primitive type and Integer class
        // (valueOf may throw a NumberFormatException, an IllegalArgumentException
        // subclass handled below)
        else if ( ( type == int.class ) || ( type == Integer.class ) )
        {
            return Integer.valueOf( value );
        }
        // Long primitive type and Long class (the wrapper type was previously
        // not handled, leaving Long fields silently unset)
        else if ( ( type == long.class ) || ( type == Long.class ) )
        {
            return Long.valueOf( value );
        }
        // Boolean primitive type and Boolean class
        else if ( ( type == boolean.class ) || ( type == Boolean.class ) )
        {
            return Boolean.valueOf( value );
        }
        // Dn class
        else if ( type == Dn.class )
        {
            try
            {
                return new Dn( value );
            }
            catch ( LdapInvalidDnException lide )
            {
                throw new ConfigurationException( "The Dn '" + value + "' for attribute " + attribute.getId()
                    + " is not a valid Dn" );
            }
        }

        // Unsupported target type
        return null;
    }
    catch ( IllegalArgumentException iae )
    {
        throw new ConfigurationException( "Cannot store '" + value + "' into attribute "
            + attribute.getId() );
    }
}
}
|
apache/fineract | 36,631 | fineract-loan/src/main/java/org/apache/fineract/accounting/productaccountmapping/service/LoanProductToGLAccountMappingHelper.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.accounting.productaccountmapping.service;
import com.google.gson.JsonElement;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.fineract.accounting.common.AccountingConstants.AccrualAccountsForLoan;
import org.apache.fineract.accounting.common.AccountingConstants.CashAccountsForLoan;
import org.apache.fineract.accounting.common.AccountingConstants.LoanProductAccountingParams;
import org.apache.fineract.accounting.common.AccountingRuleType;
import org.apache.fineract.accounting.glaccount.domain.GLAccount;
import org.apache.fineract.accounting.glaccount.domain.GLAccountRepository;
import org.apache.fineract.accounting.glaccount.domain.GLAccountRepositoryWrapper;
import org.apache.fineract.accounting.glaccount.domain.GLAccountType;
import org.apache.fineract.accounting.producttoaccountmapping.domain.ProductToGLAccountMapping;
import org.apache.fineract.accounting.producttoaccountmapping.domain.ProductToGLAccountMappingRepository;
import org.apache.fineract.accounting.producttoaccountmapping.exception.ProductToGLAccountMappingInvalidException;
import org.apache.fineract.accounting.producttoaccountmapping.service.ProductToGLAccountMappingHelper;
import org.apache.fineract.infrastructure.codes.domain.CodeValueRepository;
import org.apache.fineract.infrastructure.core.api.JsonCommand;
import org.apache.fineract.infrastructure.core.serialization.FromJsonHelper;
import org.apache.fineract.portfolio.PortfolioProductType;
import org.apache.fineract.portfolio.charge.domain.ChargeRepositoryWrapper;
import org.apache.fineract.portfolio.paymenttype.domain.PaymentTypeRepositoryWrapper;
import org.springframework.stereotype.Component;
@Component
public class LoanProductToGLAccountMappingHelper extends ProductToGLAccountMappingHelper {
public LoanProductToGLAccountMappingHelper(final GLAccountRepository glAccountRepository,
final ProductToGLAccountMappingRepository glAccountMappingRepository, final FromJsonHelper fromApiJsonHelper,
final ChargeRepositoryWrapper chargeRepositoryWrapper, final GLAccountRepositoryWrapper accountRepositoryWrapper,
final PaymentTypeRepositoryWrapper paymentTypeRepositoryWrapper, final CodeValueRepository codeValueRepository) {
super(glAccountRepository, glAccountMappingRepository, fromApiJsonHelper, chargeRepositoryWrapper, accountRepositoryWrapper,
paymentTypeRepositoryWrapper, codeValueRepository);
}
/***
* Set of abstractions for saving Loan Products to GL Account Mappings
***/
public void saveLoanToAssetAccountMapping(final JsonElement element, final String paramName, final Long productId,
final int placeHolderTypeId) {
saveProductToAccountMapping(element, paramName, productId, placeHolderTypeId, GLAccountType.ASSET, PortfolioProductType.LOAN);
}
public void saveLoanToAssetOrLiabilityAccountMapping(final JsonElement element, final String paramName, final Long productId,
final int placeHolderTypeId) {
GLAccountType glAccountType = getGLAccountType(element, paramName, ASSET_LIABILITY_TYPES);
if (glAccountType != null) {
saveProductToAccountMapping(element, paramName, productId, placeHolderTypeId, glAccountType, PortfolioProductType.LOAN);
}
}
public void saveLoanToIncomeAccountMapping(final JsonElement element, final String paramName, final Long productId,
final int placeHolderTypeId) {
saveProductToAccountMapping(element, paramName, productId, placeHolderTypeId, GLAccountType.INCOME, PortfolioProductType.LOAN);
}
public void saveLoanToExpenseAccountMapping(final JsonElement element, final String paramName, final Long productId,
final int placeHolderTypeId) {
saveProductToAccountMapping(element, paramName, productId, placeHolderTypeId, GLAccountType.EXPENSE, PortfolioProductType.LOAN);
}
public void saveLoanToLiabilityAccountMapping(final JsonElement element, final String paramName, final Long productId,
final int placeHolderTypeId) {
saveProductToAccountMapping(element, paramName, productId, placeHolderTypeId, GLAccountType.LIABILITY, PortfolioProductType.LOAN);
}
/***
* Set of abstractions for merging Savings Products to GL Account Mappings
***/
public void mergeLoanToAssetAccountMappingChanges(final JsonElement element, final String paramName, final Long productId,
final int accountTypeId, final String accountTypeName, final Map<String, Object> changes) {
mergeProductToAccountMappingChanges(element, paramName, productId, accountTypeId, accountTypeName, changes, GLAccountType.ASSET,
PortfolioProductType.LOAN);
}
public void mergeLoanToAssetOrLiabilityAccountMappingChanges(final JsonElement element, final String paramName, final Long productId,
final int accountTypeId, final String accountTypeName, final Map<String, Object> changes) {
GLAccountType glAccountType = getGLAccountType(element, paramName, ASSET_LIABILITY_TYPES);
if (glAccountType != null) {
mergeProductToAccountMappingChanges(element, paramName, productId, accountTypeId, accountTypeName, changes, glAccountType,
PortfolioProductType.LOAN);
}
}
public void mergeLoanToIncomeAccountMappingChanges(final JsonElement element, final String paramName, final Long productId,
final int accountTypeId, final String accountTypeName, final Map<String, Object> changes) {
mergeProductToAccountMappingChanges(element, paramName, productId, accountTypeId, accountTypeName, changes, GLAccountType.INCOME,
PortfolioProductType.LOAN);
}
public void mergeLoanToExpenseAccountMappingChanges(final JsonElement element, final String paramName, final Long productId,
final int accountTypeId, final String accountTypeName, final Map<String, Object> changes) {
mergeProductToAccountMappingChanges(element, paramName, productId, accountTypeId, accountTypeName, changes, GLAccountType.EXPENSE,
PortfolioProductType.LOAN);
}
public void mergeLoanToLiabilityAccountMappingChanges(final JsonElement element, final String paramName, final Long productId,
final int accountTypeId, final String accountTypeName, final Map<String, Object> changes) {
mergeProductToAccountMappingChanges(element, paramName, productId, accountTypeId, accountTypeName, changes, GLAccountType.LIABILITY,
PortfolioProductType.LOAN);
}
/*** Abstractions for payments channel related to loan products ***/
public void savePaymentChannelToFundSourceMappings(final JsonCommand command, final JsonElement element, final Long productId,
final Map<String, Object> changes) {
savePaymentChannelToFundSourceMappings(command, element, productId, changes, PortfolioProductType.LOAN);
}
public void updatePaymentChannelToFundSourceMappings(final JsonCommand command, final JsonElement element, final Long productId,
final Map<String, Object> changes) {
updatePaymentChannelToFundSourceMappings(command, element, productId, changes, PortfolioProductType.LOAN);
}
public void saveChargesToIncomeAccountMappings(final JsonCommand command, final JsonElement element, final Long productId,
final Map<String, Object> changes) {
// save both fee and penalty charges
saveChargesToGLAccountMappings(command, element, productId, changes, PortfolioProductType.LOAN, true);
saveChargesToGLAccountMappings(command, element, productId, changes, PortfolioProductType.LOAN, false);
}
public void saveChargeOffReasonToExpenseAccountMappings(final JsonCommand command, final JsonElement element, final Long productId,
final Map<String, Object> changes) {
saveReasonToGLAccountMappings(command, element, productId, changes, PortfolioProductType.LOAN,
LoanProductAccountingParams.CHARGE_OFF_REASON_TO_EXPENSE_ACCOUNT_MAPPINGS,
LoanProductAccountingParams.CHARGE_OFF_REASON_CODE_VALUE_ID, CashAccountsForLoan.CHARGE_OFF_EXPENSE);
}
public void saveWriteOffReasonToExpenseAccountMappings(final JsonCommand command, final JsonElement element, final Long productId,
final Map<String, Object> changes) {
saveReasonToGLAccountMappings(command, element, productId, changes, PortfolioProductType.LOAN,
LoanProductAccountingParams.WRITE_OFF_REASON_TO_EXPENSE_ACCOUNT_MAPPINGS,
LoanProductAccountingParams.WRITE_OFF_REASON_CODE_VALUE_ID, CashAccountsForLoan.LOSSES_WRITTEN_OFF);
}
public void updateWriteOffReasonToExpenseAccountMappings(final JsonCommand command, final JsonElement element, final Long productId,
final Map<String, Object> changes) {
final List<ProductToGLAccountMapping> existingWriteOffReasonToGLAccountMappings = this.accountMappingRepository
.findAllWriteOffReasonsMappings(productId, PortfolioProductType.LOAN.getValue());
LoanProductAccountingParams reasonToExpenseAccountMappingsParam = LoanProductAccountingParams.WRITE_OFF_REASON_TO_EXPENSE_ACCOUNT_MAPPINGS;
LoanProductAccountingParams reasonCodeValueIdParam = LoanProductAccountingParams.WRITE_OFF_REASON_CODE_VALUE_ID;
CashAccountsForLoan cashAccountsForLoan = CashAccountsForLoan.LOSSES_WRITTEN_OFF;
updateReasonToGLAccountMappings(command, element, productId, changes, PortfolioProductType.LOAN,
existingWriteOffReasonToGLAccountMappings, reasonToExpenseAccountMappingsParam, reasonCodeValueIdParam,
cashAccountsForLoan);
}
public void updateChargeOffReasonToExpenseAccountMappings(final JsonCommand command, final JsonElement element, final Long productId,
final Map<String, Object> changes) {
final List<ProductToGLAccountMapping> chargeOffReasonsMappings = this.accountMappingRepository
.findAllChargeOffReasonsMappings(productId, PortfolioProductType.LOAN.getValue());
LoanProductAccountingParams reasonToExpenseAccountMappingsParam = LoanProductAccountingParams.CHARGE_OFF_REASON_TO_EXPENSE_ACCOUNT_MAPPINGS;
LoanProductAccountingParams reasonCodeValueIdParam = LoanProductAccountingParams.CHARGE_OFF_REASON_CODE_VALUE_ID;
CashAccountsForLoan cashAccountsForLoan = CashAccountsForLoan.CHARGE_OFF_EXPENSE;
updateReasonToGLAccountMappings(command, element, productId, changes, PortfolioProductType.LOAN, chargeOffReasonsMappings,
reasonToExpenseAccountMappingsParam, reasonCodeValueIdParam, cashAccountsForLoan);
}
public void saveCapitalizedIncomeClassificationToIncomeAccountMappings(final JsonCommand command, final JsonElement element,
final Long productId, final Map<String, Object> changes) {
saveClassificationToGLAccountMappings(command, element, productId, changes, PortfolioProductType.LOAN,
LoanProductAccountingParams.CAPITALIZED_INCOME_CLASSIFICATION_TO_INCOME_ACCOUNT_MAPPINGS);
}
public void updateCapitalizedIncomeClassificationToIncomeAccountMappings(final JsonCommand command, final JsonElement element,
final Long productId, final Map<String, Object> changes) {
updateClassificationToGLAccountMappings(command, element, productId, changes, PortfolioProductType.LOAN,
LoanProductAccountingParams.CAPITALIZED_INCOME_CLASSIFICATION_TO_INCOME_ACCOUNT_MAPPINGS);
}
public void saveBuyDownFeeClassificationToIncomeAccountMappings(final JsonCommand command, final JsonElement element,
final Long productId, final Map<String, Object> changes) {
saveClassificationToGLAccountMappings(command, element, productId, changes, PortfolioProductType.LOAN,
LoanProductAccountingParams.BUYDOWN_FEE_CLASSIFICATION_TO_INCOME_ACCOUNT_MAPPINGS);
}
public void updateBuyDownFeeClassificationToIncomeAccountMappings(final JsonCommand command, final JsonElement element,
final Long productId, final Map<String, Object> changes) {
updateClassificationToGLAccountMappings(command, element, productId, changes, PortfolioProductType.LOAN,
LoanProductAccountingParams.BUYDOWN_FEE_CLASSIFICATION_TO_INCOME_ACCOUNT_MAPPINGS);
}
public void updateChargesToIncomeAccountMappings(final JsonCommand command, final JsonElement element, final Long productId,
final Map<String, Object> changes) {
// update both fee and penalty charges
updateChargeToIncomeAccountMappings(command, element, productId, changes, PortfolioProductType.LOAN, true);
updateChargeToIncomeAccountMappings(command, element, productId, changes, PortfolioProductType.LOAN, false);
}
public Map<String, Object> populateChangesForNewLoanProductToGLAccountMappingCreation(final JsonElement element,
final AccountingRuleType accountingRuleType) {
final Map<String, Object> changes = new HashMap<>();
final Long fundAccountId = this.fromApiJsonHelper.extractLongNamed(LoanProductAccountingParams.FUND_SOURCE.getValue(), element);
final Long loanPortfolioAccountId = this.fromApiJsonHelper.extractLongNamed(LoanProductAccountingParams.LOAN_PORTFOLIO.getValue(),
element);
final Long incomeFromInterestId = this.fromApiJsonHelper.extractLongNamed(LoanProductAccountingParams.INTEREST_ON_LOANS.getValue(),
element);
final Long incomeFromFeeId = this.fromApiJsonHelper.extractLongNamed(LoanProductAccountingParams.INCOME_FROM_FEES.getValue(),
element);
final Long incomeFromPenaltyId = this.fromApiJsonHelper
.extractLongNamed(LoanProductAccountingParams.INCOME_FROM_PENALTIES.getValue(), element);
final Long incomeFromRecoveryAccountId = this.fromApiJsonHelper
.extractLongNamed(LoanProductAccountingParams.INCOME_FROM_RECOVERY.getValue(), element);
final Long incomeFromBuyDownFeesAccountId = this.fromApiJsonHelper
.extractLongNamed(LoanProductAccountingParams.INCOME_FROM_BUY_DOWN.getValue(), element);
final Long writeOffAccountId = this.fromApiJsonHelper.extractLongNamed(LoanProductAccountingParams.LOSSES_WRITTEN_OFF.getValue(),
element);
final Long overPaymentAccountId = this.fromApiJsonHelper.extractLongNamed(LoanProductAccountingParams.OVERPAYMENT.getValue(),
element);
final Long transfersInSuspenseAccountId = this.fromApiJsonHelper
.extractLongNamed(LoanProductAccountingParams.TRANSFERS_SUSPENSE.getValue(), element);
final Long receivableInterestAccountId = this.fromApiJsonHelper
.extractLongNamed(LoanProductAccountingParams.INTEREST_RECEIVABLE.getValue(), element);
final Long receivableFeeAccountId = this.fromApiJsonHelper.extractLongNamed(LoanProductAccountingParams.FEES_RECEIVABLE.getValue(),
element);
final Long receivablePenaltyAccountId = this.fromApiJsonHelper
.extractLongNamed(LoanProductAccountingParams.PENALTIES_RECEIVABLE.getValue(), element);
switch (accountingRuleType) {
case NONE:
break;
case CASH_BASED:
populateChangesForCashBasedAccounting(changes, fundAccountId, loanPortfolioAccountId, incomeFromInterestId, incomeFromFeeId,
incomeFromPenaltyId, writeOffAccountId, overPaymentAccountId, transfersInSuspenseAccountId,
incomeFromRecoveryAccountId);
break;
case ACCRUAL_PERIODIC:
populateChangesForAccrualBasedAccounting(changes, fundAccountId, loanPortfolioAccountId, incomeFromInterestId,
incomeFromFeeId, incomeFromPenaltyId, writeOffAccountId, overPaymentAccountId, transfersInSuspenseAccountId,
incomeFromRecoveryAccountId, incomeFromBuyDownFeesAccountId, receivableInterestAccountId, receivableFeeAccountId,
receivablePenaltyAccountId);
break;
case ACCRUAL_UPFRONT:
populateChangesForAccrualBasedAccounting(changes, fundAccountId, loanPortfolioAccountId, incomeFromInterestId,
incomeFromFeeId, incomeFromPenaltyId, writeOffAccountId, overPaymentAccountId, transfersInSuspenseAccountId,
incomeFromRecoveryAccountId, incomeFromBuyDownFeesAccountId, receivableInterestAccountId, receivableFeeAccountId,
receivablePenaltyAccountId);
break;
}
return changes;
}
private void populateChangesForAccrualBasedAccounting(final Map<String, Object> changes, final Long fundAccountId,
final Long loanPortfolioAccountId, final Long incomeFromInterestId, final Long incomeFromFeeId, final Long incomeFromPenaltyId,
final Long writeOffAccountId, final Long overPaymentAccountId, final Long transfersInSuspenseAccountId,
final Long incomeFromRecoveryAccountId, final Long incomeFromBuyDownFeesAccountId, final Long receivableInterestAccountId,
final Long receivableFeeAccountId, final Long receivablePenaltyAccountId) {
changes.put(LoanProductAccountingParams.INTEREST_RECEIVABLE.getValue(), receivableInterestAccountId);
changes.put(LoanProductAccountingParams.FEES_RECEIVABLE.getValue(), receivableFeeAccountId);
changes.put(LoanProductAccountingParams.PENALTIES_RECEIVABLE.getValue(), receivablePenaltyAccountId);
populateChangesForCashBasedAccounting(changes, fundAccountId, loanPortfolioAccountId, incomeFromInterestId, incomeFromFeeId,
incomeFromPenaltyId, writeOffAccountId, overPaymentAccountId, transfersInSuspenseAccountId, incomeFromRecoveryAccountId);
}
private void populateChangesForCashBasedAccounting(final Map<String, Object> changes, final Long fundAccountId,
final Long loanPortfolioAccountId, final Long incomeFromInterestId, final Long incomeFromFeeId, final Long incomeFromPenaltyId,
final Long writeOffAccountId, final Long overPaymentAccountId, final Long transfersInSuspenseAccountId,
final Long incomeFromRecoveryAccountId) {
changes.put(LoanProductAccountingParams.FUND_SOURCE.getValue(), fundAccountId);
changes.put(LoanProductAccountingParams.LOAN_PORTFOLIO.getValue(), loanPortfolioAccountId);
changes.put(LoanProductAccountingParams.INTEREST_ON_LOANS.getValue(), incomeFromInterestId);
changes.put(LoanProductAccountingParams.INCOME_FROM_FEES.getValue(), incomeFromFeeId);
changes.put(LoanProductAccountingParams.INCOME_FROM_PENALTIES.getValue(), incomeFromPenaltyId);
changes.put(LoanProductAccountingParams.LOSSES_WRITTEN_OFF.getValue(), writeOffAccountId);
changes.put(LoanProductAccountingParams.OVERPAYMENT.getValue(), overPaymentAccountId);
changes.put(LoanProductAccountingParams.TRANSFERS_SUSPENSE.getValue(), transfersInSuspenseAccountId);
changes.put(LoanProductAccountingParams.INCOME_FROM_RECOVERY.getValue(), incomeFromRecoveryAccountId);
}
/**
* Examines and updates each account mapping for given loan product with changes passed in from the Json element
*
* @param loanProductId
* @param changes
* @param element
* @param accountingRuleType
*/
public void handleChangesToLoanProductToGLAccountMappings(final Long loanProductId, final Map<String, Object> changes,
final JsonElement element, final AccountingRuleType accountingRuleType, final boolean enableIncomeCapitalization,
final boolean enableBuyDownFee, final boolean merchantBuyDownFee) {
switch (accountingRuleType) {
case NONE:
break;
case CASH_BASED:
// asset or liabilities
mergeLoanToAssetOrLiabilityAccountMappingChanges(element, LoanProductAccountingParams.FUND_SOURCE.getValue(), loanProductId,
CashAccountsForLoan.FUND_SOURCE.getValue(), CashAccountsForLoan.FUND_SOURCE.toString(), changes);
// asset
mergeLoanToAssetAccountMappingChanges(element, LoanProductAccountingParams.LOAN_PORTFOLIO.getValue(), loanProductId,
CashAccountsForLoan.LOAN_PORTFOLIO.getValue(), CashAccountsForLoan.LOAN_PORTFOLIO.toString(), changes);
mergeLoanToAssetAccountMappingChanges(element, LoanProductAccountingParams.TRANSFERS_SUSPENSE.getValue(), loanProductId,
CashAccountsForLoan.TRANSFERS_SUSPENSE.getValue(), CashAccountsForLoan.TRANSFERS_SUSPENSE.toString(), changes);
// income
mergeLoanToIncomeAccountMappingChanges(element, LoanProductAccountingParams.INTEREST_ON_LOANS.getValue(), loanProductId,
CashAccountsForLoan.INTEREST_ON_LOANS.getValue(), CashAccountsForLoan.INTEREST_ON_LOANS.toString(), changes);
mergeLoanToIncomeAccountMappingChanges(element, LoanProductAccountingParams.INCOME_FROM_FEES.getValue(), loanProductId,
CashAccountsForLoan.INCOME_FROM_FEES.getValue(), CashAccountsForLoan.INCOME_FROM_FEES.toString(), changes);
mergeLoanToIncomeAccountMappingChanges(element, LoanProductAccountingParams.INCOME_FROM_PENALTIES.getValue(), loanProductId,
CashAccountsForLoan.INCOME_FROM_PENALTIES.getValue(), CashAccountsForLoan.INCOME_FROM_PENALTIES.toString(),
changes);
mergeLoanToIncomeAccountMappingChanges(element, LoanProductAccountingParams.INCOME_FROM_RECOVERY.getValue(), loanProductId,
CashAccountsForLoan.INCOME_FROM_RECOVERY.getValue(), CashAccountsForLoan.INCOME_FROM_RECOVERY.toString(), changes);
mergeLoanToIncomeAccountMappingChanges(element, LoanProductAccountingParams.INCOME_FROM_CHARGE_OFF_FEES.getValue(),
loanProductId, CashAccountsForLoan.INCOME_FROM_CHARGE_OFF_FEES.getValue(),
CashAccountsForLoan.INCOME_FROM_CHARGE_OFF_FEES.toString(), changes);
mergeLoanToIncomeAccountMappingChanges(element, LoanProductAccountingParams.INCOME_FROM_CHARGE_OFF_INTEREST.getValue(),
loanProductId, CashAccountsForLoan.INCOME_FROM_CHARGE_OFF_INTEREST.getValue(),
CashAccountsForLoan.INCOME_FROM_CHARGE_OFF_INTEREST.toString(), changes);
mergeLoanToIncomeAccountMappingChanges(element, LoanProductAccountingParams.INCOME_FROM_CHARGE_OFF_PENALTY.getValue(),
loanProductId, CashAccountsForLoan.INCOME_FROM_CHARGE_OFF_PENALTY.getValue(),
CashAccountsForLoan.INCOME_FROM_CHARGE_OFF_PENALTY.toString(), changes);
mergeLoanToIncomeAccountMappingChanges(element, LoanProductAccountingParams.INCOME_FROM_GOODWILL_CREDIT_INTEREST.getValue(),
loanProductId, CashAccountsForLoan.INCOME_FROM_GOODWILL_CREDIT_INTEREST.getValue(),
CashAccountsForLoan.INCOME_FROM_GOODWILL_CREDIT_INTEREST.toString(), changes);
mergeLoanToIncomeAccountMappingChanges(element, LoanProductAccountingParams.INCOME_FROM_GOODWILL_CREDIT_FEES.getValue(),
loanProductId, CashAccountsForLoan.INCOME_FROM_GOODWILL_CREDIT_FEES.getValue(),
CashAccountsForLoan.INCOME_FROM_GOODWILL_CREDIT_FEES.toString(), changes);
mergeLoanToIncomeAccountMappingChanges(element, LoanProductAccountingParams.INCOME_FROM_GOODWILL_CREDIT_PENALTY.getValue(),
loanProductId, CashAccountsForLoan.INCOME_FROM_GOODWILL_CREDIT_PENALTY.getValue(),
CashAccountsForLoan.INCOME_FROM_GOODWILL_CREDIT_PENALTY.toString(), changes);
// expenses
mergeLoanToExpenseAccountMappingChanges(element, LoanProductAccountingParams.LOSSES_WRITTEN_OFF.getValue(), loanProductId,
CashAccountsForLoan.LOSSES_WRITTEN_OFF.getValue(), CashAccountsForLoan.LOSSES_WRITTEN_OFF.toString(), changes);
mergeLoanToExpenseAccountMappingChanges(element, LoanProductAccountingParams.GOODWILL_CREDIT.getValue(), loanProductId,
CashAccountsForLoan.GOODWILL_CREDIT.getValue(), CashAccountsForLoan.GOODWILL_CREDIT.toString(), changes);
mergeLoanToExpenseAccountMappingChanges(element, LoanProductAccountingParams.CHARGE_OFF_EXPENSE.getValue(), loanProductId,
CashAccountsForLoan.CHARGE_OFF_EXPENSE.getValue(), CashAccountsForLoan.CHARGE_OFF_EXPENSE.toString(), changes);
mergeLoanToExpenseAccountMappingChanges(element, LoanProductAccountingParams.CHARGE_OFF_FRAUD_EXPENSE.getValue(),
loanProductId, CashAccountsForLoan.CHARGE_OFF_FRAUD_EXPENSE.getValue(),
CashAccountsForLoan.CHARGE_OFF_FRAUD_EXPENSE.toString(), changes);
// liabilities
mergeLoanToLiabilityAccountMappingChanges(element, LoanProductAccountingParams.OVERPAYMENT.getValue(), loanProductId,
CashAccountsForLoan.OVERPAYMENT.getValue(), CashAccountsForLoan.OVERPAYMENT.toString(), changes);
break;
case ACCRUAL_UPFRONT:
// fall through to periodic accrual
case ACCRUAL_PERIODIC:
// asset or liabilities
mergeLoanToAssetOrLiabilityAccountMappingChanges(element, LoanProductAccountingParams.FUND_SOURCE.getValue(), loanProductId,
CashAccountsForLoan.FUND_SOURCE.getValue(), CashAccountsForLoan.FUND_SOURCE.toString(), changes);
// assets (including receivables)
mergeLoanToAssetAccountMappingChanges(element, LoanProductAccountingParams.LOAN_PORTFOLIO.getValue(), loanProductId,
AccrualAccountsForLoan.LOAN_PORTFOLIO.getValue(), AccrualAccountsForLoan.LOAN_PORTFOLIO.toString(), changes);
mergeLoanToAssetAccountMappingChanges(element, LoanProductAccountingParams.TRANSFERS_SUSPENSE.getValue(), loanProductId,
AccrualAccountsForLoan.TRANSFERS_SUSPENSE.getValue(), AccrualAccountsForLoan.TRANSFERS_SUSPENSE.toString(),
changes);
mergeLoanToAssetAccountMappingChanges(element, LoanProductAccountingParams.INTEREST_RECEIVABLE.getValue(), loanProductId,
AccrualAccountsForLoan.INTEREST_RECEIVABLE.getValue(), AccrualAccountsForLoan.INTEREST_RECEIVABLE.toString(),
changes);
mergeLoanToAssetAccountMappingChanges(element, LoanProductAccountingParams.FEES_RECEIVABLE.getValue(), loanProductId,
AccrualAccountsForLoan.FEES_RECEIVABLE.getValue(), AccrualAccountsForLoan.FEES_RECEIVABLE.toString(), changes);
mergeLoanToAssetAccountMappingChanges(element, LoanProductAccountingParams.PENALTIES_RECEIVABLE.getValue(), loanProductId,
AccrualAccountsForLoan.PENALTIES_RECEIVABLE.getValue(), AccrualAccountsForLoan.PENALTIES_RECEIVABLE.toString(),
changes);
// income
mergeLoanToIncomeAccountMappingChanges(element, LoanProductAccountingParams.INTEREST_ON_LOANS.getValue(), loanProductId,
AccrualAccountsForLoan.INTEREST_ON_LOANS.getValue(), AccrualAccountsForLoan.INTEREST_ON_LOANS.toString(), changes);
mergeLoanToIncomeAccountMappingChanges(element, LoanProductAccountingParams.INCOME_FROM_FEES.getValue(), loanProductId,
AccrualAccountsForLoan.INCOME_FROM_FEES.getValue(), AccrualAccountsForLoan.INCOME_FROM_FEES.toString(), changes);
mergeLoanToIncomeAccountMappingChanges(element, LoanProductAccountingParams.INCOME_FROM_PENALTIES.getValue(), loanProductId,
AccrualAccountsForLoan.INCOME_FROM_PENALTIES.getValue(), AccrualAccountsForLoan.INCOME_FROM_PENALTIES.toString(),
changes);
mergeLoanToIncomeAccountMappingChanges(element, LoanProductAccountingParams.INCOME_FROM_RECOVERY.getValue(), loanProductId,
AccrualAccountsForLoan.INCOME_FROM_RECOVERY.getValue(), AccrualAccountsForLoan.INCOME_FROM_RECOVERY.toString(),
changes);
mergeLoanToIncomeAccountMappingChanges(element, LoanProductAccountingParams.INCOME_FROM_CHARGE_OFF_FEES.getValue(),
loanProductId, AccrualAccountsForLoan.INCOME_FROM_CHARGE_OFF_FEES.getValue(),
AccrualAccountsForLoan.INCOME_FROM_CHARGE_OFF_FEES.toString(), changes);
mergeLoanToIncomeAccountMappingChanges(element, LoanProductAccountingParams.INCOME_FROM_CHARGE_OFF_INTEREST.getValue(),
loanProductId, AccrualAccountsForLoan.INCOME_FROM_CHARGE_OFF_INTEREST.getValue(),
AccrualAccountsForLoan.INCOME_FROM_CHARGE_OFF_INTEREST.toString(), changes);
mergeLoanToIncomeAccountMappingChanges(element, LoanProductAccountingParams.INCOME_FROM_CHARGE_OFF_PENALTY.getValue(),
loanProductId, AccrualAccountsForLoan.INCOME_FROM_CHARGE_OFF_PENALTY.getValue(),
AccrualAccountsForLoan.INCOME_FROM_CHARGE_OFF_PENALTY.toString(), changes);
mergeLoanToIncomeAccountMappingChanges(element, LoanProductAccountingParams.INCOME_FROM_GOODWILL_CREDIT_INTEREST.getValue(),
loanProductId, AccrualAccountsForLoan.INCOME_FROM_GOODWILL_CREDIT_INTEREST.getValue(),
AccrualAccountsForLoan.INCOME_FROM_GOODWILL_CREDIT_INTEREST.toString(), changes);
mergeLoanToIncomeAccountMappingChanges(element, LoanProductAccountingParams.INCOME_FROM_GOODWILL_CREDIT_FEES.getValue(),
loanProductId, AccrualAccountsForLoan.INCOME_FROM_GOODWILL_CREDIT_FEES.getValue(),
AccrualAccountsForLoan.INCOME_FROM_GOODWILL_CREDIT_FEES.toString(), changes);
mergeLoanToIncomeAccountMappingChanges(element, LoanProductAccountingParams.INCOME_FROM_GOODWILL_CREDIT_PENALTY.getValue(),
loanProductId, AccrualAccountsForLoan.INCOME_FROM_GOODWILL_CREDIT_PENALTY.getValue(),
AccrualAccountsForLoan.INCOME_FROM_GOODWILL_CREDIT_PENALTY.toString(), changes);
if (!enableIncomeCapitalization) {
deleteProductToGLAccountMapping(loanProductId, PortfolioProductType.LOAN,
AccrualAccountsForLoan.INCOME_FROM_CAPITALIZATION.getValue());
} else {
mergeLoanToIncomeAccountMappingChanges(element, LoanProductAccountingParams.INCOME_FROM_CAPITALIZATION.getValue(),
loanProductId, AccrualAccountsForLoan.INCOME_FROM_CAPITALIZATION.getValue(),
AccrualAccountsForLoan.INCOME_FROM_CAPITALIZATION.toString(), changes);
}
if (!enableBuyDownFee) {
deleteProductToGLAccountMapping(loanProductId, PortfolioProductType.LOAN,
AccrualAccountsForLoan.INCOME_FROM_BUY_DOWN.getValue());
} else {
mergeLoanToIncomeAccountMappingChanges(element, LoanProductAccountingParams.INCOME_FROM_BUY_DOWN.getValue(),
loanProductId, AccrualAccountsForLoan.INCOME_FROM_BUY_DOWN.getValue(),
AccrualAccountsForLoan.INCOME_FROM_BUY_DOWN.toString(), changes);
}
// expenses
mergeLoanToExpenseAccountMappingChanges(element, LoanProductAccountingParams.LOSSES_WRITTEN_OFF.getValue(), loanProductId,
AccrualAccountsForLoan.LOSSES_WRITTEN_OFF.getValue(), AccrualAccountsForLoan.LOSSES_WRITTEN_OFF.toString(),
changes);
mergeLoanToExpenseAccountMappingChanges(element, LoanProductAccountingParams.GOODWILL_CREDIT.getValue(), loanProductId,
AccrualAccountsForLoan.GOODWILL_CREDIT.getValue(), AccrualAccountsForLoan.GOODWILL_CREDIT.toString(), changes);
mergeLoanToExpenseAccountMappingChanges(element, LoanProductAccountingParams.CHARGE_OFF_EXPENSE.getValue(), loanProductId,
AccrualAccountsForLoan.CHARGE_OFF_EXPENSE.getValue(), AccrualAccountsForLoan.CHARGE_OFF_EXPENSE.toString(),
changes);
mergeLoanToExpenseAccountMappingChanges(element, LoanProductAccountingParams.CHARGE_OFF_FRAUD_EXPENSE.getValue(),
loanProductId, AccrualAccountsForLoan.CHARGE_OFF_FRAUD_EXPENSE.getValue(),
AccrualAccountsForLoan.CHARGE_OFF_FRAUD_EXPENSE.toString(), changes);
if (!enableBuyDownFee) {
deleteProductToGLAccountMapping(loanProductId, PortfolioProductType.LOAN,
AccrualAccountsForLoan.BUY_DOWN_EXPENSE.getValue());
} else {
if (merchantBuyDownFee) {
mergeLoanToExpenseAccountMappingChanges(element, LoanProductAccountingParams.BUY_DOWN_EXPENSE.getValue(),
loanProductId, AccrualAccountsForLoan.BUY_DOWN_EXPENSE.getValue(),
AccrualAccountsForLoan.BUY_DOWN_EXPENSE.toString(), changes);
}
}
// liabilities
mergeLoanToLiabilityAccountMappingChanges(element, LoanProductAccountingParams.OVERPAYMENT.getValue(), loanProductId,
CashAccountsForLoan.OVERPAYMENT.getValue(), CashAccountsForLoan.OVERPAYMENT.toString(), changes);
if (!enableBuyDownFee && !enableIncomeCapitalization) {
deleteProductToGLAccountMapping(loanProductId, PortfolioProductType.LOAN,
AccrualAccountsForLoan.DEFERRED_INCOME_LIABILITY.getValue());
} else {
mergeLoanToLiabilityAccountMappingChanges(element, LoanProductAccountingParams.DEFERRED_INCOME_LIABILITY.getValue(),
loanProductId, AccrualAccountsForLoan.DEFERRED_INCOME_LIABILITY.getValue(),
AccrualAccountsForLoan.DEFERRED_INCOME_LIABILITY.toString(), changes);
}
break;
}
}
public void deleteLoanProductToGLAccountMapping(final Long loanProductId) {
deleteProductToGLAccountMapping(loanProductId, PortfolioProductType.LOAN);
}
private GLAccountType getGLAccountType(final JsonElement element, final String paramName, final List<GLAccountType> allowedTypes) {
GLAccountType gLAccountType = null;
final Long accountId = this.fromApiJsonHelper.extractLongNamed(paramName, element);
if (accountId != null) {
final GLAccount glAccount = getAccountById(paramName, accountId);
gLAccountType = GLAccountType.fromInt(glAccount.getType());
if (!allowedTypes.contains(gLAccountType)) {
throw new ProductToGLAccountMappingInvalidException(paramName, glAccount.getName(), accountId, gLAccountType.toString(),
GLAccountType.ASSET.getCode() + " or " + GLAccountType.LIABILITY.getCode());
}
}
return gLAccountType;
}
}
|
apache/iotdb | 36,165 | iotdb-core/datanode/src/test/java/org/apache/iotdb/db/storageengine/dataregion/compaction/cross/RewriteCrossSpaceCompactionWithReadPointPerformerTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.iotdb.db.storageengine.dataregion.compaction.cross;
import org.apache.iotdb.commons.concurrent.ExceptionalCountDownLatch;
import org.apache.iotdb.commons.exception.IllegalPathException;
import org.apache.iotdb.commons.exception.MetadataException;
import org.apache.iotdb.commons.path.AlignedFullPath;
import org.apache.iotdb.commons.path.IFullPath;
import org.apache.iotdb.commons.path.MeasurementPath;
import org.apache.iotdb.db.conf.IoTDBDescriptor;
import org.apache.iotdb.db.exception.StorageEngineException;
import org.apache.iotdb.db.queryengine.execution.fragment.FragmentInstanceContext;
import org.apache.iotdb.db.queryengine.plan.planner.plan.node.PlanNodeId;
import org.apache.iotdb.db.queryengine.plan.planner.plan.node.write.DeleteDataNode;
import org.apache.iotdb.db.storageengine.dataregion.DataRegion;
import org.apache.iotdb.db.storageengine.dataregion.compaction.AbstractCompactionTest;
import org.apache.iotdb.db.storageengine.dataregion.compaction.execute.performer.impl.ReadPointCompactionPerformer;
import org.apache.iotdb.db.storageengine.dataregion.compaction.execute.task.CrossSpaceCompactionTask;
import org.apache.iotdb.db.storageengine.dataregion.compaction.execute.utils.reader.IDataBlockReader;
import org.apache.iotdb.db.storageengine.dataregion.compaction.execute.utils.reader.SeriesDataBlockReader;
import org.apache.iotdb.db.storageengine.dataregion.compaction.utils.CompactionFileGeneratorUtils;
import org.apache.iotdb.db.storageengine.dataregion.flush.TsFileFlushPolicy;
import org.apache.iotdb.db.storageengine.dataregion.read.control.FileReaderManager;
import org.apache.iotdb.db.storageengine.dataregion.tsfile.TsFileManager;
import org.apache.iotdb.db.storageengine.dataregion.tsfile.TsFileResource;
import org.apache.iotdb.db.storageengine.dataregion.tsfile.TsFileResourceStatus;
import org.apache.iotdb.db.storageengine.dataregion.tsfile.generator.TsFileNameGenerator;
import org.apache.iotdb.db.storageengine.dataregion.wal.recover.WALRecoverManager;
import org.apache.iotdb.db.utils.EnvironmentUtils;
import org.apache.tsfile.common.conf.TSFileDescriptor;
import org.apache.tsfile.common.constant.TsFileConstant;
import org.apache.tsfile.enums.TSDataType;
import org.apache.tsfile.exception.write.WriteProcessException;
import org.apache.tsfile.file.metadata.IDeviceID;
import org.apache.tsfile.read.common.block.TsBlock;
import org.apache.tsfile.utils.Pair;
import org.apache.tsfile.utils.TsFileGeneratorUtils;
import org.apache.tsfile.write.schema.IMeasurementSchema;
import org.apache.tsfile.write.schema.MeasurementSchema;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.apache.iotdb.commons.conf.IoTDBConstant.CROSS_COMPACTION_TMP_FILE_SUFFIX;
import static org.apache.iotdb.commons.conf.IoTDBConstant.PATH_SEPARATOR;
import static org.junit.Assert.assertEquals;
public class RewriteCrossSpaceCompactionWithReadPointPerformerTest extends AbstractCompactionTest {
private final String oldThreadName = Thread.currentThread().getName();
/**
 * Initializes the shared compaction fixtures before each test.
 *
 * <p>Resets the WAL recovery latch, shrinks the target chunk size so the
 * performer emits many small chunks, and renames the current thread to look
 * like a compaction worker (presumably because some compaction code inspects
 * the thread name — TODO confirm).
 */
@Before
public void setUp()
throws IOException, WriteProcessException, MetadataException, InterruptedException {
super.setUp();
WALRecoverManager.getInstance().setAllDataRegionScannedLatch(new ExceptionalCountDownLatch(1));
// 1 KB target chunk size forces chunk splitting during compaction.
IoTDBDescriptor.getInstance().getConfig().setTargetChunkSize(1024);
Thread.currentThread().setName("pool-1-IoTDB-Compaction-Worker-1");
}
/**
 * Restores global state after each test: base teardown, the original thread
 * name, and any file readers that a test left open.
 */
@After
public void tearDown() throws IOException, StorageEngineException {
super.tearDown();
Thread.currentThread().setName(oldThreadName);
// Readers cached by FileReaderManager would otherwise hold deleted files open.
FileReaderManager.getInstance().closeAndRemoveAllOpenedReaders();
}
/**
* Total 4 seq files and 5 unseq files, each file has different aligned timeseries.
*
* <p>Seq files<br>
* first and second file has d0 ~ d1 and s0 ~ s2, time range is 0 ~ 299 and 350 ~ 649, value range
* is 0 ~ 299 and 350 ~ 649.<br>
 * third and fourth file has d0 ~ d3 and s0 ~ s4, time range is 700 ~ 999 and 1050 ~ 1349, value
* range is 700 ~ 999 and 1050 ~ 1349.<br>
*
* <p>UnSeq files<br>
* first, second and third file has d0 ~ d2 and s0 ~ s3, time range is 20 ~ 219, 250 ~ 449 and 480
* ~ 679, value range is 10020 ~ 10219, 10250 ~ 10449 and 10480 ~ 10679.<br>
 * fourth and fifth file has d0 and s0 ~ s4, time range is 450 ~ 549 and 550 ~ 649, value range is
* 20450 ~ 20549 and 20550 ~ 20649.
*
* <p>The data of d0.s0, d0.s1, d2.s4 and d3.s4 is deleted in each file.
*/
@Test
public void testAlignedCrossSpaceCompactionWithAllDataDeletedInTimeseries() throws Exception {
// Small pages (30 points) make chunks span several pages so reads and
// deletions exercise page-level handling in the performer.
TSFileDescriptor.getInstance().getConfig().setMaxNumberOfPointsInPage(30);
registerTimeseriesInMManger(4, 5, true);
// 4 seq files and 5 unseq files with aligned series, laid out as described
// in the method javadoc.
createFiles(2, 2, 3, 300, 0, 0, 50, 50, true, true);
createFiles(2, 4, 5, 300, 700, 700, 50, 50, true, true);
createFiles(3, 3, 4, 200, 20, 10020, 30, 30, true, false);
createFiles(2, 1, 5, 100, 450, 20450, 0, 0, true, false);
// generate mods file
// Whole-time-range deletions for d0.s0, d0.s1, d2.s4 and d3.s4 (device ids
// are relative to the aligned-device offset), written to both the normal
// mods file and the compaction mods file of every source resource.
List<String> seriesPaths = new ArrayList<>();
seriesPaths.add(
COMPACTION_TEST_SG
+ PATH_SEPARATOR
+ "d"
+ TsFileGeneratorUtils.getAlignDeviceOffset()
+ PATH_SEPARATOR
+ "s0");
seriesPaths.add(
COMPACTION_TEST_SG
+ PATH_SEPARATOR
+ "d"
+ TsFileGeneratorUtils.getAlignDeviceOffset()
+ PATH_SEPARATOR
+ "s1");
seriesPaths.add(
COMPACTION_TEST_SG
+ PATH_SEPARATOR
+ "d"
+ (TsFileGeneratorUtils.getAlignDeviceOffset() + 2)
+ PATH_SEPARATOR
+ "s4");
seriesPaths.add(
COMPACTION_TEST_SG
+ PATH_SEPARATOR
+ "d"
+ (TsFileGeneratorUtils.getAlignDeviceOffset() + 3)
+ PATH_SEPARATOR
+ "s4");
generateModsFile(seriesPaths, seqResources, Long.MIN_VALUE, Long.MAX_VALUE, false);
generateModsFile(seriesPaths, unseqResources, Long.MIN_VALUE, Long.MAX_VALUE, false);
generateModsFile(seriesPaths, seqResources, Long.MIN_VALUE, Long.MAX_VALUE, true);
generateModsFile(seriesPaths, unseqResources, Long.MIN_VALUE, Long.MAX_VALUE, true);
// Phase 1: read the merged view of the raw source files and verify values
// and point counts, taking unseq overwrites and the deletions into account.
for (int i = TsFileGeneratorUtils.getAlignDeviceOffset();
i < TsFileGeneratorUtils.getAlignDeviceOffset() + 4;
i++) {
for (int j = 0; j < 5; j++) {
List<IMeasurementSchema> schemas = new ArrayList<>();
schemas.add(new MeasurementSchema("s" + j, TSDataType.INT64));
IFullPath path =
new AlignedFullPath(
IDeviceID.Factory.DEFAULT_FACTORY.create(
COMPACTION_TEST_SG + PATH_SEPARATOR + "d" + i),
Collections.singletonList("s" + j),
schemas);
IDataBlockReader tsFilesReader =
new SeriesDataBlockReader(
path,
FragmentInstanceContext.createFragmentInstanceContextForCompaction(
EnvironmentUtils.TEST_QUERY_CONTEXT.getQueryId()),
seqResources,
unseqResources,
true);
int count = 0;
while (tsFilesReader.hasNextBatch()) {
TsBlock batchData = tsFilesReader.nextBatch();
for (int readIndex = 0, size = batchData.getPositionCount();
readIndex < size;
readIndex++) {
long currentTime = batchData.getTimeByIndex(readIndex);
long currentValue = batchData.getColumn(0).getLong(readIndex);
// Value offsets encode which generation of unseq data should win:
// +20000 for the last two unseq files, +10000 for the first three.
if (i == TsFileGeneratorUtils.getAlignDeviceOffset()
&& ((450 <= currentTime && currentTime < 550)
|| (550 <= currentTime && currentTime < 650))) {
assertEquals(currentTime + 20000, currentValue);
} else if ((i < TsFileGeneratorUtils.getAlignDeviceOffset() + 3 && j < 4)
&& ((20 <= currentTime && currentTime < 220)
|| (250 <= currentTime && currentTime < 450)
|| (480 <= currentTime && currentTime < 680))) {
assertEquals(currentTime + 10000, currentValue);
} else {
assertEquals(currentTime, currentValue);
}
count++;
}
}
tsFilesReader.close();
// Fully deleted series yield no points; the remaining counts depend on
// which source files contain the device/measurement combination.
if ((i == TsFileGeneratorUtils.getAlignDeviceOffset() && j == 0)
|| (i == TsFileGeneratorUtils.getAlignDeviceOffset() && j == 1)
|| (i == TsFileGeneratorUtils.getAlignDeviceOffset() + 2 && j == 4)
|| (i == TsFileGeneratorUtils.getAlignDeviceOffset() + 3 && j == 4)) {
assertEquals(0, count);
} else if (i < TsFileGeneratorUtils.getAlignDeviceOffset() + 2 && j < 3) {
assertEquals(1280, count);
} else if (i < TsFileGeneratorUtils.getAlignDeviceOffset() + 1 && j < 4) {
assertEquals(1230, count);
} else if (i == TsFileGeneratorUtils.getAlignDeviceOffset()) {
assertEquals(800, count);
} else if ((i == TsFileGeneratorUtils.getAlignDeviceOffset() + 1 && j == 4)) {
assertEquals(600, count);
} else if (i < TsFileGeneratorUtils.getAlignDeviceOffset() + 3 && j < 4) {
assertEquals(1200, count);
} else {
assertEquals(600, count);
}
}
}
// Phase 2: run the cross-space compaction task over all source files.
List<TsFileResource> targetResources =
CompactionFileGeneratorUtils.getCrossCompactionTargetTsFileResources(seqResources);
TsFileManager tsFileManager =
new TsFileManager(COMPACTION_TEST_SG, "0", STORAGE_GROUP_DIR.getPath());
tsFileManager.addAll(seqResources, true);
tsFileManager.addAll(unseqResources, false);
CrossSpaceCompactionTask task =
new CrossSpaceCompactionTask(
0,
tsFileManager,
seqResources,
unseqResources,
new ReadPointCompactionPerformer(),
0,
0);
task.start();
// Source mods files must be cleaned up after a successful compaction.
for (TsFileResource resource : seqResources) {
resource.resetModFile();
Assert.assertFalse(resource.anyModFileExists());
}
for (TsFileResource resource : unseqResources) {
resource.resetModFile();
Assert.assertFalse(resource.anyModFileExists());
}
// Targets are renamed from the temporary cross-compaction suffix to
// .tsfile and must carry the 4 deletion entries.
for (TsFileResource resource : targetResources) {
resource.setFile(
new File(
resource
.getTsFilePath()
.replace(CROSS_COMPACTION_TMP_FILE_SUFFIX, TsFileConstant.TSFILE_SUFFIX)));
resource.resetModFile();
Assert.assertTrue(resource.anyModFileExists());
Assert.assertEquals(4, resource.getAllModEntries().size());
}
FileReaderManager.getInstance().closeAndRemoveAllOpenedReaders();
// Phase 3: re-read everything from the target (seq) files only; the result
// must match what phase 1 observed on the raw sources.
for (int i = TsFileGeneratorUtils.getAlignDeviceOffset();
i < TsFileGeneratorUtils.getAlignDeviceOffset() + 4;
i++) {
for (int j = 0; j < 5; j++) {
List<IMeasurementSchema> schemas = new ArrayList<>();
schemas.add(new MeasurementSchema("s" + j, TSDataType.INT64));
IFullPath path =
new AlignedFullPath(
IDeviceID.Factory.DEFAULT_FACTORY.create(
COMPACTION_TEST_SG + PATH_SEPARATOR + "d" + i),
Collections.singletonList("s" + j),
schemas);
IDataBlockReader tsFilesReader =
new SeriesDataBlockReader(
path,
FragmentInstanceContext.createFragmentInstanceContextForCompaction(
EnvironmentUtils.TEST_QUERY_CONTEXT.getQueryId()),
tsFileManager.getTsFileList(true),
new ArrayList<>(),
true);
int count = 0;
while (tsFilesReader.hasNextBatch()) {
TsBlock batchData = tsFilesReader.nextBatch();
for (int readIndex = 0, size = batchData.getPositionCount();
readIndex < size;
readIndex++) {
long currentTime = batchData.getTimeByIndex(readIndex);
long currentValue = batchData.getColumn(0).getLong(readIndex);
if (i == TsFileGeneratorUtils.getAlignDeviceOffset()
&& ((450 <= currentTime && currentTime < 550)
|| (550 <= currentTime && currentTime < 650))) {
assertEquals(currentTime + 20000, currentValue);
} else if ((i < TsFileGeneratorUtils.getAlignDeviceOffset() + 3 && j < 4)
&& ((20 <= currentTime && currentTime < 220)
|| (250 <= currentTime && currentTime < 450)
|| (480 <= currentTime && currentTime < 680))) {
assertEquals(currentTime + 10000, currentValue);
} else {
assertEquals(currentTime, currentValue);
}
count++;
}
}
tsFilesReader.close();
if ((i == TsFileGeneratorUtils.getAlignDeviceOffset() && j == 0)
|| (i == TsFileGeneratorUtils.getAlignDeviceOffset() && j == 1)
|| (i == TsFileGeneratorUtils.getAlignDeviceOffset() + 2 && j == 4)
|| (i == TsFileGeneratorUtils.getAlignDeviceOffset() + 3 && j == 4)) {
assertEquals(0, count);
} else if (i < TsFileGeneratorUtils.getAlignDeviceOffset() + 2 && j < 3) {
assertEquals(1280, count);
} else if (i < TsFileGeneratorUtils.getAlignDeviceOffset() + 1 && j < 4) {
assertEquals(1230, count);
} else if (i == TsFileGeneratorUtils.getAlignDeviceOffset()) {
assertEquals(800, count);
} else if ((i == TsFileGeneratorUtils.getAlignDeviceOffset() + 1 && j == 4)) {
assertEquals(600, count);
} else if (i < TsFileGeneratorUtils.getAlignDeviceOffset() + 3 && j < 4) {
assertEquals(1200, count);
} else {
assertEquals(600, count);
}
}
}
}
/**
* Total 4 seq files and 5 unseq files, each file has different aligned timeseries.
*
* <p>Seq files<br>
* first and second file has d0 ~ d1 and s0 ~ s2, time range is 0 ~ 299 and 350 ~ 649, value range
* is 0 ~ 299 and 350 ~ 649.<br>
 * third and fourth file has d0 ~ d3 and s0 ~ s4, time range is 700 ~ 999 and 1050 ~ 1349, value
* range is 700 ~ 999 and 1050 ~ 1349.<br>
*
* <p>UnSeq files<br>
* first, second and third file has d0 ~ d2 and s0 ~ s3, time range is 20 ~ 219, 250 ~ 449 and 480
* ~ 679, value range is 10020 ~ 10219, 10250 ~ 10449 and 10480 ~ 10679.<br>
 * fourth and fifth file has d0 and s0 ~ s4, time range is 450 ~ 549 and 550 ~ 649, value range is
* 20450 ~ 20549 and 20550 ~ 20649.
*
* <p>The data of d0, d1 and d2 is deleted in each file. The first target file is empty.
*/
@Test
public void testAlignedCrossSpaceCompactionWithAllDataDeletedInOneTargetFile() throws Exception {
// Small pages (30 points) make chunks span several pages.
TSFileDescriptor.getInstance().getConfig().setMaxNumberOfPointsInPage(30);
registerTimeseriesInMManger(4, 5, true);
// 4 seq files and 5 unseq files with aligned series, laid out as described
// in the method javadoc.
createFiles(2, 2, 3, 300, 0, 0, 50, 50, true, true);
createFiles(2, 4, 5, 300, 700, 700, 50, 50, true, true);
createFiles(3, 3, 4, 200, 20, 10020, 30, 30, true, false);
createFiles(2, 1, 5, 100, 450, 20450, 0, 0, true, false);
// generate mods file
// Delete every measurement (s0 ~ s4) of the first three aligned devices and
// of the plain devices d0 ~ d2, so that the first compaction target file
// ends up completely empty.
List<String> seriesPaths = new ArrayList<>();
for (int i = 0; i < 5; i++) {
seriesPaths.add(
COMPACTION_TEST_SG
+ PATH_SEPARATOR
+ "d"
+ TsFileGeneratorUtils.getAlignDeviceOffset()
+ PATH_SEPARATOR
+ "s"
+ i);
seriesPaths.add(
COMPACTION_TEST_SG
+ PATH_SEPARATOR
+ "d"
+ (TsFileGeneratorUtils.getAlignDeviceOffset() + 1)
+ PATH_SEPARATOR
+ "s"
+ i);
seriesPaths.add(
COMPACTION_TEST_SG
+ PATH_SEPARATOR
+ "d"
+ (TsFileGeneratorUtils.getAlignDeviceOffset() + 2)
+ PATH_SEPARATOR
+ "s"
+ i);
seriesPaths.add(COMPACTION_TEST_SG + PATH_SEPARATOR + "d0" + PATH_SEPARATOR + "s" + i);
seriesPaths.add(COMPACTION_TEST_SG + PATH_SEPARATOR + "d1" + PATH_SEPARATOR + "s" + i);
seriesPaths.add(COMPACTION_TEST_SG + PATH_SEPARATOR + "d2" + PATH_SEPARATOR + "s" + i);
}
generateModsFile(seriesPaths, seqResources, Long.MIN_VALUE, Long.MAX_VALUE, false);
generateModsFile(seriesPaths, unseqResources, Long.MIN_VALUE, Long.MAX_VALUE, false);
generateModsFile(seriesPaths, seqResources, Long.MIN_VALUE, Long.MAX_VALUE, true);
generateModsFile(seriesPaths, unseqResources, Long.MIN_VALUE, Long.MAX_VALUE, true);
// Phase 1: verify the merged view of the raw source files.
for (int i = TsFileGeneratorUtils.getAlignDeviceOffset();
i < TsFileGeneratorUtils.getAlignDeviceOffset() + 4;
i++) {
for (int j = 0; j < 5; j++) {
List<IMeasurementSchema> schemas = new ArrayList<>();
schemas.add(new MeasurementSchema("s" + j, TSDataType.INT64));
IFullPath path =
new AlignedFullPath(
IDeviceID.Factory.DEFAULT_FACTORY.create(
COMPACTION_TEST_SG + PATH_SEPARATOR + "d" + i),
Collections.singletonList("s" + j),
schemas);
IDataBlockReader tsFilesReader =
new SeriesDataBlockReader(
path,
FragmentInstanceContext.createFragmentInstanceContextForCompaction(
EnvironmentUtils.TEST_QUERY_CONTEXT.getQueryId()),
seqResources,
unseqResources,
true);
int count = 0;
while (tsFilesReader.hasNextBatch()) {
TsBlock batchData = tsFilesReader.nextBatch();
for (int readIndex = 0, size = batchData.getPositionCount();
readIndex < size;
readIndex++) {
long currentTime = batchData.getTimeByIndex(readIndex);
long currentValue = batchData.getColumn(0).getLong(readIndex);
if (i == TsFileGeneratorUtils.getAlignDeviceOffset()
&& ((450 <= currentTime && currentTime < 550)
|| (550 <= currentTime && currentTime < 650))) {
assertEquals(currentTime + 20000, currentValue);
} else if ((i < TsFileGeneratorUtils.getAlignDeviceOffset() + 3 && j < 4)
&& ((20 <= currentTime && currentTime < 220)
|| (250 <= currentTime && currentTime < 450)
|| (480 <= currentTime && currentTime < 680))) {
assertEquals(currentTime + 10000, currentValue);
} else {
assertEquals(currentTime, currentValue);
}
count++;
}
}
tsFilesReader.close();
// NOTE(review): i starts at the aligned-device offset, so this branch is
// likely dead unless the offset is 0 — confirm against
// TsFileGeneratorUtils.getAlignDeviceOffset().
if (i == 0 || i == 1 || i == 2) {
assertEquals(0, count);
}
// The three fully deleted aligned devices yield no points.
if ((i == TsFileGeneratorUtils.getAlignDeviceOffset())
|| (i == TsFileGeneratorUtils.getAlignDeviceOffset() + 1)
|| (i == TsFileGeneratorUtils.getAlignDeviceOffset() + 2)) {
assertEquals(0, count);
} else if (i < TsFileGeneratorUtils.getAlignDeviceOffset() + 2 && j < 3) {
assertEquals(1280, count);
} else if (i < TsFileGeneratorUtils.getAlignDeviceOffset() + 1 && j < 4) {
assertEquals(1230, count);
} else if ((i == TsFileGeneratorUtils.getAlignDeviceOffset() + 1 && j == 4)) {
assertEquals(600, count);
} else if (i < TsFileGeneratorUtils.getAlignDeviceOffset() + 3 && j < 4) {
assertEquals(1200, count);
} else {
assertEquals(600, count);
}
}
}
// Phase 2: run the cross-space compaction task over all source files.
List<TsFileResource> targetResources =
CompactionFileGeneratorUtils.getCrossCompactionTargetTsFileResources(seqResources);
TsFileManager tsFileManager =
new TsFileManager(COMPACTION_TEST_SG, "0", STORAGE_GROUP_DIR.getPath());
tsFileManager.addAll(seqResources, true);
tsFileManager.addAll(unseqResources, false);
CrossSpaceCompactionTask task =
new CrossSpaceCompactionTask(
0,
tsFileManager,
seqResources,
unseqResources,
new ReadPointCompactionPerformer(),
0,
0);
task.start();
for (TsFileResource resource : seqResources) {
Assert.assertFalse(resource.anyModFileExists());
}
for (TsFileResource resource : unseqResources) {
Assert.assertFalse(resource.anyModFileExists());
}
// The empty target file is expected to be deleted; surviving targets must
// carry all 30 deletion entries (6 series paths x 5 measurements).
for (TsFileResource resource : targetResources) {
resource.setFile(
new File(
resource
.getTsFilePath()
.replace(CROSS_COMPACTION_TMP_FILE_SUFFIX, TsFileConstant.TSFILE_SUFFIX)));
if (!resource.getTsFile().exists()) {
continue;
}
Assert.assertTrue(resource.anyModFileExists());
Assert.assertEquals(30, resource.getAllModEntries().size());
}
FileReaderManager.getInstance().closeAndRemoveAllOpenedReaders();
// Phase 3: re-read from the target (seq) files only; must match phase 1.
for (int i = TsFileGeneratorUtils.getAlignDeviceOffset();
i < TsFileGeneratorUtils.getAlignDeviceOffset() + 4;
i++) {
for (int j = 0; j < 5; j++) {
List<IMeasurementSchema> schemas = new ArrayList<>();
schemas.add(new MeasurementSchema("s" + j, TSDataType.INT64));
IFullPath path =
new AlignedFullPath(
IDeviceID.Factory.DEFAULT_FACTORY.create(
COMPACTION_TEST_SG + PATH_SEPARATOR + "d" + i),
Collections.singletonList("s" + j),
schemas);
IDataBlockReader tsFilesReader =
new SeriesDataBlockReader(
path,
FragmentInstanceContext.createFragmentInstanceContextForCompaction(
EnvironmentUtils.TEST_QUERY_CONTEXT.getQueryId()),
tsFileManager.getTsFileList(true),
new ArrayList<>(),
true);
int count = 0;
while (tsFilesReader.hasNextBatch()) {
TsBlock batchData = tsFilesReader.nextBatch();
for (int readIndex = 0, size = batchData.getPositionCount();
readIndex < size;
readIndex++) {
long currentTime = batchData.getTimeByIndex(readIndex);
long currentValue = batchData.getColumn(0).getLong(readIndex);
if (i == TsFileGeneratorUtils.getAlignDeviceOffset()
&& ((450 <= currentTime && currentTime < 550)
|| (550 <= currentTime && currentTime < 650))) {
assertEquals(currentTime + 20000, currentValue);
} else if ((i < TsFileGeneratorUtils.getAlignDeviceOffset() + 3 && j < 4)
&& ((20 <= currentTime && currentTime < 220)
|| (250 <= currentTime && currentTime < 450)
|| (480 <= currentTime && currentTime < 680))) {
assertEquals(currentTime + 10000, currentValue);
} else {
assertEquals(currentTime, currentValue);
}
count++;
}
}
tsFilesReader.close();
if (i == 0 || i == 1 || i == 2) {
assertEquals(0, count);
}
if ((i == TsFileGeneratorUtils.getAlignDeviceOffset())
|| (i == TsFileGeneratorUtils.getAlignDeviceOffset() + 1)
|| (i == TsFileGeneratorUtils.getAlignDeviceOffset() + 2)) {
assertEquals(0, count);
} else if (i < TsFileGeneratorUtils.getAlignDeviceOffset() + 2 && j < 3) {
assertEquals(1280, count);
} else if (i < TsFileGeneratorUtils.getAlignDeviceOffset() + 1 && j < 4) {
assertEquals(1230, count);
} else if ((i == TsFileGeneratorUtils.getAlignDeviceOffset() + 1 && j == 4)) {
assertEquals(600, count);
} else if (i < TsFileGeneratorUtils.getAlignDeviceOffset() + 3 && j < 4) {
assertEquals(1200, count);
} else {
assertEquals(600, count);
}
}
}
}
/**
* Total 4 seq files and 5 unseq files, each file has different aligned timeseries.
*
* <p>Seq files<br>
* first and second file has d0 ~ d1 and s0 ~ s2, time range is 0 ~ 299 and 350 ~ 649, value range
* is 0 ~ 299 and 350 ~ 649.<br>
 * third and fourth file has d0 ~ d3 and s0 ~ s4, time range is 700 ~ 999 and 1050 ~ 1349, value
* range is 700 ~ 999 and 1050 ~ 1349.<br>
*
* <p>UnSeq files<br>
* first, second and third file has d0 ~ d2 and s0 ~ s3, time range is 20 ~ 219, 250 ~ 449 and 480
* ~ 679, value range is 10020 ~ 10219, 10250 ~ 10449 and 10480 ~ 10679.<br>
 * fourth and fifth file has d0 and s0 ~ s4, time range is 450 ~ 549 and 550 ~ 649, value range is
* 20450 ~ 20549 and 20550 ~ 20649.
*
* <p>The data of d3.s0 is deleted. Test when there is a deletion to the file before compaction,
* then comes to a deletion during compaction.
*/
@Test
public void testOneDeletionDuringCompaction() throws Exception {
// A real DataRegion is needed so deleteByDevice() routes deletions into the
// normal or compaction mods files depending on the resources' status.
DataRegion vsgp =
new DataRegion(
STORAGE_GROUP_DIR.getPath(),
"0",
new TsFileFlushPolicy.DirectFlushPolicy(),
COMPACTION_TEST_SG);
registerTimeseriesInMManger(4, 5, true);
createFiles(2, 2, 3, 300, 0, 0, 50, 50, true, true);
createFiles(2, 4, 5, 300, 700, 700, 50, 50, true, true);
createFiles(3, 3, 4, 200, 20, 10020, 30, 30, true, false);
createFiles(2, 1, 5, 100, 450, 20450, 0, 0, true, false);
vsgp.getTsFileResourceManager().addAll(seqResources, true);
vsgp.getTsFileResourceManager().addAll(unseqResources, false);
// Target series: d3.s0 of the aligned devices.
MeasurementPath path =
new MeasurementPath(
COMPACTION_TEST_SG
+ PATH_SEPARATOR
+ "d"
+ (TsFileGeneratorUtils.getAlignDeviceOffset() + 3)
+ PATH_SEPARATOR
+ "s0");
// Deletion [0, 1000] issued BEFORE compaction starts.
DeleteDataNode deleteDataNode =
new DeleteDataNode(new PlanNodeId("1"), Collections.singletonList(path), 0, 1000);
deleteDataNode.setSearchIndex(0);
vsgp.deleteByDevice(
new MeasurementPath(
COMPACTION_TEST_SG
+ PATH_SEPARATOR
+ "d"
+ (TsFileGeneratorUtils.getAlignDeviceOffset() + 3)
+ PATH_SEPARATOR
+ "s0"),
deleteDataNode);
CrossSpaceCompactionTask task =
new CrossSpaceCompactionTask(
0,
vsgp.getTsFileResourceManager(),
seqResources,
unseqResources,
new ReadPointCompactionPerformer(),
0,
0);
// Mark all sources as COMPACTING so the next deletion counts as concurrent
// with the running compaction.
task.setSourceFilesToCompactionCandidate();
seqResources.forEach(f -> f.setStatus(TsFileResourceStatus.COMPACTING));
unseqResources.forEach(f -> f.setStatus(TsFileResourceStatus.COMPACTING));
// delete data in source file during compaction
DeleteDataNode deleteDataNode2 =
new DeleteDataNode(new PlanNodeId("2"), Collections.singletonList(path), 0, 1200);
deleteDataNode2.setSearchIndex(0);
vsgp.deleteByDevice(
new MeasurementPath(
COMPACTION_TEST_SG
+ PATH_SEPARATOR
+ "d"
+ (TsFileGeneratorUtils.getAlignDeviceOffset() + 3)
+ PATH_SEPARATOR
+ "s0"),
deleteDataNode2);
// Per the javadoc layout: seq files 1-2 do not contain d3 (no mods); seq
// file 3 (700 ~ 999) intersects both deletions (2 entries); seq file 4
// (1050 ~ 1349) intersects only the second deletion [0, 1200] (1 entry).
for (int i = 0; i < seqResources.size(); i++) {
TsFileResource resource = seqResources.get(i);
resource.resetModFile();
if (i < 2) {
Assert.assertFalse(resource.getCompactionModFile().exists());
Assert.assertFalse(resource.anyModFileExists());
} else if (i == 2) {
Assert.assertTrue(resource.anyModFileExists());
Assert.assertEquals(2, resource.getAllModEntries().size());
} else {
Assert.assertTrue(resource.anyModFileExists());
Assert.assertEquals(1, resource.getAllModEntries().size());
}
}
// Unseq files never contain d3, so no mods are expected there.
for (TsFileResource resource : unseqResources) {
resource.resetModFile();
Assert.assertFalse(resource.getCompactionModFile().exists());
Assert.assertFalse(resource.anyModFileExists());
}
task.start();
// After compaction all source files and their mods are removed.
for (TsFileResource resource : seqResources) {
Assert.assertFalse(resource.getTsFile().exists());
Assert.assertFalse(resource.anyModFileExists());
Assert.assertFalse(resource.getCompactionModFile().exists());
}
for (TsFileResource resource : unseqResources) {
Assert.assertFalse(resource.getTsFile().exists());
Assert.assertFalse(resource.anyModFileExists());
Assert.assertFalse(resource.getCompactionModFile().exists());
}
// Target files (cross-compaction count bumped) carry no compaction mods.
for (int i = 0; i < seqResources.size(); i++) {
TsFileResource seqResource = seqResources.get(i);
TsFileResource resource =
new TsFileResource(
TsFileNameGenerator.increaseCrossCompactionCnt(seqResource.getTsFile()));
if (i < 2) {
Assert.assertFalse(resource.getCompactionModFile().exists());
Assert.assertFalse(resource.anyModFileExists());
} else {
Assert.assertFalse(resource.getCompactionModFile().exists());
}
}
}
/**
* Total 4 seq files and 5 unseq files, each file has different aligned timeseries.
*
* <p>Seq files<br>
* first and second file has d0 ~ d1 and s0 ~ s2, time range is 0 ~ 299 and 350 ~ 649, value range
* is 0 ~ 299 and 350 ~ 649.<br>
 * third and fourth file has d0 ~ d3 and s0 ~ s4, time range is 700 ~ 999 and 1050 ~ 1349, value
* range is 700 ~ 999 and 1050 ~ 1349.<br>
*
* <p>UnSeq files<br>
* first, second and third file has d0 ~ d2 and s0 ~ s3, time range is 20 ~ 219, 250 ~ 449 and 480
* ~ 679, value range is 10020 ~ 10219, 10250 ~ 10449 and 10480 ~ 10679.<br>
 * fourth and fifth file has d0 and s0 ~ s4, time range is 450 ~ 549 and 550 ~ 649, value range is
* 20450 ~ 20549 and 20550 ~ 20649.
*
* <p>The data of d3.s0 is deleted. Test when there is a deletion to the file before compaction,
 * then several deletions arrive during compaction.
*/
@Test
public void testSeveralDeletionsDuringCompaction() throws Exception {
// A real DataRegion is needed so deleteByDevice() routes deletions into the
// normal or compaction mods files depending on the resources' status.
DataRegion vsgp =
new DataRegion(
STORAGE_GROUP_DIR.getPath(),
"0",
new TsFileFlushPolicy.DirectFlushPolicy(),
COMPACTION_TEST_SG);
registerTimeseriesInMManger(4, 5, true);
createFiles(2, 2, 3, 300, 0, 0, 50, 50, true, true);
createFiles(2, 4, 5, 300, 700, 700, 50, 50, true, true);
createFiles(3, 3, 4, 200, 20, 10020, 30, 30, true, false);
createFiles(2, 1, 5, 100, 450, 20450, 0, 0, true, false);
vsgp.getTsFileResourceManager().addAll(seqResources, true);
vsgp.getTsFileResourceManager().addAll(unseqResources, false);
// Target series: d3.s0 of the aligned devices.
MeasurementPath path =
new MeasurementPath(
COMPACTION_TEST_SG
+ PATH_SEPARATOR
+ "d"
+ (TsFileGeneratorUtils.getAlignDeviceOffset() + 3)
+ PATH_SEPARATOR
+ "s0");
// Deletion [0, 1000] issued BEFORE compaction starts.
DeleteDataNode deleteDataNode =
new DeleteDataNode(new PlanNodeId("1"), Collections.singletonList(path), 0, 1000);
deleteDataNode.setSearchIndex(0);
vsgp.deleteByDevice(
new MeasurementPath(
COMPACTION_TEST_SG
+ PATH_SEPARATOR
+ "d"
+ (TsFileGeneratorUtils.getAlignDeviceOffset() + 3)
+ PATH_SEPARATOR
+ "s0"),
deleteDataNode);
CrossSpaceCompactionTask task =
new CrossSpaceCompactionTask(
0,
vsgp.getTsFileResourceManager(),
seqResources,
unseqResources,
new ReadPointCompactionPerformer(),
0,
0);
// Mark all sources as COMPACTING so the next deletions count as concurrent
// with the running compaction.
task.setSourceFilesToCompactionCandidate();
seqResources.forEach(f -> f.setStatus(TsFileResourceStatus.COMPACTING));
unseqResources.forEach(f -> f.setStatus(TsFileResourceStatus.COMPACTING));
// delete data in source file during compaction
DeleteDataNode deleteDataNode2 =
new DeleteDataNode(new PlanNodeId("2"), Collections.singletonList(path), 0, 1200);
deleteDataNode2.setSearchIndex(0);
vsgp.deleteByDevice(
new MeasurementPath(
COMPACTION_TEST_SG
+ PATH_SEPARATOR
+ "d"
+ (TsFileGeneratorUtils.getAlignDeviceOffset() + 3)
+ PATH_SEPARATOR
+ "s0"),
deleteDataNode2);
DeleteDataNode deleteDataNode3 =
new DeleteDataNode(new PlanNodeId("3"), Collections.singletonList(path), 0, 1800);
deleteDataNode3.setSearchIndex(0);
vsgp.deleteByDevice(
new MeasurementPath(
COMPACTION_TEST_SG
+ PATH_SEPARATOR
+ "d"
+ (TsFileGeneratorUtils.getAlignDeviceOffset() + 3)
+ PATH_SEPARATOR
+ "s0"),
deleteDataNode3);
// Per the javadoc layout: seq files 1-2 do not contain d3 (no mods); seq
// file 3 (700 ~ 999) intersects all three deletions (3 entries); seq file 4
// (1050 ~ 1349) intersects [0, 1200] and [0, 1800] only (2 entries).
for (int i = 0; i < seqResources.size(); i++) {
TsFileResource resource = seqResources.get(i);
resource.resetModFile();
if (i < 2) {
Assert.assertFalse(resource.getCompactionModFile().exists());
Assert.assertFalse(resource.anyModFileExists());
} else if (i == 2) {
Assert.assertTrue(resource.anyModFileExists());
Assert.assertEquals(3, resource.getAllModEntries().size());
} else {
Assert.assertTrue(resource.anyModFileExists());
Assert.assertEquals(2, resource.getAllModEntries().size());
}
}
// Unseq files never contain d3, so no mods are expected there.
for (TsFileResource resource : unseqResources) {
resource.resetModFile();
Assert.assertFalse(resource.getCompactionModFile().exists());
Assert.assertFalse(resource.anyModFileExists());
}
task.start();
// After compaction all source files and their mods are removed.
for (TsFileResource resource : seqResources) {
Assert.assertFalse(resource.getTsFile().exists());
Assert.assertFalse(resource.anyModFileExists());
Assert.assertFalse(resource.getCompactionModFile().exists());
}
for (TsFileResource resource : unseqResources) {
Assert.assertFalse(resource.getTsFile().exists());
Assert.assertFalse(resource.anyModFileExists());
Assert.assertFalse(resource.getCompactionModFile().exists());
}
// Target files (cross-compaction count bumped) carry no compaction mods.
for (int i = 0; i < seqResources.size(); i++) {
TsFileResource seqResource = seqResources.get(i);
TsFileResource resource =
new TsFileResource(
TsFileNameGenerator.increaseCrossCompactionCnt(seqResource.getTsFile()));
if (i < 2) {
Assert.assertFalse(resource.getCompactionModFile().exists());
Assert.assertFalse(resource.anyModFileExists());
} else {
Assert.assertFalse(resource.getCompactionModFile().exists());
}
}
}
/**
 * Writes a mods (deletion) file for every given resource, deleting the time
 * range {@code [startValue, endValue]} for each of the series paths.
 *
 * @param seriesPaths full series paths whose data should be deleted
 * @param resources resources that each receive a mods file
 * @param startValue inclusive start of the deleted time range
 * @param endValue inclusive end of the deleted time range
 * @param isCompactionMods whether to write the compaction mods file instead of
 *     the normal one
 */
private void generateModsFile(
List<String> seriesPaths,
List<TsFileResource> resources,
long startValue,
long endValue,
boolean isCompactionMods)
throws IllegalPathException, IOException {
for (TsFileResource resource : resources) {
// Every series gets the same deletion range for this resource.
Map<String, Pair<Long, Long>> deletionsByPath = new HashMap<>();
seriesPaths.forEach(
seriesPath -> deletionsByPath.put(seriesPath, new Pair<>(startValue, endValue)));
CompactionFileGeneratorUtils.generateMods(deletionsByPath, resource, isCompactionMods);
}
}
}
|
googleapis/google-cloud-java | 36,017 | java-dialogflow/proto-google-cloud-dialogflow-v2beta1/src/main/java/com/google/cloud/dialogflow/v2beta1/BargeInConfig.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/v2beta1/audio_config.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dialogflow.v2beta1;
/**
*
*
* <pre>
* Configuration of the barge-in behavior. Barge-in instructs the API to return
* a detected utterance at a proper time while the client is playing back the
* response audio from a previous request. When the client sees the
* utterance, it should stop the playback and immediately get ready for
* receiving the responses for the current request.
*
* The barge-in handling requires the client to start streaming audio input
* as soon as it starts playing back the audio from the previous response. The
* playback is modeled into two phases:
*
* * No barge-in phase: which goes first and during which speech detection
* should not be carried out.
*
* * Barge-in phase: which follows the no barge-in phase and during which
* the API starts speech detection and may inform the client that an utterance
* has been detected. Note that no-speech event is not expected in this
* phase.
*
* The client provides this configuration in terms of the durations of those
* two phases. The durations are measured in terms of the audio length from the
* start of the input audio.
*
* The flow goes like below:
*
* ```
* --> Time
*
* without speech detection | utterance only | utterance or no-speech event
* | |
* +-------------+ | +------------+ | +---------------+
* ----------+ no barge-in +-|-+ barge-in +-|-+ normal period +-----------
* +-------------+ | +------------+ | +---------------+
* ```
*
* No-speech event is a response with END_OF_UTTERANCE without any transcript
* following up.
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.v2beta1.BargeInConfig}
*/
public final class BargeInConfig extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dialogflow.v2beta1.BargeInConfig)
BargeInConfigOrBuilder {
private static final long serialVersionUID = 0L;
// Use BargeInConfig.newBuilder() to construct.
private BargeInConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private BargeInConfig() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new BargeInConfig();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.v2beta1.AudioConfigProto
.internal_static_google_cloud_dialogflow_v2beta1_BargeInConfig_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.v2beta1.AudioConfigProto
.internal_static_google_cloud_dialogflow_v2beta1_BargeInConfig_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.v2beta1.BargeInConfig.class,
com.google.cloud.dialogflow.v2beta1.BargeInConfig.Builder.class);
}
private int bitField0_;
public static final int NO_BARGE_IN_DURATION_FIELD_NUMBER = 1;
private com.google.protobuf.Duration noBargeInDuration_;
/**
*
*
* <pre>
* Duration that is not eligible for barge-in at the beginning of the input
* audio.
* </pre>
*
* <code>.google.protobuf.Duration no_barge_in_duration = 1;</code>
*
* @return Whether the noBargeInDuration field is set.
*/
@java.lang.Override
public boolean hasNoBargeInDuration() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Duration that is not eligible for barge-in at the beginning of the input
* audio.
* </pre>
*
* <code>.google.protobuf.Duration no_barge_in_duration = 1;</code>
*
* @return The noBargeInDuration.
*/
@java.lang.Override
public com.google.protobuf.Duration getNoBargeInDuration() {
return noBargeInDuration_ == null
? com.google.protobuf.Duration.getDefaultInstance()
: noBargeInDuration_;
}
/**
*
*
* <pre>
* Duration that is not eligible for barge-in at the beginning of the input
* audio.
* </pre>
*
* <code>.google.protobuf.Duration no_barge_in_duration = 1;</code>
*/
@java.lang.Override
public com.google.protobuf.DurationOrBuilder getNoBargeInDurationOrBuilder() {
return noBargeInDuration_ == null
? com.google.protobuf.Duration.getDefaultInstance()
: noBargeInDuration_;
}
public static final int TOTAL_DURATION_FIELD_NUMBER = 2;
private com.google.protobuf.Duration totalDuration_;
/**
*
*
* <pre>
* Total duration for the playback at the beginning of the input audio.
* </pre>
*
* <code>.google.protobuf.Duration total_duration = 2;</code>
*
* @return Whether the totalDuration field is set.
*/
@java.lang.Override
public boolean hasTotalDuration() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Total duration for the playback at the beginning of the input audio.
* </pre>
*
* <code>.google.protobuf.Duration total_duration = 2;</code>
*
* @return The totalDuration.
*/
@java.lang.Override
public com.google.protobuf.Duration getTotalDuration() {
return totalDuration_ == null
? com.google.protobuf.Duration.getDefaultInstance()
: totalDuration_;
}
/**
*
*
* <pre>
* Total duration for the playback at the beginning of the input audio.
* </pre>
*
* <code>.google.protobuf.Duration total_duration = 2;</code>
*/
@java.lang.Override
public com.google.protobuf.DurationOrBuilder getTotalDurationOrBuilder() {
return totalDuration_ == null
? com.google.protobuf.Duration.getDefaultInstance()
: totalDuration_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getNoBargeInDuration());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getTotalDuration());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getNoBargeInDuration());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getTotalDuration());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.dialogflow.v2beta1.BargeInConfig)) {
return super.equals(obj);
}
com.google.cloud.dialogflow.v2beta1.BargeInConfig other =
(com.google.cloud.dialogflow.v2beta1.BargeInConfig) obj;
if (hasNoBargeInDuration() != other.hasNoBargeInDuration()) return false;
if (hasNoBargeInDuration()) {
if (!getNoBargeInDuration().equals(other.getNoBargeInDuration())) return false;
}
if (hasTotalDuration() != other.hasTotalDuration()) return false;
if (hasTotalDuration()) {
if (!getTotalDuration().equals(other.getTotalDuration())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasNoBargeInDuration()) {
hash = (37 * hash) + NO_BARGE_IN_DURATION_FIELD_NUMBER;
hash = (53 * hash) + getNoBargeInDuration().hashCode();
}
if (hasTotalDuration()) {
hash = (37 * hash) + TOTAL_DURATION_FIELD_NUMBER;
hash = (53 * hash) + getTotalDuration().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.dialogflow.v2beta1.BargeInConfig parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.v2beta1.BargeInConfig parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2beta1.BargeInConfig parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.v2beta1.BargeInConfig parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2beta1.BargeInConfig parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.v2beta1.BargeInConfig parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2beta1.BargeInConfig parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.v2beta1.BargeInConfig parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2beta1.BargeInConfig parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.v2beta1.BargeInConfig parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2beta1.BargeInConfig parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.v2beta1.BargeInConfig parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.dialogflow.v2beta1.BargeInConfig prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Configuration of the barge-in behavior. Barge-in instructs the API to return
* a detected utterance at a proper time while the client is playing back the
* response audio from a previous request. When the client sees the
* utterance, it should stop the playback and immediately get ready for
* receiving the responses for the current request.
*
* The barge-in handling requires the client to start streaming audio input
* as soon as it starts playing back the audio from the previous response. The
* playback is modeled into two phases:
*
* * No barge-in phase: which goes first and during which speech detection
* should not be carried out.
*
* * Barge-in phase: which follows the no barge-in phase and during which
* the API starts speech detection and may inform the client that an utterance
* has been detected. Note that no-speech event is not expected in this
* phase.
*
* The client provides this configuration in terms of the durations of those
* two phases. The durations are measured in terms of the audio length from the
* start of the input audio.
*
* The flow goes like below:
*
* ```
* --> Time
*
* without speech detection | utterance only | utterance or no-speech event
* | |
* +-------------+ | +------------+ | +---------------+
* ----------+ no barge-in +-|-+ barge-in +-|-+ normal period +-----------
* +-------------+ | +------------+ | +---------------+
* ```
*
* No-speech event is a response with END_OF_UTTERANCE without any transcript
* following up.
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.v2beta1.BargeInConfig}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.v2beta1.BargeInConfig)
com.google.cloud.dialogflow.v2beta1.BargeInConfigOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.v2beta1.AudioConfigProto
.internal_static_google_cloud_dialogflow_v2beta1_BargeInConfig_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.v2beta1.AudioConfigProto
.internal_static_google_cloud_dialogflow_v2beta1_BargeInConfig_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.v2beta1.BargeInConfig.class,
com.google.cloud.dialogflow.v2beta1.BargeInConfig.Builder.class);
}
// Construct using com.google.cloud.dialogflow.v2beta1.BargeInConfig.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getNoBargeInDurationFieldBuilder();
getTotalDurationFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
noBargeInDuration_ = null;
if (noBargeInDurationBuilder_ != null) {
noBargeInDurationBuilder_.dispose();
noBargeInDurationBuilder_ = null;
}
totalDuration_ = null;
if (totalDurationBuilder_ != null) {
totalDurationBuilder_.dispose();
totalDurationBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.dialogflow.v2beta1.AudioConfigProto
.internal_static_google_cloud_dialogflow_v2beta1_BargeInConfig_descriptor;
}
@java.lang.Override
public com.google.cloud.dialogflow.v2beta1.BargeInConfig getDefaultInstanceForType() {
return com.google.cloud.dialogflow.v2beta1.BargeInConfig.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.dialogflow.v2beta1.BargeInConfig build() {
com.google.cloud.dialogflow.v2beta1.BargeInConfig result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.dialogflow.v2beta1.BargeInConfig buildPartial() {
com.google.cloud.dialogflow.v2beta1.BargeInConfig result =
new com.google.cloud.dialogflow.v2beta1.BargeInConfig(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.dialogflow.v2beta1.BargeInConfig result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.noBargeInDuration_ =
noBargeInDurationBuilder_ == null
? noBargeInDuration_
: noBargeInDurationBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.totalDuration_ =
totalDurationBuilder_ == null ? totalDuration_ : totalDurationBuilder_.build();
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.dialogflow.v2beta1.BargeInConfig) {
return mergeFrom((com.google.cloud.dialogflow.v2beta1.BargeInConfig) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.dialogflow.v2beta1.BargeInConfig other) {
if (other == com.google.cloud.dialogflow.v2beta1.BargeInConfig.getDefaultInstance())
return this;
if (other.hasNoBargeInDuration()) {
mergeNoBargeInDuration(other.getNoBargeInDuration());
}
if (other.hasTotalDuration()) {
mergeTotalDuration(other.getTotalDuration());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(
getNoBargeInDurationFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getTotalDurationFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.protobuf.Duration noBargeInDuration_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Duration,
com.google.protobuf.Duration.Builder,
com.google.protobuf.DurationOrBuilder>
noBargeInDurationBuilder_;
/**
*
*
* <pre>
* Duration that is not eligible for barge-in at the beginning of the input
* audio.
* </pre>
*
* <code>.google.protobuf.Duration no_barge_in_duration = 1;</code>
*
* @return Whether the noBargeInDuration field is set.
*/
public boolean hasNoBargeInDuration() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Duration that is not eligible for barge-in at the beginning of the input
* audio.
* </pre>
*
* <code>.google.protobuf.Duration no_barge_in_duration = 1;</code>
*
* @return The noBargeInDuration.
*/
public com.google.protobuf.Duration getNoBargeInDuration() {
if (noBargeInDurationBuilder_ == null) {
return noBargeInDuration_ == null
? com.google.protobuf.Duration.getDefaultInstance()
: noBargeInDuration_;
} else {
return noBargeInDurationBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Duration that is not eligible for barge-in at the beginning of the input
* audio.
* </pre>
*
* <code>.google.protobuf.Duration no_barge_in_duration = 1;</code>
*/
public Builder setNoBargeInDuration(com.google.protobuf.Duration value) {
if (noBargeInDurationBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
noBargeInDuration_ = value;
} else {
noBargeInDurationBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Duration that is not eligible for barge-in at the beginning of the input
* audio.
* </pre>
*
* <code>.google.protobuf.Duration no_barge_in_duration = 1;</code>
*/
public Builder setNoBargeInDuration(com.google.protobuf.Duration.Builder builderForValue) {
if (noBargeInDurationBuilder_ == null) {
noBargeInDuration_ = builderForValue.build();
} else {
noBargeInDurationBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Duration that is not eligible for barge-in at the beginning of the input
* audio.
* </pre>
*
* <code>.google.protobuf.Duration no_barge_in_duration = 1;</code>
*/
public Builder mergeNoBargeInDuration(com.google.protobuf.Duration value) {
if (noBargeInDurationBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& noBargeInDuration_ != null
&& noBargeInDuration_ != com.google.protobuf.Duration.getDefaultInstance()) {
getNoBargeInDurationBuilder().mergeFrom(value);
} else {
noBargeInDuration_ = value;
}
} else {
noBargeInDurationBuilder_.mergeFrom(value);
}
if (noBargeInDuration_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Duration that is not eligible for barge-in at the beginning of the input
* audio.
* </pre>
*
* <code>.google.protobuf.Duration no_barge_in_duration = 1;</code>
*/
public Builder clearNoBargeInDuration() {
bitField0_ = (bitField0_ & ~0x00000001);
noBargeInDuration_ = null;
if (noBargeInDurationBuilder_ != null) {
noBargeInDurationBuilder_.dispose();
noBargeInDurationBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Duration that is not eligible for barge-in at the beginning of the input
* audio.
* </pre>
*
* <code>.google.protobuf.Duration no_barge_in_duration = 1;</code>
*/
public com.google.protobuf.Duration.Builder getNoBargeInDurationBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getNoBargeInDurationFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Duration that is not eligible for barge-in at the beginning of the input
* audio.
* </pre>
*
* <code>.google.protobuf.Duration no_barge_in_duration = 1;</code>
*/
public com.google.protobuf.DurationOrBuilder getNoBargeInDurationOrBuilder() {
if (noBargeInDurationBuilder_ != null) {
return noBargeInDurationBuilder_.getMessageOrBuilder();
} else {
return noBargeInDuration_ == null
? com.google.protobuf.Duration.getDefaultInstance()
: noBargeInDuration_;
}
}
/**
*
*
* <pre>
* Duration that is not eligible for barge-in at the beginning of the input
* audio.
* </pre>
*
* <code>.google.protobuf.Duration no_barge_in_duration = 1;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Duration,
com.google.protobuf.Duration.Builder,
com.google.protobuf.DurationOrBuilder>
getNoBargeInDurationFieldBuilder() {
if (noBargeInDurationBuilder_ == null) {
noBargeInDurationBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Duration,
com.google.protobuf.Duration.Builder,
com.google.protobuf.DurationOrBuilder>(
getNoBargeInDuration(), getParentForChildren(), isClean());
noBargeInDuration_ = null;
}
return noBargeInDurationBuilder_;
}
private com.google.protobuf.Duration totalDuration_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Duration,
com.google.protobuf.Duration.Builder,
com.google.protobuf.DurationOrBuilder>
totalDurationBuilder_;
/**
*
*
* <pre>
* Total duration for the playback at the beginning of the input audio.
* </pre>
*
* <code>.google.protobuf.Duration total_duration = 2;</code>
*
* @return Whether the totalDuration field is set.
*/
public boolean hasTotalDuration() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Total duration for the playback at the beginning of the input audio.
* </pre>
*
* <code>.google.protobuf.Duration total_duration = 2;</code>
*
* @return The totalDuration.
*/
public com.google.protobuf.Duration getTotalDuration() {
if (totalDurationBuilder_ == null) {
return totalDuration_ == null
? com.google.protobuf.Duration.getDefaultInstance()
: totalDuration_;
} else {
return totalDurationBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Total duration for the playback at the beginning of the input audio.
* </pre>
*
* <code>.google.protobuf.Duration total_duration = 2;</code>
*/
public Builder setTotalDuration(com.google.protobuf.Duration value) {
if (totalDurationBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
totalDuration_ = value;
} else {
totalDurationBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Total duration for the playback at the beginning of the input audio.
* </pre>
*
* <code>.google.protobuf.Duration total_duration = 2;</code>
*/
public Builder setTotalDuration(com.google.protobuf.Duration.Builder builderForValue) {
if (totalDurationBuilder_ == null) {
totalDuration_ = builderForValue.build();
} else {
totalDurationBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Total duration for the playback at the beginning of the input audio.
* </pre>
*
* <code>.google.protobuf.Duration total_duration = 2;</code>
*/
public Builder mergeTotalDuration(com.google.protobuf.Duration value) {
if (totalDurationBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& totalDuration_ != null
&& totalDuration_ != com.google.protobuf.Duration.getDefaultInstance()) {
getTotalDurationBuilder().mergeFrom(value);
} else {
totalDuration_ = value;
}
} else {
totalDurationBuilder_.mergeFrom(value);
}
if (totalDuration_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Total duration for the playback at the beginning of the input audio.
* </pre>
*
* <code>.google.protobuf.Duration total_duration = 2;</code>
*/
public Builder clearTotalDuration() {
bitField0_ = (bitField0_ & ~0x00000002);
totalDuration_ = null;
if (totalDurationBuilder_ != null) {
totalDurationBuilder_.dispose();
totalDurationBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Total duration for the playback at the beginning of the input audio.
* </pre>
*
* <code>.google.protobuf.Duration total_duration = 2;</code>
*/
public com.google.protobuf.Duration.Builder getTotalDurationBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getTotalDurationFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Total duration for the playback at the beginning of the input audio.
* </pre>
*
* <code>.google.protobuf.Duration total_duration = 2;</code>
*/
public com.google.protobuf.DurationOrBuilder getTotalDurationOrBuilder() {
if (totalDurationBuilder_ != null) {
return totalDurationBuilder_.getMessageOrBuilder();
} else {
return totalDuration_ == null
? com.google.protobuf.Duration.getDefaultInstance()
: totalDuration_;
}
}
/**
*
*
* <pre>
* Total duration for the playback at the beginning of the input audio.
* </pre>
*
* <code>.google.protobuf.Duration total_duration = 2;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Duration,
com.google.protobuf.Duration.Builder,
com.google.protobuf.DurationOrBuilder>
getTotalDurationFieldBuilder() {
if (totalDurationBuilder_ == null) {
totalDurationBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Duration,
com.google.protobuf.Duration.Builder,
com.google.protobuf.DurationOrBuilder>(
getTotalDuration(), getParentForChildren(), isClean());
totalDuration_ = null;
}
return totalDurationBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.v2beta1.BargeInConfig)
}
// @@protoc_insertion_point(class_scope:google.cloud.dialogflow.v2beta1.BargeInConfig)
private static final com.google.cloud.dialogflow.v2beta1.BargeInConfig DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.dialogflow.v2beta1.BargeInConfig();
}
public static com.google.cloud.dialogflow.v2beta1.BargeInConfig getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<BargeInConfig> PARSER =
new com.google.protobuf.AbstractParser<BargeInConfig>() {
@java.lang.Override
public BargeInConfig parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<BargeInConfig> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<BargeInConfig> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.dialogflow.v2beta1.BargeInConfig getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,047 | java-gsuite-addons/proto-google-apps-script-type-protos/src/main/java/com/google/apps/script/type/docs/DocsAddOnManifest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/apps/script/type/docs/docs_addon_manifest.proto
// Protobuf Java Version: 3.25.8
package com.google.apps.script.type.docs;
/**
*
*
* <pre>
* Docs add-on manifest.
* </pre>
*
* Protobuf type {@code google.apps.script.type.docs.DocsAddOnManifest}
*/
public final class DocsAddOnManifest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.apps.script.type.docs.DocsAddOnManifest)
DocsAddOnManifestOrBuilder {
private static final long serialVersionUID = 0L;
// Use DocsAddOnManifest.newBuilder() to construct.
private DocsAddOnManifest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private DocsAddOnManifest() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new DocsAddOnManifest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.apps.script.type.docs.DocsAddOnManifestProto
.internal_static_google_apps_script_type_docs_DocsAddOnManifest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.apps.script.type.docs.DocsAddOnManifestProto
.internal_static_google_apps_script_type_docs_DocsAddOnManifest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.apps.script.type.docs.DocsAddOnManifest.class,
com.google.apps.script.type.docs.DocsAddOnManifest.Builder.class);
}
private int bitField0_;
public static final int HOMEPAGE_TRIGGER_FIELD_NUMBER = 1;
private com.google.apps.script.type.HomepageExtensionPoint homepageTrigger_;
/**
*
*
* <pre>
* If present, this overrides the configuration from
* `addOns.common.homepageTrigger`.
* </pre>
*
* <code>.google.apps.script.type.HomepageExtensionPoint homepage_trigger = 1;</code>
*
* @return Whether the homepageTrigger field is set.
*/
@java.lang.Override
public boolean hasHomepageTrigger() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* If present, this overrides the configuration from
* `addOns.common.homepageTrigger`.
* </pre>
*
* <code>.google.apps.script.type.HomepageExtensionPoint homepage_trigger = 1;</code>
*
* @return The homepageTrigger.
*/
@java.lang.Override
public com.google.apps.script.type.HomepageExtensionPoint getHomepageTrigger() {
return homepageTrigger_ == null
? com.google.apps.script.type.HomepageExtensionPoint.getDefaultInstance()
: homepageTrigger_;
}
/**
*
*
* <pre>
* If present, this overrides the configuration from
* `addOns.common.homepageTrigger`.
* </pre>
*
* <code>.google.apps.script.type.HomepageExtensionPoint homepage_trigger = 1;</code>
*/
@java.lang.Override
public com.google.apps.script.type.HomepageExtensionPointOrBuilder getHomepageTriggerOrBuilder() {
return homepageTrigger_ == null
? com.google.apps.script.type.HomepageExtensionPoint.getDefaultInstance()
: homepageTrigger_;
}
public static final int ON_FILE_SCOPE_GRANTED_TRIGGER_FIELD_NUMBER = 2;
private com.google.apps.script.type.docs.DocsExtensionPoint onFileScopeGrantedTrigger_;
/**
*
*
* <pre>
* Endpoint to execute when file scope authorization is granted
* for this document/user pair.
* </pre>
*
* <code>.google.apps.script.type.docs.DocsExtensionPoint on_file_scope_granted_trigger = 2;
* </code>
*
* @return Whether the onFileScopeGrantedTrigger field is set.
*/
@java.lang.Override
public boolean hasOnFileScopeGrantedTrigger() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Endpoint to execute when file scope authorization is granted
* for this document/user pair.
* </pre>
*
* <code>.google.apps.script.type.docs.DocsExtensionPoint on_file_scope_granted_trigger = 2;
* </code>
*
* @return The onFileScopeGrantedTrigger.
*/
@java.lang.Override
public com.google.apps.script.type.docs.DocsExtensionPoint getOnFileScopeGrantedTrigger() {
return onFileScopeGrantedTrigger_ == null
? com.google.apps.script.type.docs.DocsExtensionPoint.getDefaultInstance()
: onFileScopeGrantedTrigger_;
}
/**
*
*
* <pre>
* Endpoint to execute when file scope authorization is granted
* for this document/user pair.
* </pre>
*
* <code>.google.apps.script.type.docs.DocsExtensionPoint on_file_scope_granted_trigger = 2;
* </code>
*/
@java.lang.Override
public com.google.apps.script.type.docs.DocsExtensionPointOrBuilder
getOnFileScopeGrantedTriggerOrBuilder() {
return onFileScopeGrantedTrigger_ == null
? com.google.apps.script.type.docs.DocsExtensionPoint.getDefaultInstance()
: onFileScopeGrantedTrigger_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getHomepageTrigger());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getOnFileScopeGrantedTrigger());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getHomepageTrigger());
}
if (((bitField0_ & 0x00000002) != 0)) {
size +=
com.google.protobuf.CodedOutputStream.computeMessageSize(
2, getOnFileScopeGrantedTrigger());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.apps.script.type.docs.DocsAddOnManifest)) {
return super.equals(obj);
}
com.google.apps.script.type.docs.DocsAddOnManifest other =
(com.google.apps.script.type.docs.DocsAddOnManifest) obj;
if (hasHomepageTrigger() != other.hasHomepageTrigger()) return false;
if (hasHomepageTrigger()) {
if (!getHomepageTrigger().equals(other.getHomepageTrigger())) return false;
}
if (hasOnFileScopeGrantedTrigger() != other.hasOnFileScopeGrantedTrigger()) return false;
if (hasOnFileScopeGrantedTrigger()) {
if (!getOnFileScopeGrantedTrigger().equals(other.getOnFileScopeGrantedTrigger()))
return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasHomepageTrigger()) {
hash = (37 * hash) + HOMEPAGE_TRIGGER_FIELD_NUMBER;
hash = (53 * hash) + getHomepageTrigger().hashCode();
}
if (hasOnFileScopeGrantedTrigger()) {
hash = (37 * hash) + ON_FILE_SCOPE_GRANTED_TRIGGER_FIELD_NUMBER;
hash = (53 * hash) + getOnFileScopeGrantedTrigger().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.apps.script.type.docs.DocsAddOnManifest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.apps.script.type.docs.DocsAddOnManifest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.apps.script.type.docs.DocsAddOnManifest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.apps.script.type.docs.DocsAddOnManifest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.apps.script.type.docs.DocsAddOnManifest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.apps.script.type.docs.DocsAddOnManifest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.apps.script.type.docs.DocsAddOnManifest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.apps.script.type.docs.DocsAddOnManifest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.apps.script.type.docs.DocsAddOnManifest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.apps.script.type.docs.DocsAddOnManifest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.apps.script.type.docs.DocsAddOnManifest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.apps.script.type.docs.DocsAddOnManifest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.apps.script.type.docs.DocsAddOnManifest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Docs add-on manifest.
* </pre>
*
* Protobuf type {@code google.apps.script.type.docs.DocsAddOnManifest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.apps.script.type.docs.DocsAddOnManifest)
com.google.apps.script.type.docs.DocsAddOnManifestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.apps.script.type.docs.DocsAddOnManifestProto
.internal_static_google_apps_script_type_docs_DocsAddOnManifest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.apps.script.type.docs.DocsAddOnManifestProto
.internal_static_google_apps_script_type_docs_DocsAddOnManifest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.apps.script.type.docs.DocsAddOnManifest.class,
com.google.apps.script.type.docs.DocsAddOnManifest.Builder.class);
}
// Construct using com.google.apps.script.type.docs.DocsAddOnManifest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getHomepageTriggerFieldBuilder();
getOnFileScopeGrantedTriggerFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
homepageTrigger_ = null;
if (homepageTriggerBuilder_ != null) {
homepageTriggerBuilder_.dispose();
homepageTriggerBuilder_ = null;
}
onFileScopeGrantedTrigger_ = null;
if (onFileScopeGrantedTriggerBuilder_ != null) {
onFileScopeGrantedTriggerBuilder_.dispose();
onFileScopeGrantedTriggerBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.apps.script.type.docs.DocsAddOnManifestProto
.internal_static_google_apps_script_type_docs_DocsAddOnManifest_descriptor;
}
@java.lang.Override
public com.google.apps.script.type.docs.DocsAddOnManifest getDefaultInstanceForType() {
return com.google.apps.script.type.docs.DocsAddOnManifest.getDefaultInstance();
}
@java.lang.Override
public com.google.apps.script.type.docs.DocsAddOnManifest build() {
com.google.apps.script.type.docs.DocsAddOnManifest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.apps.script.type.docs.DocsAddOnManifest buildPartial() {
com.google.apps.script.type.docs.DocsAddOnManifest result =
new com.google.apps.script.type.docs.DocsAddOnManifest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.apps.script.type.docs.DocsAddOnManifest result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.homepageTrigger_ =
homepageTriggerBuilder_ == null ? homepageTrigger_ : homepageTriggerBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.onFileScopeGrantedTrigger_ =
onFileScopeGrantedTriggerBuilder_ == null
? onFileScopeGrantedTrigger_
: onFileScopeGrantedTriggerBuilder_.build();
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.apps.script.type.docs.DocsAddOnManifest) {
return mergeFrom((com.google.apps.script.type.docs.DocsAddOnManifest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.apps.script.type.docs.DocsAddOnManifest other) {
if (other == com.google.apps.script.type.docs.DocsAddOnManifest.getDefaultInstance())
return this;
if (other.hasHomepageTrigger()) {
mergeHomepageTrigger(other.getHomepageTrigger());
}
if (other.hasOnFileScopeGrantedTrigger()) {
mergeOnFileScopeGrantedTrigger(other.getOnFileScopeGrantedTrigger());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(getHomepageTriggerFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(
getOnFileScopeGrantedTriggerFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.apps.script.type.HomepageExtensionPoint homepageTrigger_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.apps.script.type.HomepageExtensionPoint,
com.google.apps.script.type.HomepageExtensionPoint.Builder,
com.google.apps.script.type.HomepageExtensionPointOrBuilder>
homepageTriggerBuilder_;
/**
*
*
* <pre>
* If present, this overrides the configuration from
* `addOns.common.homepageTrigger`.
* </pre>
*
* <code>.google.apps.script.type.HomepageExtensionPoint homepage_trigger = 1;</code>
*
* @return Whether the homepageTrigger field is set.
*/
public boolean hasHomepageTrigger() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* If present, this overrides the configuration from
* `addOns.common.homepageTrigger`.
* </pre>
*
* <code>.google.apps.script.type.HomepageExtensionPoint homepage_trigger = 1;</code>
*
* @return The homepageTrigger.
*/
public com.google.apps.script.type.HomepageExtensionPoint getHomepageTrigger() {
if (homepageTriggerBuilder_ == null) {
return homepageTrigger_ == null
? com.google.apps.script.type.HomepageExtensionPoint.getDefaultInstance()
: homepageTrigger_;
} else {
return homepageTriggerBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* If present, this overrides the configuration from
* `addOns.common.homepageTrigger`.
* </pre>
*
* <code>.google.apps.script.type.HomepageExtensionPoint homepage_trigger = 1;</code>
*/
public Builder setHomepageTrigger(com.google.apps.script.type.HomepageExtensionPoint value) {
if (homepageTriggerBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
homepageTrigger_ = value;
} else {
homepageTriggerBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* If present, this overrides the configuration from
* `addOns.common.homepageTrigger`.
* </pre>
*
* <code>.google.apps.script.type.HomepageExtensionPoint homepage_trigger = 1;</code>
*/
public Builder setHomepageTrigger(
com.google.apps.script.type.HomepageExtensionPoint.Builder builderForValue) {
if (homepageTriggerBuilder_ == null) {
homepageTrigger_ = builderForValue.build();
} else {
homepageTriggerBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* If present, this overrides the configuration from
* `addOns.common.homepageTrigger`.
* </pre>
*
* <code>.google.apps.script.type.HomepageExtensionPoint homepage_trigger = 1;</code>
*/
public Builder mergeHomepageTrigger(com.google.apps.script.type.HomepageExtensionPoint value) {
if (homepageTriggerBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& homepageTrigger_ != null
&& homepageTrigger_
!= com.google.apps.script.type.HomepageExtensionPoint.getDefaultInstance()) {
getHomepageTriggerBuilder().mergeFrom(value);
} else {
homepageTrigger_ = value;
}
} else {
homepageTriggerBuilder_.mergeFrom(value);
}
if (homepageTrigger_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* If present, this overrides the configuration from
* `addOns.common.homepageTrigger`.
* </pre>
*
* <code>.google.apps.script.type.HomepageExtensionPoint homepage_trigger = 1;</code>
*/
public Builder clearHomepageTrigger() {
bitField0_ = (bitField0_ & ~0x00000001);
homepageTrigger_ = null;
if (homepageTriggerBuilder_ != null) {
homepageTriggerBuilder_.dispose();
homepageTriggerBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* If present, this overrides the configuration from
* `addOns.common.homepageTrigger`.
* </pre>
*
* <code>.google.apps.script.type.HomepageExtensionPoint homepage_trigger = 1;</code>
*/
public com.google.apps.script.type.HomepageExtensionPoint.Builder getHomepageTriggerBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getHomepageTriggerFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* If present, this overrides the configuration from
* `addOns.common.homepageTrigger`.
* </pre>
*
* <code>.google.apps.script.type.HomepageExtensionPoint homepage_trigger = 1;</code>
*/
public com.google.apps.script.type.HomepageExtensionPointOrBuilder
getHomepageTriggerOrBuilder() {
if (homepageTriggerBuilder_ != null) {
return homepageTriggerBuilder_.getMessageOrBuilder();
} else {
return homepageTrigger_ == null
? com.google.apps.script.type.HomepageExtensionPoint.getDefaultInstance()
: homepageTrigger_;
}
}
/**
*
*
* <pre>
* If present, this overrides the configuration from
* `addOns.common.homepageTrigger`.
* </pre>
*
* <code>.google.apps.script.type.HomepageExtensionPoint homepage_trigger = 1;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.apps.script.type.HomepageExtensionPoint,
com.google.apps.script.type.HomepageExtensionPoint.Builder,
com.google.apps.script.type.HomepageExtensionPointOrBuilder>
getHomepageTriggerFieldBuilder() {
if (homepageTriggerBuilder_ == null) {
homepageTriggerBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.apps.script.type.HomepageExtensionPoint,
com.google.apps.script.type.HomepageExtensionPoint.Builder,
com.google.apps.script.type.HomepageExtensionPointOrBuilder>(
getHomepageTrigger(), getParentForChildren(), isClean());
homepageTrigger_ = null;
}
return homepageTriggerBuilder_;
}
private com.google.apps.script.type.docs.DocsExtensionPoint onFileScopeGrantedTrigger_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.apps.script.type.docs.DocsExtensionPoint,
com.google.apps.script.type.docs.DocsExtensionPoint.Builder,
com.google.apps.script.type.docs.DocsExtensionPointOrBuilder>
onFileScopeGrantedTriggerBuilder_;
/**
*
*
* <pre>
* Endpoint to execute when file scope authorization is granted
* for this document/user pair.
* </pre>
*
* <code>.google.apps.script.type.docs.DocsExtensionPoint on_file_scope_granted_trigger = 2;
* </code>
*
* @return Whether the onFileScopeGrantedTrigger field is set.
*/
public boolean hasOnFileScopeGrantedTrigger() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Endpoint to execute when file scope authorization is granted
* for this document/user pair.
* </pre>
*
* <code>.google.apps.script.type.docs.DocsExtensionPoint on_file_scope_granted_trigger = 2;
* </code>
*
* @return The onFileScopeGrantedTrigger.
*/
public com.google.apps.script.type.docs.DocsExtensionPoint getOnFileScopeGrantedTrigger() {
if (onFileScopeGrantedTriggerBuilder_ == null) {
return onFileScopeGrantedTrigger_ == null
? com.google.apps.script.type.docs.DocsExtensionPoint.getDefaultInstance()
: onFileScopeGrantedTrigger_;
} else {
return onFileScopeGrantedTriggerBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Endpoint to execute when file scope authorization is granted
* for this document/user pair.
* </pre>
*
* <code>.google.apps.script.type.docs.DocsExtensionPoint on_file_scope_granted_trigger = 2;
* </code>
*/
public Builder setOnFileScopeGrantedTrigger(
com.google.apps.script.type.docs.DocsExtensionPoint value) {
if (onFileScopeGrantedTriggerBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
onFileScopeGrantedTrigger_ = value;
} else {
onFileScopeGrantedTriggerBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Endpoint to execute when file scope authorization is granted
* for this document/user pair.
* </pre>
*
* <code>.google.apps.script.type.docs.DocsExtensionPoint on_file_scope_granted_trigger = 2;
* </code>
*/
public Builder setOnFileScopeGrantedTrigger(
com.google.apps.script.type.docs.DocsExtensionPoint.Builder builderForValue) {
if (onFileScopeGrantedTriggerBuilder_ == null) {
onFileScopeGrantedTrigger_ = builderForValue.build();
} else {
onFileScopeGrantedTriggerBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Endpoint to execute when file scope authorization is granted
* for this document/user pair.
* </pre>
*
* <code>.google.apps.script.type.docs.DocsExtensionPoint on_file_scope_granted_trigger = 2;
* </code>
*/
public Builder mergeOnFileScopeGrantedTrigger(
com.google.apps.script.type.docs.DocsExtensionPoint value) {
if (onFileScopeGrantedTriggerBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& onFileScopeGrantedTrigger_ != null
&& onFileScopeGrantedTrigger_
!= com.google.apps.script.type.docs.DocsExtensionPoint.getDefaultInstance()) {
getOnFileScopeGrantedTriggerBuilder().mergeFrom(value);
} else {
onFileScopeGrantedTrigger_ = value;
}
} else {
onFileScopeGrantedTriggerBuilder_.mergeFrom(value);
}
if (onFileScopeGrantedTrigger_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Endpoint to execute when file scope authorization is granted
* for this document/user pair.
* </pre>
*
* <code>.google.apps.script.type.docs.DocsExtensionPoint on_file_scope_granted_trigger = 2;
* </code>
*/
public Builder clearOnFileScopeGrantedTrigger() {
bitField0_ = (bitField0_ & ~0x00000002);
onFileScopeGrantedTrigger_ = null;
if (onFileScopeGrantedTriggerBuilder_ != null) {
onFileScopeGrantedTriggerBuilder_.dispose();
onFileScopeGrantedTriggerBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Endpoint to execute when file scope authorization is granted
* for this document/user pair.
* </pre>
*
* <code>.google.apps.script.type.docs.DocsExtensionPoint on_file_scope_granted_trigger = 2;
* </code>
*/
public com.google.apps.script.type.docs.DocsExtensionPoint.Builder
getOnFileScopeGrantedTriggerBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getOnFileScopeGrantedTriggerFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Endpoint to execute when file scope authorization is granted
* for this document/user pair.
* </pre>
*
* <code>.google.apps.script.type.docs.DocsExtensionPoint on_file_scope_granted_trigger = 2;
* </code>
*/
public com.google.apps.script.type.docs.DocsExtensionPointOrBuilder
getOnFileScopeGrantedTriggerOrBuilder() {
if (onFileScopeGrantedTriggerBuilder_ != null) {
return onFileScopeGrantedTriggerBuilder_.getMessageOrBuilder();
} else {
return onFileScopeGrantedTrigger_ == null
? com.google.apps.script.type.docs.DocsExtensionPoint.getDefaultInstance()
: onFileScopeGrantedTrigger_;
}
}
/**
*
*
* <pre>
* Endpoint to execute when file scope authorization is granted
* for this document/user pair.
* </pre>
*
* <code>.google.apps.script.type.docs.DocsExtensionPoint on_file_scope_granted_trigger = 2;
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.apps.script.type.docs.DocsExtensionPoint,
com.google.apps.script.type.docs.DocsExtensionPoint.Builder,
com.google.apps.script.type.docs.DocsExtensionPointOrBuilder>
getOnFileScopeGrantedTriggerFieldBuilder() {
if (onFileScopeGrantedTriggerBuilder_ == null) {
onFileScopeGrantedTriggerBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.apps.script.type.docs.DocsExtensionPoint,
com.google.apps.script.type.docs.DocsExtensionPoint.Builder,
com.google.apps.script.type.docs.DocsExtensionPointOrBuilder>(
getOnFileScopeGrantedTrigger(), getParentForChildren(), isClean());
onFileScopeGrantedTrigger_ = null;
}
return onFileScopeGrantedTriggerBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.apps.script.type.docs.DocsAddOnManifest)
}
// @@protoc_insertion_point(class_scope:google.apps.script.type.docs.DocsAddOnManifest)
private static final com.google.apps.script.type.docs.DocsAddOnManifest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.apps.script.type.docs.DocsAddOnManifest();
}
public static com.google.apps.script.type.docs.DocsAddOnManifest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<DocsAddOnManifest> PARSER =
new com.google.protobuf.AbstractParser<DocsAddOnManifest>() {
@java.lang.Override
public DocsAddOnManifest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<DocsAddOnManifest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<DocsAddOnManifest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.apps.script.type.docs.DocsAddOnManifest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.processors.standard;
import org.apache.nifi.annotation.behavior.InputRequirement;
import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
import org.apache.nifi.annotation.behavior.SideEffectFree;
import org.apache.nifi.annotation.behavior.SupportsBatching;
import org.apache.nifi.annotation.behavior.WritesAttribute;
import org.apache.nifi.annotation.behavior.WritesAttributes;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.components.AllowableValue;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.components.PropertyValue;
import org.apache.nifi.components.ValidationContext;
import org.apache.nifi.components.ValidationResult;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.logging.ComponentLog;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.util.StandardValidators;
import org.supercsv.cellprocessor.Optional;
import org.supercsv.cellprocessor.ParseBigDecimal;
import org.supercsv.cellprocessor.ParseBool;
import org.supercsv.cellprocessor.ParseChar;
import org.supercsv.cellprocessor.ParseDate;
import org.supercsv.cellprocessor.ParseDouble;
import org.supercsv.cellprocessor.ParseInt;
import org.supercsv.cellprocessor.ParseLong;
import org.supercsv.cellprocessor.constraint.DMinMax;
import org.supercsv.cellprocessor.constraint.Equals;
import org.supercsv.cellprocessor.constraint.ForbidSubStr;
import org.supercsv.cellprocessor.constraint.IsIncludedIn;
import org.supercsv.cellprocessor.constraint.LMinMax;
import org.supercsv.cellprocessor.constraint.NotNull;
import org.supercsv.cellprocessor.constraint.RequireHashCode;
import org.supercsv.cellprocessor.constraint.RequireSubStr;
import org.supercsv.cellprocessor.constraint.StrMinMax;
import org.supercsv.cellprocessor.constraint.StrNotNullOrEmpty;
import org.supercsv.cellprocessor.constraint.StrRegEx;
import org.supercsv.cellprocessor.constraint.Strlen;
import org.supercsv.cellprocessor.constraint.Unique;
import org.supercsv.cellprocessor.constraint.UniqueHashCode;
import org.supercsv.cellprocessor.ift.CellProcessor;
import org.supercsv.exception.SuperCsvException;
import org.supercsv.io.CsvListReader;
import org.supercsv.prefs.CsvPreference;
import org.supercsv.util.CsvContext;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;
@SideEffectFree
@SupportsBatching
@InputRequirement(Requirement.INPUT_REQUIRED)
@Tags({"csv", "schema", "validation"})
@CapabilityDescription("Validates the contents of FlowFiles or a FlowFile attribute value against a user-specified CSV schema. " +
"Take a look at the additional documentation of this processor for some schema examples.")
@WritesAttributes({
@WritesAttribute(attribute = "count.valid.lines", description = "If line by line validation, number of valid lines extracted from the source data"),
@WritesAttribute(attribute = "count.invalid.lines", description = "If line by line validation, number of invalid lines extracted from the source data"),
@WritesAttribute(attribute = "count.total.lines", description = "If line by line validation, total number of lines in the source data"),
@WritesAttribute(attribute = "validation.error.message", description = "For flow files routed to invalid, message of the first validation error")
})
public class ValidateCsv extends AbstractProcessor {
private final static List<String> ALLOWED_OPERATORS = List.of(
"ParseBigDecimal", "ParseBool", "ParseChar", "ParseDate", "ParseDouble", "ParseInt", "ParseLong",
"Optional", "DMinMax", "Equals", "ForbidSubStr", "LMinMax", "NotNull", "Null", "RequireHashCode", "RequireSubStr",
"Strlen", "StrMinMax", "StrNotNullOrEmpty", "StrRegEx", "Unique", "UniqueHashCode", "IsIncludedIn"
);
private static final String ROUTE_WHOLE_FLOW_FILE = "FlowFile validation";
private static final String ROUTE_LINES_INDIVIDUALLY = "Line by line validation";
public static final AllowableValue VALIDATE_WHOLE_FLOWFILE = new AllowableValue(ROUTE_WHOLE_FLOW_FILE, ROUTE_WHOLE_FLOW_FILE,
"As soon as an error is found in the CSV file, the validation will stop and the whole flow file will be routed to the 'invalid'"
+ " relationship. This option offers best performances.");
public static final AllowableValue VALIDATE_LINES_INDIVIDUALLY = new AllowableValue(ROUTE_LINES_INDIVIDUALLY, ROUTE_LINES_INDIVIDUALLY,
"In case an error is found, the input CSV file will be split into two FlowFiles: one routed to the 'valid' "
+ "relationship containing all the correct lines and one routed to the 'invalid' relationship containing all "
+ "the incorrect lines. Take care if choosing this option while using Unique cell processors in schema definition:"
+ "the first occurrence will be considered valid and the next ones as invalid.");
    // ---- Processor properties ------------------------------------------------------------
    // Optional when a header is present (a permissive schema is then derived in onTrigger).
    public static final PropertyDescriptor SCHEMA = new PropertyDescriptor.Builder()
            .name("validate-csv-schema")
            .displayName("Schema")
            .description("The schema to be used for validation. Is expected a comma-delimited string representing the cell "
                    + "processors to apply. The following cell processors are allowed in the schema definition: "
                    + ALLOWED_OPERATORS + ". Note: cell processors cannot be nested except with Optional. Schema is required if Header is false.")
            .required(false)
            .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
            .addValidator(StandardValidators.NON_EMPTY_EL_VALIDATOR)
            .build();
    public static final PropertyDescriptor HEADER = new PropertyDescriptor.Builder()
            .name("validate-csv-header")
            .displayName("Header")
            .description("True if the incoming flow file contains a header to ignore, false otherwise.")
            .required(true)
            .defaultValue("true")
            .allowableValues("true", "false")
            .addValidator(StandardValidators.BOOLEAN_VALIDATOR)
            .build();
    public static final PropertyDescriptor QUOTE_CHARACTER = new PropertyDescriptor.Builder()
            .name("validate-csv-quote")
            .displayName("Quote character")
            .description("Character used as 'quote' in the incoming data. Example: \"")
            .required(true)
            .defaultValue("\"")
            .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();
    // Guards against unbounded reads caused by unbalanced quotes; 0 disables the limit.
    public static final PropertyDescriptor MAX_LINES_PER_ROW = new PropertyDescriptor.Builder()
            .name("Max Lines Per Row")
            .description("""
                The maximum number of lines that a row can span before an exception is thrown. This option allows
                the processor to fail fast when encountering CSV with mismatching quotes - the normal behaviour
                would be to continue reading until the matching quote is found, which could potentially mean reading
                the whole file (and exhausting all available memory). Zero value will disable this option.
                """)
            .required(true)
            .defaultValue("0")
            .expressionLanguageSupported(ExpressionLanguageScope.NONE)
            .addValidator(StandardValidators.NON_NEGATIVE_INTEGER_VALIDATOR)
            .build();
    public static final PropertyDescriptor DELIMITER_CHARACTER = new PropertyDescriptor.Builder()
            .name("validate-csv-delimiter")
            .displayName("Delimiter character")
            .description("Character used as 'delimiter' in the incoming data. Example: ,")
            .required(true)
            .defaultValue(",")
            .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();
    // Escape sequences typed in the UI (e.g. \n) are translated to control chars in getPreference().
    public static final PropertyDescriptor END_OF_LINE_CHARACTER = new PropertyDescriptor.Builder()
            .name("validate-csv-eol")
            .displayName("End of line symbols")
            .description("Symbols used as 'end of line' in the incoming data. Example: \\n")
            .required(true)
            .defaultValue("\\n")
            .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();
    public static final PropertyDescriptor VALIDATION_STRATEGY = new PropertyDescriptor.Builder()
            .name("validate-csv-strategy")
            .displayName("Validation strategy")
            .description("Strategy to apply when routing input files to output relationships.")
            .required(true)
            .defaultValue(VALIDATE_WHOLE_FLOWFILE)
            .allowableValues(VALIDATE_LINES_INDIVIDUALLY, VALIDATE_WHOLE_FLOWFILE)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();
    // Only meaningful in whole-FlowFile mode (enforced via dependsOn).
    public static final PropertyDescriptor CSV_SOURCE_ATTRIBUTE = new PropertyDescriptor.Builder()
            .name("CSV Source Attribute")
            .description("The name of the attribute containing CSV data to be validated. If this property is blank, the FlowFile content will be validated.")
            .required(false)
            .expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT)
            .addValidator(StandardValidators.ATTRIBUTE_KEY_VALIDATOR)
            .dependsOn(VALIDATION_STRATEGY, VALIDATE_WHOLE_FLOWFILE)
            .build();
    public static final PropertyDescriptor INCLUDE_ALL_VIOLATIONS = new PropertyDescriptor.Builder()
            .name("validate-csv-violations")
            .displayName("Include all violations")
            .description("If true, the validation.error.message attribute would include the list of all the violations"
                    + " for the first invalid line. Note that setting this property to true would slightly decrease"
                    + " the performances as all columns would be validated. If false, a line is invalid as soon as a"
                    + " column is found violating the specified constraint and only this violation for the first invalid"
                    + " line will be included in the validation.error.message attribute.")
            .required(true)
            .allowableValues("true", "false")
            .defaultValue("false")
            .build();
    // Immutable, ordered list returned by getSupportedPropertyDescriptors().
    private static final List<PropertyDescriptor> PROPERTY_DESCRIPTORS = List.of(
            SCHEMA,
            CSV_SOURCE_ATTRIBUTE,
            HEADER,
            DELIMITER_CHARACTER,
            QUOTE_CHARACTER,
            MAX_LINES_PER_ROW,
            END_OF_LINE_CHARACTER,
            VALIDATION_STRATEGY,
            INCLUDE_ALL_VIOLATIONS
    );
    // ---- Relationships -------------------------------------------------------------------
    public static final Relationship REL_VALID = new Relationship.Builder()
            .name("valid")
            .description("FlowFiles that are successfully validated against the schema are routed to this relationship")
            .build();
    public static final Relationship REL_INVALID = new Relationship.Builder()
            .name("invalid")
            .description("FlowFiles that are not valid according to the specified schema,"
                    + " or no schema or CSV header can be identified, are routed to this relationship")
            .build();
    private static final Set<Relationship> RELATIONSHIPS = Set.of(
            REL_VALID,
            REL_INVALID
    );
    @Override
    public Set<Relationship> getRelationships() {
        // Static immutable set: this processor only routes to 'valid' and 'invalid'.
        return RELATIONSHIPS;
    }
    @Override
    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
        // Static immutable list built once at class-load time.
        return PROPERTY_DESCRIPTORS;
    }
@Override
protected Collection<ValidationResult> customValidate(ValidationContext context) {
PropertyValue schemaProp = context.getProperty(SCHEMA);
PropertyValue headerProp = context.getProperty(HEADER);
String schema = schemaProp.getValue();
String subject = SCHEMA.getName();
if (context.isExpressionLanguageSupported(subject) && context.isExpressionLanguagePresent(schema)) {
return List.of(new ValidationResult.Builder().subject(subject).input(schema).explanation("Expression Language Present").valid(true).build());
}
// If no Expression Language is present, try parsing the schema
try {
if (schema != null) {
this.parseSchema(schema);
} else if (!headerProp.asBoolean()) {
throw(new Exception("Schema cannot be empty if Header property is false."));
}
} catch (Exception e) {
final List<ValidationResult> problems = new ArrayList<>(1);
problems.add(new ValidationResult.Builder().subject(subject)
.input(schema)
.valid(false)
.explanation("Error while parsing the schema: " + e.getMessage())
.build());
return problems;
}
return super.customValidate(context);
}
public CsvPreference getPreference(final ProcessContext context, final FlowFile flowFile) {
// When going from the UI to Java, the characters are escaped so that what you
// input is transferred over to Java as is. So when you type the characters "\"
// and "n" into the UI the Java string will end up being those two characters
// not the interpreted value "\n".
final String msgDemarcator = context.getProperty(END_OF_LINE_CHARACTER)
.evaluateAttributeExpressions(flowFile)
.getValue()
.replace("\\n", "\n")
.replace("\\r", "\r")
.replace("\\t", "\t");
final char quoteChar = context.getProperty(QUOTE_CHARACTER)
.evaluateAttributeExpressions(flowFile)
.getValue()
.charAt(0);
final int delimiterChar = context.getProperty(DELIMITER_CHARACTER)
.evaluateAttributeExpressions(flowFile)
.getValue()
.charAt(0);
final int maxLinesPerRow = context.getProperty(MAX_LINES_PER_ROW).asInteger();
return new CsvPreference.Builder(quoteChar, delimiterChar, msgDemarcator)
.maxLinesPerRow(maxLinesPerRow)
.build();
}
/**
* Method used to parse the string supplied by the user. The string is converted
* to a list of cell processors used to validate the CSV data.
* @param schema Schema to parse
*/
private CellProcessor[] parseSchema(String schema) {
List<CellProcessor> processorsList = new ArrayList<>();
String remaining = schema;
while (!remaining.isEmpty()) {
remaining = setProcessor(remaining, processorsList);
}
return processorsList.toArray(new CellProcessor[0]);
}
    /**
     * Consumes the next processor definition from {@code remaining}, appends the resulting
     * cell processor to {@code processorsList}, and returns the unconsumed tail of the string.
     * Parsing stops at a top-level comma or when the parentheses of one definition balance,
     * so a single definition may itself contain commas inside its parentheses.
     */
    private String setProcessor(String remaining, List<CellProcessor> processorsList) {
        StringBuilder buffer = new StringBuilder();
        String inputString = remaining;
        int i = 0;
        int opening = 0;
        int closing = 0;
        // Accumulate characters until one full definition has been captured.
        while (buffer.length() != inputString.length()) {
            char c = remaining.charAt(i);
            i++;
            if (opening == 0 && c == ',') {
                if (i == 1) {
                    // Leading separator left over from the previous definition: skip it.
                    inputString = inputString.substring(1);
                    continue;
                }
                // Top-level comma terminates the current definition.
                break;
            }
            buffer.append(c);
            if (c == '(') {
                opening++;
            } else if (c == ')') {
                closing++;
            }
            if (opening > 0 && opening == closing) {
                // Parentheses balanced: the definition (including its argument) is complete.
                break;
            }
        }
        final String procString = buffer.toString().trim();
        // Split "Method(argument)" into its method name and raw argument text (if any).
        opening = procString.indexOf('(');
        String method = procString;
        String argument = null;
        if (opening != -1) {
            argument = method.substring(opening + 1, method.length() - 1);
            method = method.substring(0, opening);
        }
        processorsList.add(getProcessor(method.toLowerCase(), argument));
        return remaining.substring(i);
    }
private CellProcessor getProcessor(String method, String argument) {
switch (method) {
case "optional":
int opening = argument.indexOf('(');
String subMethod = argument;
String subArgument = null;
if (opening != -1) {
subArgument = subMethod.substring(opening + 1, subMethod.length() - 1);
subMethod = subMethod.substring(0, opening);
}
return new Optional(getProcessor(subMethod.toLowerCase(), subArgument));
case "parsedate":
return new ParseDate(argument.substring(1, argument.length() - 1));
case "parsedouble":
if (argument != null && !argument.isEmpty())
throw new IllegalArgumentException("ParseDouble does not expect any argument but has " + argument);
return new ParseDouble();
case "parsebigdecimal":
if (argument != null && !argument.isEmpty())
throw new IllegalArgumentException("ParseBigDecimal does not expect any argument but has " + argument);
return new ParseBigDecimal();
case "parsebool":
if (argument != null && !argument.isEmpty())
throw new IllegalArgumentException("ParseBool does not expect any argument but has " + argument);
return new ParseBool();
case "parsechar":
if (argument != null && !argument.isEmpty())
throw new IllegalArgumentException("ParseChar does not expect any argument but has " + argument);
return new ParseChar();
case "parseint":
if (argument != null && !argument.isEmpty())
throw new IllegalArgumentException("ParseInt does not expect any argument but has " + argument);
return new ParseInt();
case "parselong":
if (argument != null && !argument.isEmpty())
throw new IllegalArgumentException("ParseLong does not expect any argument but has " + argument);
return new ParseLong();
case "notnull":
if (argument != null && !argument.isEmpty())
throw new IllegalArgumentException("NotNull does not expect any argument but has " + argument);
return new NotNull();
case "strregex":
return new StrRegEx(argument.substring(1, argument.length() - 1));
case "unique":
if (argument != null && !argument.isEmpty())
throw new IllegalArgumentException("Unique does not expect any argument but has " + argument);
return new Unique();
case "uniquehashcode":
if (argument != null && !argument.isEmpty())
throw new IllegalArgumentException("UniqueHashCode does not expect any argument but has " + argument);
return new UniqueHashCode();
case "strlen":
String[] splts = argument.split(",");
int[] requiredLengths = new int[splts.length];
for (int i = 0; i < splts.length; i++) {
requiredLengths[i] = Integer.parseInt(splts[i]);
}
return new Strlen(requiredLengths);
case "strminmax":
String[] splits = argument.split(",");
return new StrMinMax(Long.parseLong(splits[0]), Long.parseLong(splits[1]));
case "lminmax":
String[] args = argument.split(",");
return new LMinMax(Long.parseLong(args[0]), Long.parseLong(args[1]));
case "dminmax":
String[] doubles = argument.split(",");
return new DMinMax(Double.parseDouble(doubles[0]), Double.parseDouble(doubles[1]));
case "equals":
if (argument != null && !argument.isEmpty())
throw new IllegalArgumentException("Equals does not expect any argument but has " + argument);
return new Equals();
case "forbidsubstr":
String[] forbiddenSubStrings = argument.replaceAll("\"", "").split(",[ ]*");
return new ForbidSubStr(forbiddenSubStrings);
case "requiresubstr":
String[] requiredSubStrings = argument.replaceAll("\"", "").split(",[ ]*");
return new RequireSubStr(requiredSubStrings);
case "strnotnullorempty":
if (argument != null && !argument.isEmpty())
throw new IllegalArgumentException("StrNotNullOrEmpty does not expect any argument but has " + argument);
return new StrNotNullOrEmpty();
case "requirehashcode":
String[] hashs = argument.split(",");
int[] hashcodes = new int[hashs.length];
for (int i = 0; i < hashs.length; i++) {
hashcodes[i] = Integer.parseInt(hashs[i]);
}
return new RequireHashCode(hashcodes);
case "null":
if (argument != null && !argument.isEmpty())
throw new IllegalArgumentException("Null does not expect any argument but has " + argument);
return null;
case "isincludedin":
String[] elements = argument.replaceAll("\"", "").split(",[ ]*");
return new IsIncludedIn(elements);
default:
throw new IllegalArgumentException("[" + method + "] is not an allowed method to define a Cell Processor");
}
}
    /**
     * Validates the incoming FlowFile (or a designated attribute) against the configured
     * schema. In whole-file mode the FlowFile is routed as a unit; in line-by-line mode
     * valid and invalid lines are split into two new FlowFiles with count attributes.
     */
    @Override
    public void onTrigger(final ProcessContext context, final ProcessSession session) {
        FlowFile flowFile = session.get();
        if (flowFile == null) {
            return;
        }
        // Resolve CSV dialect and validation options (EL evaluated against this FlowFile).
        final CsvPreference csvPref = getPreference(context, flowFile);
        final boolean header = context.getProperty(HEADER).asBoolean();
        final ComponentLog logger = getLogger();
        String schema = context.getProperty(SCHEMA).evaluateAttributeExpressions(flowFile).getValue();
        CellProcessor[] cellProcs = null;
        if (schema != null) {
            cellProcs = this.parseSchema(schema);
        }
        final String validationStrategy = context.getProperty(VALIDATION_STRATEGY).getValue();
        final boolean isWholeFFValidation = !validationStrategy.equals(VALIDATE_LINES_INDIVIDUALLY.getValue());
        final boolean includeAllViolations = context.getProperty(INCLUDE_ALL_VIOLATIONS).asBoolean();
        boolean valid = true;
        int okCount = 0;
        int totalCount = 0;
        FlowFile invalidFF = null;
        FlowFile validFF = null;
        String validationError = null;
        // First-line flags decide whether an end-of-line prefix is written before a row.
        final AtomicReference<Boolean> isFirstLineValid = new AtomicReference<>(true);
        final AtomicReference<Boolean> isFirstLineInvalid = new AtomicReference<>(true);
        if (!isWholeFFValidation) {
            // Line-by-line mode: accumulate rows into two fresh FlowFiles.
            invalidFF = session.create(flowFile);
            validFF = session.create(flowFile);
        }
        InputStream stream;
        // Whole-file mode may validate an attribute's value instead of the content.
        if (isWholeFFValidation && context.getProperty(CSV_SOURCE_ATTRIBUTE).isSet()) {
            String csvAttribute = flowFile.getAttribute(context.getProperty(CSV_SOURCE_ATTRIBUTE).evaluateAttributeExpressions().getValue());
            stream = new ByteArrayInputStream(Objects.requireNonNullElse(csvAttribute, "").getBytes(StandardCharsets.UTF_8));
        } else {
            stream = session.read(flowFile);
        }
        // Labeled block so the schema/header failure below can abort the whole read early.
        stream: try (final NifiCsvListReader listReader = new NifiCsvListReader(new InputStreamReader(stream), csvPref)) {
            // handling of header
            if (header) {
                // read header
                List<String> headers = listReader.read();
                if (schema == null) {
                    if (headers != null && !headers.isEmpty()) {
                        // Derive a permissive schema (one Optional(StrNotNullOrEmpty()) per column).
                        String newSchema = "Optional(StrNotNullOrEmpty()),".repeat(headers.size());
                        schema = newSchema.substring(0, newSchema.length() - 1);
                        cellProcs = this.parseSchema(schema);
                    } else {
                        validationError = "No schema or CSV header could be identified.";
                        valid = false;
                        break stream;
                    }
                }
                if (!isWholeFFValidation) {
                    // Replicate the header into both output FlowFiles.
                    invalidFF = session.append(invalidFF, out -> out.write(print(listReader.getUntokenizedRow(), csvPref, true)));
                    validFF = session.append(validFF, out -> out.write(print(listReader.getUntokenizedRow(), csvPref, true)));
                    isFirstLineValid.set(false);
                    isFirstLineInvalid.set(false);
                }
            }
            boolean stop = false;
            while (!stop) {
                try {
                    // read next row and check if no more row
                    stop = listReader.read(includeAllViolations && valid, cellProcs) == null;
                    if (!isWholeFFValidation && !stop) {
                        validFF = session.append(validFF, out -> out.write(print(listReader.getUntokenizedRow(), csvPref, isFirstLineValid.get())));
                        okCount++;
                        if (isFirstLineValid.get()) {
                            isFirstLineValid.set(false);
                        }
                    }
                } catch (final SuperCsvException e) {
                    valid = false;
                    if (isWholeFFValidation) {
                        validationError = e.getLocalizedMessage();
                        logger.debug("Failed to validate {} against schema due to {}; routing to 'invalid'", flowFile, e);
                        break;
                    } else {
                        // we append the invalid line to the flow file that will be routed to invalid relationship
                        invalidFF = session.append(invalidFF, out -> out.write(print(listReader.getUntokenizedRow(), csvPref, isFirstLineInvalid.get())));
                        if (isFirstLineInvalid.get()) {
                            isFirstLineInvalid.set(false);
                        }
                        if (validationError == null) {
                            // Keep only the first error message for the attribute.
                            validationError = e.getLocalizedMessage();
                        }
                    }
                } finally {
                    if (!isWholeFFValidation) {
                        // Counts every loop iteration, including the final EOF pass (adjusted below).
                        totalCount++;
                    }
                }
            }
        } catch (final IOException e) {
            valid = false;
            logger.error("Failed to validate {} against schema due to {}", flowFile, e);
        }
        if (isWholeFFValidation) {
            if (valid) {
                logger.debug("Successfully validated {} against schema; routing to 'valid'", flowFile);
                session.getProvenanceReporter().route(flowFile, REL_VALID);
                session.transfer(flowFile, REL_VALID);
            } else {
                session.getProvenanceReporter().route(flowFile, REL_INVALID);
                session.putAttribute(flowFile, "validation.error.message", validationError);
                session.transfer(flowFile, REL_INVALID);
            }
        } else {
            if (valid) {
                // All lines valid: emit the 'valid' copy, discard the unused 'invalid' one.
                logger.debug("Successfully validated {} against schema; routing to 'valid'", validFF);
                session.getProvenanceReporter().route(validFF, REL_VALID, "All " + totalCount + " line(s) are valid");
                session.putAttribute(validFF, "count.valid.lines", Integer.toString(totalCount));
                session.putAttribute(validFF, "count.total.lines", Integer.toString(totalCount));
                session.transfer(validFF, REL_VALID);
                session.remove(invalidFF);
                session.remove(flowFile);
            } else if (okCount != 0) {
                // because of the finally within the 'while' loop
                totalCount--;
                logger.debug("Successfully validated {}/{} line(s) in {} against schema; routing valid lines to 'valid' and invalid lines to 'invalid'",
                        okCount, totalCount, flowFile);
                session.getProvenanceReporter().route(validFF, REL_VALID, okCount + " valid line(s)");
                session.putAttribute(validFF, "count.total.lines", Integer.toString(totalCount));
                session.putAttribute(validFF, "count.valid.lines", Integer.toString(okCount));
                session.transfer(validFF, REL_VALID);
                session.getProvenanceReporter().route(invalidFF, REL_INVALID, (totalCount - okCount) + " invalid line(s)");
                session.putAttribute(invalidFF, "count.invalid.lines", Integer.toString((totalCount - okCount)));
                session.putAttribute(invalidFF, "count.total.lines", Integer.toString(totalCount));
                session.putAttribute(invalidFF, "validation.error.message", validationError);
                session.transfer(invalidFF, REL_INVALID);
                session.remove(flowFile);
            } else {
                // No valid line at all: emit only the 'invalid' copy.
                logger.debug("All lines in {} are invalid; routing to 'invalid'", invalidFF);
                session.getProvenanceReporter().route(invalidFF, REL_INVALID, "All " + totalCount + " line(s) are invalid");
                session.putAttribute(invalidFF, "count.invalid.lines", Integer.toString(totalCount));
                session.putAttribute(invalidFF, "count.total.lines", Integer.toString(totalCount));
                session.putAttribute(invalidFF, "validation.error.message", validationError);
                session.transfer(invalidFF, REL_INVALID);
                session.remove(validFF);
                session.remove(flowFile);
            }
        }
    }
private byte[] print(String row, CsvPreference csvPref, boolean isFirstLine) {
StringBuffer buffer = new StringBuffer();
if (!isFirstLine) {
buffer.append(csvPref.getEndOfLineSymbols());
}
return buffer.append(row).toString().getBytes();
}
/**
* This is required to avoid the side effect of Parse* cell processors. If not overriding
* this method, parsing will return objects and writing objects could result in a different
* output in comparison to the input.
*/
private class NifiCsvListReader extends CsvListReader {
public NifiCsvListReader(Reader reader, CsvPreference preferences) {
super(reader, preferences);
}
public List<Object> read(boolean includeAllViolations, CellProcessor... processors) throws IOException {
if ( processors == null ) {
throw new NullPointerException("Processors should not be null");
}
if ( readRow() ) {
executeProcessors(new ArrayList<>(getColumns().size()), processors, includeAllViolations);
return new ArrayList<>(getColumns());
}
return null; // EOF
}
protected List<Object> executeProcessors(List<Object> processedColumns, CellProcessor[] processors, boolean includeAllViolations) {
this.executeCellProcessors(processedColumns, getColumns(), processors, getLineNumber(), getRowNumber(), includeAllViolations);
return processedColumns;
}
private void executeCellProcessors(final List<Object> destination, final List<?> source,
final CellProcessor[] processors, final int lineNo, final int rowNo, boolean includeAllViolations) {
// the context used when cell processors report exceptions
final CsvContext context = new CsvContext(lineNo, rowNo, 1);
context.setRowSource(new ArrayList<>(source));
if (source.size() != processors.length) {
throw new SuperCsvException(String.format(
"The number of columns to be processed (%d) must match the number of CellProcessors (%d): check that the number"
+ " of CellProcessors you have defined matches the expected number of columns being read/written",
source.size(), processors.length), context);
}
destination.clear();
List<String> errors = new ArrayList<>();
for (int i = 0; i < source.size(); i++) {
try {
context.setColumnNumber(i + 1); // update context (columns start at 1)
if (processors[i] == null) {
destination.add(source.get(i)); // no processing required
} else {
destination.add(processors[i].execute(source.get(i), context)); // execute the processor chain
}
} catch (SuperCsvException e) {
if (includeAllViolations) {
if (errors.isEmpty()) {
errors.add(String.format("At {line=%d, row=%d}", e.getCsvContext().getLineNumber(), e.getCsvContext().getRowNumber()));
}
final String coordinates = String.format("{column=%d}", e.getCsvContext().getColumnNumber());
final String errorMessage = e.getLocalizedMessage() + " at " + coordinates;
errors.add(errorMessage);
} else {
final String coordinates = String.format("{line=%d, row=%d, column=%d}", e.getCsvContext().getLineNumber(),
e.getCsvContext().getRowNumber(), e.getCsvContext().getColumnNumber());
final String errorMessage = e.getLocalizedMessage() + " at " + coordinates;
throw new SuperCsvException(errorMessage);
}
}
}
if (!errors.isEmpty()) {
throw new SuperCsvException(String.join(", ", errors));
}
}
}
}
|
apache/iceberg | 35,655 | api/src/main/java/org/apache/iceberg/types/Types.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.iceberg.types;
import java.io.ObjectStreamException;
import java.io.Serializable;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.iceberg.Schema;
import org.apache.iceberg.expressions.Expressions;
import org.apache.iceberg.expressions.Literal;
import org.apache.iceberg.relocated.com.google.common.base.Joiner;
import org.apache.iceberg.relocated.com.google.common.base.Preconditions;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableList;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
import org.apache.iceberg.types.Type.NestedType;
import org.apache.iceberg.types.Type.PrimitiveType;
public class Types {
  // Utility class: only static factories and nested type definitions; no instances.
  private Types() {}
  // Lookup table from a type's canonical (lower-case) string form to its singleton instance.
  // Parameterized types (fixed, decimal, geometry/geography with CRS) are handled by the
  // regex patterns below instead.
  private static final ImmutableMap<String, Type> TYPES =
      ImmutableMap.<String, Type>builder()
          .put(BooleanType.get().toString(), BooleanType.get())
          .put(IntegerType.get().toString(), IntegerType.get())
          .put(LongType.get().toString(), LongType.get())
          .put(FloatType.get().toString(), FloatType.get())
          .put(DoubleType.get().toString(), DoubleType.get())
          .put(DateType.get().toString(), DateType.get())
          .put(TimeType.get().toString(), TimeType.get())
          .put(TimestampType.withZone().toString(), TimestampType.withZone())
          .put(TimestampType.withoutZone().toString(), TimestampType.withoutZone())
          .put(TimestampNanoType.withZone().toString(), TimestampNanoType.withZone())
          .put(TimestampNanoType.withoutZone().toString(), TimestampNanoType.withoutZone())
          .put(StringType.get().toString(), StringType.get())
          .put(UUIDType.get().toString(), UUIDType.get())
          .put(BinaryType.get().toString(), BinaryType.get())
          .put(UnknownType.get().toString(), UnknownType.get())
          .put(VariantType.get().toString(), VariantType.get())
          .put(GeometryType.crs84().toString(), GeometryType.crs84())
          .put(GeographyType.crs84().toString(), GeographyType.crs84())
          .buildOrThrow();
  // Patterns for parameterized type strings, e.g. fixed[16], decimal(9, 2),
  // geometry(<crs>), geography(<crs>, <algorithm>). The parameter groups are optional.
  private static final Pattern FIXED = Pattern.compile("fixed\\[\\s*(\\d+)\\s*\\]");
  private static final Pattern GEOMETRY_PARAMETERS =
      Pattern.compile("geometry\\s*(?:\\(\\s*([^)]*?)\\s*\\))?", Pattern.CASE_INSENSITIVE);
  private static final Pattern GEOGRAPHY_PARAMETERS =
      Pattern.compile(
          "geography\\s*(?:\\(\\s*([^,]*?)\\s*(?:,\\s*(\\w*)\\s*)?\\))?", Pattern.CASE_INSENSITIVE);
  private static final Pattern DECIMAL =
      Pattern.compile("decimal\\(\\s*(\\d+)\\s*,\\s*(\\d+)\\s*\\)");
public static Type fromTypeName(String typeString) {
String lowerTypeString = typeString.toLowerCase(Locale.ROOT);
if (TYPES.containsKey(lowerTypeString)) {
return TYPES.get(lowerTypeString);
}
Matcher geometry = GEOMETRY_PARAMETERS.matcher(typeString);
if (geometry.matches()) {
String crs = geometry.group(1);
Preconditions.checkArgument(!crs.contains(","), "Invalid CRS: %s", crs);
return GeometryType.of(crs);
}
Matcher geography = GEOGRAPHY_PARAMETERS.matcher(typeString);
if (geography.matches()) {
String crs = geography.group(1);
String algorithmName = geography.group(2);
EdgeAlgorithm algorithm =
algorithmName == null ? null : EdgeAlgorithm.fromName(algorithmName);
return GeographyType.of(crs, algorithm);
}
Matcher fixed = FIXED.matcher(lowerTypeString);
if (fixed.matches()) {
return FixedType.ofLength(Integer.parseInt(fixed.group(1)));
}
Matcher decimal = DECIMAL.matcher(lowerTypeString);
if (decimal.matches()) {
return DecimalType.of(Integer.parseInt(decimal.group(1)), Integer.parseInt(decimal.group(2)));
}
throw new IllegalArgumentException("Cannot parse type string to primitive: " + typeString);
}
public static PrimitiveType fromPrimitiveString(String typeString) {
Type type = fromTypeName(typeString);
if (type.isPrimitiveType()) {
return type.asPrimitiveType();
}
throw new IllegalArgumentException("Cannot parse type string: variant is not a primitive type");
}
  /** The {@code boolean} primitive type (shared singleton). */
  public static class BooleanType extends PrimitiveType {
    private static final BooleanType INSTANCE = new BooleanType();
    /** Returns the singleton instance. */
    public static BooleanType get() {
      return INSTANCE;
    }
    @Override
    public TypeID typeId() {
      return TypeID.BOOLEAN;
    }
    @Override
    public String toString() {
      return "boolean";
    }
  }
  /** The {@code int} primitive type (shared singleton). */
  public static class IntegerType extends PrimitiveType {
    private static final IntegerType INSTANCE = new IntegerType();
    /** Returns the singleton instance. */
    public static IntegerType get() {
      return INSTANCE;
    }
    @Override
    public TypeID typeId() {
      return TypeID.INTEGER;
    }
    @Override
    public String toString() {
      return "int";
    }
  }
  /** The {@code long} primitive type (shared singleton). */
  public static class LongType extends PrimitiveType {
    private static final LongType INSTANCE = new LongType();
    /** Returns the singleton instance. */
    public static LongType get() {
      return INSTANCE;
    }
    @Override
    public TypeID typeId() {
      return TypeID.LONG;
    }
    @Override
    public String toString() {
      return "long";
    }
  }
  /** The {@code float} primitive type (shared singleton). */
  public static class FloatType extends PrimitiveType {
    private static final FloatType INSTANCE = new FloatType();
    /** Returns the singleton instance. */
    public static FloatType get() {
      return INSTANCE;
    }
    @Override
    public TypeID typeId() {
      return TypeID.FLOAT;
    }
    @Override
    public String toString() {
      return "float";
    }
  }
  /** The {@code double} primitive type (shared singleton). */
  public static class DoubleType extends PrimitiveType {
    private static final DoubleType INSTANCE = new DoubleType();
    /** Returns the singleton instance. */
    public static DoubleType get() {
      return INSTANCE;
    }
    @Override
    public TypeID typeId() {
      return TypeID.DOUBLE;
    }
    @Override
    public String toString() {
      return "double";
    }
  }
  /** The {@code date} primitive type (shared singleton). */
  public static class DateType extends PrimitiveType {
    private static final DateType INSTANCE = new DateType();
    /** Returns the singleton instance. */
    public static DateType get() {
      return INSTANCE;
    }
    @Override
    public TypeID typeId() {
      return TypeID.DATE;
    }
    @Override
    public String toString() {
      return "date";
    }
  }
  /** The {@code time} primitive type (shared singleton; private constructor enforces it). */
  public static class TimeType extends PrimitiveType {
    private static final TimeType INSTANCE = new TimeType();
    /** Returns the singleton instance. */
    public static TimeType get() {
      return INSTANCE;
    }
    private TimeType() {}
    @Override
    public TypeID typeId() {
      return TypeID.TIME;
    }
    @Override
    public String toString() {
      return "time";
    }
  }
public static class TimestampType extends PrimitiveType {
private static final TimestampType INSTANCE_WITH_ZONE = new TimestampType(true);
private static final TimestampType INSTANCE_WITHOUT_ZONE = new TimestampType(false);
public static TimestampType withZone() {
return INSTANCE_WITH_ZONE;
}
public static TimestampType withoutZone() {
return INSTANCE_WITHOUT_ZONE;
}
private final boolean adjustToUTC;
private TimestampType(boolean adjustToUTC) {
this.adjustToUTC = adjustToUTC;
}
public boolean shouldAdjustToUTC() {
return adjustToUTC;
}
@Override
public TypeID typeId() {
return TypeID.TIMESTAMP;
}
@Override
public String toString() {
if (shouldAdjustToUTC()) {
return "timestamptz";
} else {
return "timestamp";
}
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
} else if (!(o instanceof TimestampType)) {
return false;
}
TimestampType timestampType = (TimestampType) o;
return adjustToUTC == timestampType.adjustToUTC;
}
@Override
public int hashCode() {
return Objects.hash(TimestampType.class, adjustToUTC);
}
}
public static class TimestampNanoType extends PrimitiveType {
private static final TimestampNanoType INSTANCE_WITH_ZONE = new TimestampNanoType(true);
private static final TimestampNanoType INSTANCE_WITHOUT_ZONE = new TimestampNanoType(false);
public static TimestampNanoType withZone() {
return INSTANCE_WITH_ZONE;
}
public static TimestampNanoType withoutZone() {
return INSTANCE_WITHOUT_ZONE;
}
private final boolean adjustToUTC;
private TimestampNanoType(boolean adjustToUTC) {
this.adjustToUTC = adjustToUTC;
}
public boolean shouldAdjustToUTC() {
return adjustToUTC;
}
@Override
public TypeID typeId() {
return TypeID.TIMESTAMP_NANO;
}
@Override
public String toString() {
if (shouldAdjustToUTC()) {
return "timestamptz_ns";
} else {
return "timestamp_ns";
}
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
} else if (!(other instanceof TimestampNanoType)) {
return false;
}
return adjustToUTC == ((TimestampNanoType) other).adjustToUTC;
}
@Override
public int hashCode() {
return Objects.hash(TimestampNanoType.class, adjustToUTC);
}
}
  /** The {@code string} primitive type (shared singleton). */
  public static class StringType extends PrimitiveType {
    private static final StringType INSTANCE = new StringType();
    /** Returns the singleton instance. */
    public static StringType get() {
      return INSTANCE;
    }
    @Override
    public TypeID typeId() {
      return TypeID.STRING;
    }
    @Override
    public String toString() {
      return "string";
    }
  }
  /** The {@code uuid} primitive type (shared singleton). */
  public static class UUIDType extends PrimitiveType {
    private static final UUIDType INSTANCE = new UUIDType();
    /** Returns the singleton instance. */
    public static UUIDType get() {
      return INSTANCE;
    }
    @Override
    public TypeID typeId() {
      return TypeID.UUID;
    }
    @Override
    public String toString() {
      return "uuid";
    }
  }
public static class FixedType extends PrimitiveType {
  /**
   * Returns a fixed-length binary type of the given width.
   *
   * @param length width of the type in bytes; must be non-negative
   * @throws IllegalArgumentException if {@code length} is negative
   */
  public static FixedType ofLength(int length) {
    return new FixedType(length);
  }

  private final int length;

  private FixedType(int length) {
    // Reject nonsense widths up front; a fixed type cannot be negatively sized.
    if (length < 0) {
      throw new IllegalArgumentException("Length of fixed type cannot be negative: " + length);
    }
    this.length = length;
  }

  /** Returns the width of this type in bytes. */
  public int length() {
    return length;
  }

  @Override
  public TypeID typeId() {
    return TypeID.FIXED;
  }

  @Override
  public String toString() {
    return String.format(Locale.ROOT, "fixed[%d]", length);
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    } else if (!(o instanceof FixedType)) {
      return false;
    }
    FixedType fixedType = (FixedType) o;
    return length == fixedType.length;
  }

  @Override
  public int hashCode() {
    return Objects.hash(FixedType.class, length);
  }
}
public static class BinaryType extends PrimitiveType {
  private static final BinaryType INSTANCE = new BinaryType();

  /** Returns the singleton binary type. */
  public static BinaryType get() {
    return INSTANCE;
  }

  @Override
  public String toString() {
    return "binary";
  }

  @Override
  public TypeID typeId() {
    return TypeID.BINARY;
  }
}
public static class VariantType implements Type {
  private static final VariantType INSTANCE = new VariantType();

  /** Returns the singleton variant type. */
  public static VariantType get() {
    return INSTANCE;
  }

  @Override
  public TypeID typeId() {
    return TypeID.VARIANT;
  }

  @Override
  public boolean isVariantType() {
    return true;
  }

  @Override
  public VariantType asVariantType() {
    return this;
  }

  @Override
  public String toString() {
    return "variant";
  }

  // Serialization hook: write a holder keyed by the type's string form instead of
  // this instance. Presumably the holder resolves back to the shared instance on
  // deserialization — confirm against PrimitiveLikeHolder.
  Object writeReplace() throws ObjectStreamException {
    return new PrimitiveLikeHolder(toString());
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    if (!(o instanceof VariantType)) {
      return false;
    }
    return typeId() == ((VariantType) o).typeId();
  }

  @Override
  public int hashCode() {
    return Objects.hash(VariantType.class, typeId());
  }
}
public static class UnknownType extends PrimitiveType {
  private static final UnknownType INSTANCE = new UnknownType();

  /** Returns the singleton unknown type. */
  public static UnknownType get() {
    return INSTANCE;
  }

  @Override
  public String toString() {
    return "unknown";
  }

  @Override
  public TypeID typeId() {
    return TypeID.UNKNOWN;
  }
}
public static class DecimalType extends PrimitiveType {
  /** Returns a decimal type with the given precision and scale. */
  public static DecimalType of(int precision, int scale) {
    return new DecimalType(precision, scale);
  }

  private final int scale;
  private final int precision;

  private DecimalType(int precision, int scale) {
    Preconditions.checkArgument(
        precision <= 38,
        "Decimals with precision larger than 38 are not supported: %s",
        precision);
    this.scale = scale;
    this.precision = precision;
  }

  /** Returns the number of digits to the right of the decimal point. */
  public int scale() {
    return scale;
  }

  /** Returns the total number of digits. */
  public int precision() {
    return precision;
  }

  @Override
  public TypeID typeId() {
    return TypeID.DECIMAL;
  }

  @Override
  public String toString() {
    return String.format(Locale.ROOT, "decimal(%d, %d)", precision, scale);
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    if (!(o instanceof DecimalType)) {
      return false;
    }
    DecimalType other = (DecimalType) o;
    return scale == other.scale && precision == other.precision;
  }

  @Override
  public int hashCode() {
    return Objects.hash(DecimalType.class, scale, precision);
  }
}
public static class GeometryType extends PrimitiveType {
  public static final String DEFAULT_CRS = "OGC:CRS84";

  /** Returns a geometry type with the default CRS ({@value #DEFAULT_CRS}). */
  public static GeometryType crs84() {
    return new GeometryType();
  }

  /**
   * Returns a geometry type with the given CRS.
   *
   * @param crs the coordinate reference system; must not be an empty string. The default CRS is
   *     normalized to null so that {@code of(DEFAULT_CRS)} equals {@code crs84()}.
   */
  public static GeometryType of(String crs) {
    return new GeometryType(crs);
  }

  // null means the default CRS (OGC:CRS84)
  private final String crs;

  private GeometryType() {
    crs = null;
  }

  private GeometryType(String crs) {
    Preconditions.checkArgument(crs == null || !crs.isEmpty(), "Invalid CRS: (empty string)");
    // Store null for the default CRS so the factory methods produce equal instances.
    this.crs = DEFAULT_CRS.equalsIgnoreCase(crs) ? null : crs;
  }

  @Override
  public TypeID typeId() {
    return TypeID.GEOMETRY;
  }

  /** Returns the CRS of this type, or null for the default CRS. */
  public String crs() {
    return crs;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    } else if (!(o instanceof GeometryType)) {
      return false;
    }
    GeometryType that = (GeometryType) o;
    return Objects.equals(crs, that.crs);
  }

  @Override
  public int hashCode() {
    return Objects.hash(GeometryType.class, crs);
  }

  @Override
  public String toString() {
    if (crs == null) {
      return "geometry";
    }
    // Locale.ROOT for consistency with the other type toString implementations.
    return String.format(Locale.ROOT, "geometry(%s)", crs);
  }
}
public static class GeographyType extends PrimitiveType {
  public static final String DEFAULT_CRS = "OGC:CRS84";

  /** Returns a geography type with the default CRS ({@value #DEFAULT_CRS}) and no algorithm. */
  public static GeographyType crs84() {
    return new GeographyType();
  }

  /** Returns a geography type with the given CRS and no edge algorithm. */
  public static GeographyType of(String crs) {
    return new GeographyType(crs, null);
  }

  /**
   * Returns a geography type with the given CRS and edge-interpolation algorithm.
   *
   * @param crs the coordinate reference system; must not be an empty string. The default CRS is
   *     normalized to null.
   * @param algorithm the edge-interpolation algorithm, or null
   */
  public static GeographyType of(String crs, EdgeAlgorithm algorithm) {
    return new GeographyType(crs, algorithm);
  }

  // null means the default CRS (OGC:CRS84)
  private final String crs;
  private final EdgeAlgorithm algorithm;

  private GeographyType() {
    this.crs = null;
    this.algorithm = null;
  }

  private GeographyType(String crs, EdgeAlgorithm algorithm) {
    Preconditions.checkArgument(crs == null || !crs.isEmpty(), "Invalid CRS: (empty string)");
    // Store null for the default CRS so the factory methods produce equal instances.
    this.crs = DEFAULT_CRS.equalsIgnoreCase(crs) ? null : crs;
    this.algorithm = algorithm;
  }

  @Override
  public TypeID typeId() {
    return TypeID.GEOGRAPHY;
  }

  /** Returns the CRS of this type, or null for the default CRS. */
  public String crs() {
    return crs;
  }

  /** Returns the edge-interpolation algorithm, or null if none was specified. */
  public EdgeAlgorithm algorithm() {
    return algorithm;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    } else if (!(o instanceof GeographyType)) {
      return false;
    }
    GeographyType that = (GeographyType) o;
    return Objects.equals(crs, that.crs) && Objects.equals(algorithm, that.algorithm);
  }

  @Override
  public int hashCode() {
    return Objects.hash(GeographyType.class, crs, algorithm);
  }

  @Override
  public String toString() {
    // Locale.ROOT for consistency with the other type toString implementations.
    if (algorithm != null) {
      return String.format(
          Locale.ROOT, "geography(%s, %s)", crs != null ? crs : DEFAULT_CRS, algorithm);
    } else if (crs != null) {
      return String.format(Locale.ROOT, "geography(%s)", crs);
    } else {
      return "geography";
    }
  }
}
/**
 * A named, typed field of a struct, list element, or map key/value. Immutable; the as*/with*
 * methods return new instances. Carries optional documentation and optional initial/write
 * default literals that are cast to the field type at construction.
 */
public static class NestedField implements Serializable {
  /** Returns an optional field with the given id, name, and type. */
  public static NestedField optional(int id, String name, Type type) {
    return new NestedField(true, id, name, type, null, null, null);
  }
  /** Returns an optional field with the given id, name, type, and documentation. */
  public static NestedField optional(int id, String name, Type type, String doc) {
    return new NestedField(true, id, name, type, doc, null, null);
  }
  /** Returns a required field with the given id, name, and type. */
  public static NestedField required(int id, String name, Type type) {
    return new NestedField(false, id, name, type, null, null, null);
  }
  /** Returns a required field with the given id, name, type, and documentation. */
  public static NestedField required(int id, String name, Type type, String doc) {
    return new NestedField(false, id, name, type, doc, null, null);
  }
  /**
   * Create a nested field.
   *
   * @deprecated will be removed in 2.0.0; use {@link #builder()} instead.
   */
  @Deprecated
  public static NestedField of(int id, boolean isOptional, String name, Type type) {
    return new NestedField(isOptional, id, name, type, null, null, null);
  }
  /**
   * Create a nested field.
   *
   * @deprecated will be removed in 2.0.0; use {@link #builder()} instead.
   */
  @Deprecated
  public static NestedField of(int id, boolean isOptional, String name, Type type, String doc) {
    return new NestedField(isOptional, id, name, type, doc, null, null);
  }
  /** Returns a builder initialized from an existing field. */
  public static Builder from(NestedField field) {
    return new Builder(field);
  }
  /** Returns a builder for a required field with the given name. */
  public static Builder required(String name) {
    return new Builder(false, name);
  }
  /** Returns a builder for an optional field with the given name. */
  public static Builder optional(String name) {
    return new Builder(true, name);
  }
  /** Returns an empty builder; fields are optional by default. */
  public static Builder builder() {
    return new Builder();
  }
  /** Fluent builder for {@link NestedField}; id, name, and type are required at build time. */
  public static class Builder {
    private boolean isOptional = true;
    private String name = null;
    private Integer id = null;
    private Type type = null;
    private String doc = null;
    private Literal<?> initialDefault = null;
    private Literal<?> writeDefault = null;
    private Builder() {}
    private Builder(boolean isFieldOptional, String fieldName) {
      isOptional = isFieldOptional;
      name = fieldName;
    }
    // Copy constructor used by NestedField.from(NestedField).
    private Builder(NestedField toCopy) {
      this.isOptional = toCopy.isOptional;
      this.name = toCopy.name;
      this.id = toCopy.id;
      this.type = toCopy.type;
      this.doc = toCopy.doc;
      this.initialDefault = toCopy.initialDefault;
      this.writeDefault = toCopy.writeDefault;
    }
    public Builder asRequired() {
      this.isOptional = false;
      return this;
    }
    public Builder asOptional() {
      this.isOptional = true;
      return this;
    }
    public Builder isOptional(boolean fieldIsOptional) {
      this.isOptional = fieldIsOptional;
      return this;
    }
    public Builder withName(String fieldName) {
      this.name = fieldName;
      return this;
    }
    public Builder withId(int fieldId) {
      id = fieldId;
      return this;
    }
    public Builder ofType(Type fieldType) {
      type = fieldType;
      return this;
    }
    public Builder withDoc(String fieldDoc) {
      doc = fieldDoc;
      return this;
    }
    /**
     * Set the initial default using an Object.
     *
     * @deprecated will be removed in 2.0.0; use {@link #withInitialDefault(Literal)} instead.
     */
    @Deprecated
    public Builder withInitialDefault(Object fieldInitialDefault) {
      return withInitialDefault(Expressions.lit(fieldInitialDefault));
    }
    public Builder withInitialDefault(Literal<?> fieldInitialDefault) {
      initialDefault = fieldInitialDefault;
      return this;
    }
    /**
     * Set the write default using an Object.
     *
     * @deprecated will be removed in 2.0.0; use {@link #withWriteDefault(Literal)} instead.
     */
    @Deprecated
    public Builder withWriteDefault(Object fieldWriteDefault) {
      return withWriteDefault(Expressions.lit(fieldWriteDefault));
    }
    public Builder withWriteDefault(Literal<?> fieldWriteDefault) {
      writeDefault = fieldWriteDefault;
      return this;
    }
    public NestedField build() {
      Preconditions.checkNotNull(id, "Id cannot be null");
      // the constructor validates the other fields
      return new NestedField(isOptional, id, name, type, doc, initialDefault, writeDefault);
    }
  }
  private final boolean isOptional;
  private final int id;
  private final String name;
  private final Type type;
  private final String doc;
  private final Literal<?> initialDefault;
  private final Literal<?> writeDefault;
  private NestedField(
      boolean isOptional,
      int id,
      String name,
      Type type,
      String doc,
      Literal<?> initialDefault,
      Literal<?> writeDefault) {
    Preconditions.checkNotNull(name, "Name cannot be null");
    Preconditions.checkNotNull(type, "Type cannot be null");
    // A required field of unknown type could never hold a valid value.
    Preconditions.checkArgument(
        isOptional || !type.equals(UnknownType.get()),
        "Cannot create required field with unknown type: %s",
        name);
    this.isOptional = isOptional;
    this.id = id;
    this.name = name;
    this.type = type;
    this.doc = doc;
    this.initialDefault = castDefault(initialDefault, type);
    this.writeDefault = castDefault(writeDefault, type);
  }
  // Casts a default literal to the field type; nested types may not have defaults.
  // Returns null when no default was given.
  private static Literal<?> castDefault(Literal<?> defaultValue, Type type) {
    if (type.isNestedType() && defaultValue != null) {
      throw new IllegalArgumentException(
          String.format("Invalid default value for %s: %s (must be null)", type, defaultValue));
    } else if (defaultValue != null) {
      Literal<?> typedDefault = defaultValue.to(type);
      Preconditions.checkArgument(
          typedDefault != null, "Cannot cast default value to %s: %s", type, defaultValue);
      return typedDefault;
    }
    return null;
  }
  public boolean isOptional() {
    return isOptional;
  }
  /** Returns this field if already optional, otherwise an optional copy. */
  public NestedField asOptional() {
    if (isOptional) {
      return this;
    }
    return new NestedField(true, id, name, type, doc, initialDefault, writeDefault);
  }
  public boolean isRequired() {
    return !isOptional;
  }
  /** Returns this field if already required, otherwise a required copy. */
  public NestedField asRequired() {
    if (!isOptional) {
      return this;
    }
    return new NestedField(false, id, name, type, doc, initialDefault, writeDefault);
  }
  /**
   * @deprecated will be removed in 2.0.0; use {@link Builder#withId(int)} instead
   */
  @Deprecated
  public NestedField withFieldId(int newId) {
    return new NestedField(isOptional, newId, name, type, doc, initialDefault, writeDefault);
  }
  public int fieldId() {
    return id;
  }
  public String name() {
    return name;
  }
  public Type type() {
    return type;
  }
  /** Returns the field documentation, or null if none was set. */
  public String doc() {
    return doc;
  }
  /** Returns the initial default as a typed literal, or null. */
  public Literal<?> initialDefaultLiteral() {
    return initialDefault;
  }
  /** Returns the initial default's value, or null when there is no default. */
  public Object initialDefault() {
    return initialDefault != null ? initialDefault.value() : null;
  }
  /** Returns the write default as a typed literal, or null. */
  public Literal<?> writeDefaultLiteral() {
    return writeDefault;
  }
  /** Returns the write default's value, or null when there is no default. */
  public Object writeDefault() {
    return writeDefault != null ? writeDefault.value() : null;
  }
  @Override
  public String toString() {
    return String.format(
            Locale.ROOT, "%d: %s: %s %s", id, name, isOptional ? "optional" : "required", type)
        + (doc != null ? " (" + doc + ")" : "");
  }
  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    } else if (!(o instanceof NestedField)) {
      return false;
    }
    NestedField that = (NestedField) o;
    if (isOptional != that.isOptional) {
      return false;
    } else if (id != that.id) {
      return false;
    } else if (!name.equals(that.name)) {
      return false;
    } else if (!Objects.equals(doc, that.doc)) {
      return false;
    } else if (!type.equals(that.type)) {
      return false;
    } else if (!Objects.equals(initialDefault, that.initialDefault)) {
      return false;
    } else if (!Objects.equals(writeDefault, that.writeDefault)) {
      return false;
    }
    return true;
  }
  @Override
  public int hashCode() {
    // NOTE(review): doc, initialDefault, and writeDefault participate in equals but not
    // hashCode. That satisfies the hashCode contract (equal objects still hash equal) but
    // allows collisions for fields differing only in those members — confirm intentional.
    return Objects.hash(NestedField.class, id, isOptional, name, type);
  }
}
/**
 * A struct type: an ordered collection of {@link NestedField}s. Lookup structures (by name,
 * lower-cased name, and id) and the list view are built lazily and cached in transient fields.
 */
public static class StructType extends NestedType {
  private static final Joiner FIELD_SEP = Joiner.on(", ");

  /** Returns a struct of the given fields. */
  public static StructType of(NestedField... fields) {
    return of(Arrays.asList(fields));
  }

  /** Returns a struct of the given fields. */
  public static StructType of(List<NestedField> fields) {
    return new StructType(fields);
  }

  private final NestedField[] fields;

  // lazy values
  private transient Schema schema = null;
  private transient List<NestedField> fieldList = null;
  private transient Map<String, NestedField> fieldsByName = null;
  private transient Map<String, NestedField> fieldsByLowerCaseName = null;
  private transient Map<Integer, NestedField> fieldsById = null;

  private StructType(List<NestedField> fields) {
    Preconditions.checkNotNull(fields, "Field list cannot be null");
    // Snapshot into an array so this type is unaffected by later mutation of the input list.
    this.fields = fields.toArray(new NestedField[0]);
  }

  @Override
  public List<NestedField> fields() {
    return lazyFieldList();
  }

  /** Returns the field with the given (case-sensitive) name, or null. */
  public NestedField field(String name) {
    return lazyFieldsByName().get(name);
  }

  @Override
  public NestedField field(int id) {
    return lazyFieldsById().get(id);
  }

  /** Returns the field whose name matches ignoring case, or null. */
  public NestedField caseInsensitiveField(String name) {
    return lazyFieldsByLowerCaseName().get(name.toLowerCase(Locale.ROOT));
  }

  @Override
  public Type fieldType(String name) {
    NestedField field = field(name);
    if (field != null) {
      return field.type();
    }
    return null;
  }

  @Override
  public TypeID typeId() {
    return TypeID.STRUCT;
  }

  @Override
  public boolean isStructType() {
    return true;
  }

  @Override
  public Types.StructType asStructType() {
    return this;
  }

  /**
   * Returns a schema which contains the columns inside struct type. This method can be used to
   * avoid expensive conversion of StructType containing large number of columns to Schema during
   * manifest evaluation.
   *
   * @return the schema containing columns of struct type.
   */
  public Schema asSchema() {
    if (this.schema == null) {
      this.schema = new Schema(Arrays.asList(this.fields));
    }
    return this.schema;
  }

  @Override
  public String toString() {
    return String.format(Locale.ROOT, "struct<%s>", FIELD_SEP.join(fields));
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    } else if (!(o instanceof StructType)) {
      return false;
    }
    StructType that = (StructType) o;
    return Arrays.equals(fields, that.fields);
  }

  @Override
  public int hashCode() {
    // Seed with StructType.class (was NestedField.class) for consistency with the sibling
    // types (ListType, MapType, etc.) and to avoid colliding with NestedField hashes.
    return Objects.hash(StructType.class, Arrays.hashCode(fields));
  }

  private List<NestedField> lazyFieldList() {
    if (fieldList == null) {
      this.fieldList = ImmutableList.copyOf(fields);
    }
    return fieldList;
  }

  private Map<String, NestedField> lazyFieldsByName() {
    if (fieldsByName == null) {
      ImmutableMap.Builder<String, NestedField> byNameBuilder = ImmutableMap.builder();
      for (NestedField field : fields) {
        byNameBuilder.put(field.name(), field);
      }
      fieldsByName = byNameBuilder.build();
    }
    return fieldsByName;
  }

  private Map<String, NestedField> lazyFieldsByLowerCaseName() {
    if (fieldsByLowerCaseName == null) {
      ImmutableMap.Builder<String, NestedField> byLowerCaseNameBuilder = ImmutableMap.builder();
      for (NestedField field : fields) {
        byLowerCaseNameBuilder.put(field.name().toLowerCase(Locale.ROOT), field);
      }
      fieldsByLowerCaseName = byLowerCaseNameBuilder.build();
    }
    return fieldsByLowerCaseName;
  }

  private Map<Integer, NestedField> lazyFieldsById() {
    if (fieldsById == null) {
      ImmutableMap.Builder<Integer, NestedField> byIdBuilder = ImmutableMap.builder();
      for (NestedField field : fields) {
        byIdBuilder.put(field.fieldId(), field);
      }
      this.fieldsById = byIdBuilder.build();
    }
    return fieldsById;
  }
}
/** A list type wrapping a single "element" field. */
public static class ListType extends NestedType {
  /** Returns a list whose elements may be null. */
  public static ListType ofOptional(int elementId, Type elementType) {
    Preconditions.checkNotNull(elementType, "Element type cannot be null");
    return new ListType(NestedField.optional(elementId, "element", elementType));
  }

  /** Returns a list whose elements may not be null. */
  public static ListType ofRequired(int elementId, Type elementType) {
    Preconditions.checkNotNull(elementType, "Element type cannot be null");
    return new ListType(NestedField.required(elementId, "element", elementType));
  }

  private final NestedField elementField;
  private transient List<NestedField> fields = null;

  private ListType(NestedField elementField) {
    this.elementField = elementField;
  }

  public Type elementType() {
    return elementField.type();
  }

  @Override
  public Type fieldType(String name) {
    return "element".equals(name) ? elementType() : null;
  }

  @Override
  public NestedField field(int id) {
    return elementField.fieldId() == id ? elementField : null;
  }

  @Override
  public List<NestedField> fields() {
    return lazyFieldList();
  }

  public int elementId() {
    return elementField.fieldId();
  }

  public boolean isElementRequired() {
    return !elementField.isOptional;
  }

  public boolean isElementOptional() {
    return elementField.isOptional;
  }

  @Override
  public TypeID typeId() {
    return TypeID.LIST;
  }

  @Override
  public boolean isListType() {
    return true;
  }

  @Override
  public Types.ListType asListType() {
    return this;
  }

  @Override
  public String toString() {
    return "list<" + elementField.type() + ">";
  }

  @Override
  public boolean equals(Object o) {
    return this == o
        || (o instanceof ListType && elementField.equals(((ListType) o).elementField));
  }

  @Override
  public int hashCode() {
    return Objects.hash(ListType.class, elementField);
  }

  private List<NestedField> lazyFieldList() {
    if (fields == null) {
      this.fields = ImmutableList.of(elementField);
    }
    return fields;
  }
}
/** A map type wrapping a required "key" field and a "value" field. */
public static class MapType extends NestedType {
  /** Returns a map whose values may be null; keys are always required. */
  public static MapType ofOptional(int keyId, int valueId, Type keyType, Type valueType) {
    Preconditions.checkNotNull(valueType, "Value type cannot be null");
    return new MapType(
        NestedField.required(keyId, "key", keyType),
        NestedField.optional(valueId, "value", valueType));
  }

  /** Returns a map whose values may not be null. */
  public static MapType ofRequired(int keyId, int valueId, Type keyType, Type valueType) {
    Preconditions.checkNotNull(valueType, "Value type cannot be null");
    return new MapType(
        NestedField.required(keyId, "key", keyType),
        NestedField.required(valueId, "value", valueType));
  }

  private final NestedField keyField;
  private final NestedField valueField;
  private transient List<NestedField> fields = null;

  private MapType(NestedField keyField, NestedField valueField) {
    this.keyField = keyField;
    this.valueField = valueField;
  }

  public Type keyType() {
    return keyField.type();
  }

  public Type valueType() {
    return valueField.type();
  }

  @Override
  public Type fieldType(String name) {
    if ("key".equals(name)) {
      return keyField.type();
    }
    return "value".equals(name) ? valueField.type() : null;
  }

  @Override
  public NestedField field(int id) {
    if (id == keyField.fieldId()) {
      return keyField;
    }
    return id == valueField.fieldId() ? valueField : null;
  }

  @Override
  public List<NestedField> fields() {
    return lazyFieldList();
  }

  public int keyId() {
    return keyField.fieldId();
  }

  public int valueId() {
    return valueField.fieldId();
  }

  public boolean isValueRequired() {
    return !valueField.isOptional;
  }

  public boolean isValueOptional() {
    return valueField.isOptional;
  }

  @Override
  public TypeID typeId() {
    return TypeID.MAP;
  }

  @Override
  public boolean isMapType() {
    return true;
  }

  @Override
  public Types.MapType asMapType() {
    return this;
  }

  @Override
  public String toString() {
    return "map<" + keyField.type() + ", " + valueField.type() + ">";
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    if (!(o instanceof MapType)) {
      return false;
    }
    MapType other = (MapType) o;
    return keyField.equals(other.keyField) && valueField.equals(other.valueField);
  }

  @Override
  public int hashCode() {
    return Objects.hash(MapType.class, keyField, valueField);
  }

  private List<NestedField> lazyFieldList() {
    if (fields == null) {
      this.fields = ImmutableList.of(keyField, valueField);
    }
    return fields;
  }
}
}
|
googleapis/google-cloud-java | 35,940 | java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListTablesRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.bigquery.biglake.v1alpha1;
/**
*
*
* <pre>
* Request message for the ListTables method.
* </pre>
*
* Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest}
*/
public final class ListTablesRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest)
ListTablesRequestOrBuilder {
// NOTE: protoc-generated code (see file header: "DO NOT EDIT!"); regeneration will
// discard hand edits.
private static final long serialVersionUID = 0L;

// Use ListTablesRequest.newBuilder() to construct.
private ListTablesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// Initializes proto3 scalar defaults: empty strings and the zero enum value.
private ListTablesRequest() {
  parent_ = "";
  pageToken_ = "";
  view_ = 0;
}

@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new ListTablesRequest();
}

// Message descriptor for reflection-based access.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto
      .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListTablesRequest_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto
      .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListTablesRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest.class,
          com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest.Builder.class);
}
// NOTE: protoc-generated accessors for the `parent` field; do not hand-edit.
public static final int PARENT_FIELD_NUMBER = 1;

// Stores either a String or a ByteString; decoded lazily and cached (standard
// protobuf string-field idiom).
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";

/**
 *
 *
 * <pre>
 * Required. The parent, which owns this collection of tables.
 * Format:
 * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The parent.
 */
@java.lang.Override
public java.lang.String getParent() {
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // First access after parsing: decode the UTF-8 bytes and cache the String.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    parent_ = s;
    return s;
  }
}

/**
 *
 *
 * <pre>
 * Required. The parent, which owns this collection of tables.
 * Format:
 * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The bytes for parent.
 */
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    // Encode once and cache the ByteString form.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    parent_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
// NOTE: protoc-generated accessor for the `page_size` field; do not hand-edit.
public static final int PAGE_SIZE_FIELD_NUMBER = 2;
private int pageSize_ = 0;

/**
 *
 *
 * <pre>
 * The maximum number of tables to return. The service may return fewer than
 * this value.
 * If unspecified, at most 50 tables will be returned.
 * The maximum value is 1000; values above 1000 will be coerced to 1000.
 * </pre>
 *
 * <code>int32 page_size = 2;</code>
 *
 * @return The pageSize.
 */
@java.lang.Override
public int getPageSize() {
  return pageSize_;
}
// NOTE: protoc-generated accessors for the `page_token` field; do not hand-edit.
public static final int PAGE_TOKEN_FIELD_NUMBER = 3;

// Stores either a String or a ByteString; decoded lazily and cached.
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";

/**
 *
 *
 * <pre>
 * A page token, received from a previous `ListTables` call.
 * Provide this to retrieve the subsequent page.
 *
 * When paginating, all other parameters provided to `ListTables` must match
 * the call that provided the page token.
 * </pre>
 *
 * <code>string page_token = 3;</code>
 *
 * @return The pageToken.
 */
@java.lang.Override
public java.lang.String getPageToken() {
  java.lang.Object ref = pageToken_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // First access after parsing: decode the UTF-8 bytes and cache the String.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    pageToken_ = s;
    return s;
  }
}

/**
 *
 *
 * <pre>
 * A page token, received from a previous `ListTables` call.
 * Provide this to retrieve the subsequent page.
 *
 * When paginating, all other parameters provided to `ListTables` must match
 * the call that provided the page token.
 * </pre>
 *
 * <code>string page_token = 3;</code>
 *
 * @return The bytes for pageToken.
 */
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
  java.lang.Object ref = pageToken_;
  if (ref instanceof java.lang.String) {
    // Encode once and cache the ByteString form.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    pageToken_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
// NOTE: protoc-generated accessors for the `view` enum field; do not hand-edit.
public static final int VIEW_FIELD_NUMBER = 4;
private int view_ = 0;

/**
 *
 *
 * <pre>
 * The view for the returned tables.
 * </pre>
 *
 * <code>.google.cloud.bigquery.biglake.v1alpha1.TableView view = 4;</code>
 *
 * @return The enum numeric value on the wire for view.
 */
@java.lang.Override
public int getViewValue() {
  return view_;
}

/**
 *
 *
 * <pre>
 * The view for the returned tables.
 * </pre>
 *
 * <code>.google.cloud.bigquery.biglake.v1alpha1.TableView view = 4;</code>
 *
 * @return The view.
 */
@java.lang.Override
public com.google.cloud.bigquery.biglake.v1alpha1.TableView getView() {
  com.google.cloud.bigquery.biglake.v1alpha1.TableView result =
      com.google.cloud.bigquery.biglake.v1alpha1.TableView.forNumber(view_);
  // Wire values not known to this generated code map to UNRECOGNIZED.
  return result == null
      ? com.google.cloud.bigquery.biglake.v1alpha1.TableView.UNRECOGNIZED
      : result;
}
// Memoized initialization state: -1 unknown, 0 false, 1 true (protoc-generated).
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // No required proto2 fields in this message, so it is always initialized.
  memoizedIsInitialized = 1;
  return true;
}
// NOTE: protoc-generated serialization; fields at their proto3 default values are
// skipped on the wire. Do not hand-edit.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
  }
  if (pageSize_ != 0) {
    output.writeInt32(2, pageSize_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
  }
  if (view_
      != com.google.cloud.bigquery.biglake.v1alpha1.TableView.TABLE_VIEW_UNSPECIFIED
          .getNumber()) {
    output.writeEnum(4, view_);
  }
  getUnknownFields().writeTo(output);
}

// Computes and memoizes the serialized size; mirrors the writeTo logic above.
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;

  size = 0;
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
  }
  if (pageSize_ != 0) {
    size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
  }
  if (view_
      != com.google.cloud.bigquery.biglake.v1alpha1.TableView.TABLE_VIEW_UNSPECIFIED
          .getNumber()) {
    size += com.google.protobuf.CodedOutputStream.computeEnumSize(4, view_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
// NOTE: protoc-generated value equality over all fields plus unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest)) {
    return super.equals(obj);
  }
  com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest other =
      (com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest) obj;

  if (!getParent().equals(other.getParent())) return false;
  if (getPageSize() != other.getPageSize()) return false;
  if (!getPageToken().equals(other.getPageToken())) return false;
  if (view_ != other.view_) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}

// Memoized hash over the descriptor and every field, consistent with equals.
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + PARENT_FIELD_NUMBER;
  hash = (53 * hash) + getParent().hashCode();
  hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
  hash = (53 * hash) + getPageSize();
  hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
  hash = (53 * hash) + getPageToken().hashCode();
  hash = (37 * hash) + VIEW_FIELD_NUMBER;
  hash = (53 * hash) + view_;
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// NOTE: protoc-generated parse entry points, one overload per input source
// (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with and
// without an extension registry. Do not hand-edit.
public static com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

// Delimited variants read a length-prefixed message from the stream.
public static com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
  /**
   *
   *
   * <pre>
   * Request message for the ListTables method.
   * </pre>
   *
   * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest)
      com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequestOrBuilder {
    // Descriptor for this message type; shared with the enclosing message class.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto
          .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListTablesRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto
          .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListTablesRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest.class,
              com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest.Builder.class);
    }

    // Construct using com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    // Resets every field to its proto3 default value and clears all presence bits.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      parent_ = "";
      pageSize_ = 0;
      pageToken_ = "";
      view_ = 0;
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto
          .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListTablesRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest
        getDefaultInstanceForType() {
      return com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest.getDefaultInstance();
    }

    // Builds the message, throwing if it is not fully initialized
    // (always initialized for this proto3 message — no required fields).
    @java.lang.Override
    public com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest build() {
      com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    // Builds the message without the initialization check.
    @java.lang.Override
    public com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest buildPartial() {
      com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest result =
          new com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies only the fields whose presence bit is set into the result message.
    private void buildPartial0(
        com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.pageSize_ = pageSize_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.pageToken_ = pageToken_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.view_ = view_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    // Dynamic merge entry point; dispatches to the typed overload when possible.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest) {
        return mergeFrom((com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Merges every non-default field of {@code other} into this builder
    // (proto3 merge semantics: default-valued fields in other are ignored).
    public Builder mergeFrom(com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest other) {
      if (other
          == com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest.getDefaultInstance())
        return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.getPageSize() != 0) {
        setPageSize(other.getPageSize());
      }
      if (!other.getPageToken().isEmpty()) {
        pageToken_ = other.pageToken_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      if (other.view_ != 0) {
        setViewValue(other.getViewValue());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Reads fields from the wire until end of input, an end-group tag, or an error.
    // Tag values are (field_number << 3) | wire_type: 10 = parent, 16 = page_size,
    // 26 = page_token, 32 = view.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 16:
              {
                pageSize_ = input.readInt32();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
            case 26:
              {
                pageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            case 32:
              {
                view_ = input.readEnum();
                bitField0_ |= 0x00000008;
                break;
              } // case 32
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify parent builders even on error, so partially-read state is observed.
        onChanged();
      } // finally
      return this;
    }

    // Presence bits: 0x1 = parent, 0x2 = pageSize, 0x4 = pageToken, 0x8 = view.
    private int bitField0_;

    // Holds either a String or a ByteString; lazily converted on access.
    private java.lang.Object parent_ = "";
    /**
     *
     *
     * <pre>
     * Required. The parent, which owns this collection of tables.
     * Format:
     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded String so subsequent reads skip the UTF-8 conversion.
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The parent, which owns this collection of tables.
     * Format:
     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The parent, which owns this collection of tables.
     * Format:
     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The parent to set.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The parent, which owns this collection of tables.
     * Format:
     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The parent, which owns this collection of tables.
     * Format:
     * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for parent to set.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    private int pageSize_;
    /**
     *
     *
     * <pre>
     * The maximum number of tables to return. The service may return fewer than
     * this value.
     * If unspecified, at most 50 tables will be returned.
     * The maximum value is 1000; values above 1000 will be coerced to 1000.
     * </pre>
     *
     * <code>int32 page_size = 2;</code>
     *
     * @return The pageSize.
     */
    @java.lang.Override
    public int getPageSize() {
      return pageSize_;
    }
    /**
     *
     *
     * <pre>
     * The maximum number of tables to return. The service may return fewer than
     * this value.
     * If unspecified, at most 50 tables will be returned.
     * The maximum value is 1000; values above 1000 will be coerced to 1000.
     * </pre>
     *
     * <code>int32 page_size = 2;</code>
     *
     * @param value The pageSize to set.
     * @return This builder for chaining.
     */
    public Builder setPageSize(int value) {
      pageSize_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The maximum number of tables to return. The service may return fewer than
     * this value.
     * If unspecified, at most 50 tables will be returned.
     * The maximum value is 1000; values above 1000 will be coerced to 1000.
     * </pre>
     *
     * <code>int32 page_size = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageSize() {
      bitField0_ = (bitField0_ & ~0x00000002);
      pageSize_ = 0;
      onChanged();
      return this;
    }

    // Holds either a String or a ByteString; lazily converted on access.
    private java.lang.Object pageToken_ = "";
    /**
     *
     *
     * <pre>
     * A page token, received from a previous `ListTables` call.
     * Provide this to retrieve the subsequent page.
     *
     * When paginating, all other parameters provided to `ListTables` must match
     * the call that provided the page token.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @return The pageToken.
     */
    public java.lang.String getPageToken() {
      java.lang.Object ref = pageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        pageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A page token, received from a previous `ListTables` call.
     * Provide this to retrieve the subsequent page.
     *
     * When paginating, all other parameters provided to `ListTables` must match
     * the call that provided the page token.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @return The bytes for pageToken.
     */
    public com.google.protobuf.ByteString getPageTokenBytes() {
      java.lang.Object ref = pageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        pageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A page token, received from a previous `ListTables` call.
     * Provide this to retrieve the subsequent page.
     *
     * When paginating, all other parameters provided to `ListTables` must match
     * the call that provided the page token.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @param value The pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      pageToken_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A page token, received from a previous `ListTables` call.
     * Provide this to retrieve the subsequent page.
     *
     * When paginating, all other parameters provided to `ListTables` must match
     * the call that provided the page token.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageToken() {
      pageToken_ = getDefaultInstance().getPageToken();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A page token, received from a previous `ListTables` call.
     * Provide this to retrieve the subsequent page.
     *
     * When paginating, all other parameters provided to `ListTables` must match
     * the call that provided the page token.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @param value The bytes for pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      pageToken_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    // Stored as the raw wire enum number so unknown values survive round-trips.
    private int view_ = 0;
    /**
     *
     *
     * <pre>
     * The view for the returned tables.
     * </pre>
     *
     * <code>.google.cloud.bigquery.biglake.v1alpha1.TableView view = 4;</code>
     *
     * @return The enum numeric value on the wire for view.
     */
    @java.lang.Override
    public int getViewValue() {
      return view_;
    }
    /**
     *
     *
     * <pre>
     * The view for the returned tables.
     * </pre>
     *
     * <code>.google.cloud.bigquery.biglake.v1alpha1.TableView view = 4;</code>
     *
     * @param value The enum numeric value on the wire for view to set.
     * @return This builder for chaining.
     */
    public Builder setViewValue(int value) {
      view_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The view for the returned tables.
     * </pre>
     *
     * <code>.google.cloud.bigquery.biglake.v1alpha1.TableView view = 4;</code>
     *
     * @return The view.
     */
    @java.lang.Override
    public com.google.cloud.bigquery.biglake.v1alpha1.TableView getView() {
      com.google.cloud.bigquery.biglake.v1alpha1.TableView result =
          com.google.cloud.bigquery.biglake.v1alpha1.TableView.forNumber(view_);
      return result == null
          ? com.google.cloud.bigquery.biglake.v1alpha1.TableView.UNRECOGNIZED
          : result;
    }
    /**
     *
     *
     * <pre>
     * The view for the returned tables.
     * </pre>
     *
     * <code>.google.cloud.bigquery.biglake.v1alpha1.TableView view = 4;</code>
     *
     * @param value The view to set.
     * @return This builder for chaining.
     */
    public Builder setView(com.google.cloud.bigquery.biglake.v1alpha1.TableView value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000008;
      view_ = value.getNumber();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The view for the returned tables.
     * </pre>
     *
     * <code>.google.cloud.bigquery.biglake.v1alpha1.TableView view = 4;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearView() {
      bitField0_ = (bitField0_ & ~0x00000008);
      view_ = 0;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest)
  }
// @@protoc_insertion_point(class_scope:google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest)
private static final com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest();
}
public static com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
  // Stateless parser shared by every parseFrom/parseDelimitedFrom entry point.
  private static final com.google.protobuf.Parser<ListTablesRequest> PARSER =
      new com.google.protobuf.AbstractParser<ListTablesRequest>() {
        @java.lang.Override
        public ListTablesRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach whatever was parsed so far as the "unfinished" message before rethrowing.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap raw I/O failures in the protobuf exception type the caller expects.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  /** Returns the shared parser for {@code ListTablesRequest}. */
  public static com.google.protobuf.Parser<ListTablesRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListTablesRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.bigquery.biglake.v1alpha1.ListTablesRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/certificatemanager/v1/certificate_manager.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.certificatemanager.v1;
/**
*
*
* <pre>
* Request for the `UpdateCertificate` method.
* </pre>
*
* Protobuf type {@code google.cloud.certificatemanager.v1.UpdateCertificateRequest}
*/
public final class UpdateCertificateRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.certificatemanager.v1.UpdateCertificateRequest)
UpdateCertificateRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use UpdateCertificateRequest.newBuilder() to construct.
  private UpdateCertificateRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private UpdateCertificateRequest() {}

  // Reflective instantiation hook used by the protobuf runtime.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new UpdateCertificateRequest();
  }

  // Descriptor for this message type; shared with the Builder.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.certificatemanager.v1.CertificateManagerProto
        .internal_static_google_cloud_certificatemanager_v1_UpdateCertificateRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.certificatemanager.v1.CertificateManagerProto
        .internal_static_google_cloud_certificatemanager_v1_UpdateCertificateRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.certificatemanager.v1.UpdateCertificateRequest.class,
            com.google.cloud.certificatemanager.v1.UpdateCertificateRequest.Builder.class);
  }
  // Presence bits: 0x1 = certificate, 0x2 = updateMask.
  private int bitField0_;
  public static final int CERTIFICATE_FIELD_NUMBER = 1;
  // null means "unset"; accessors substitute the default instance.
  private com.google.cloud.certificatemanager.v1.Certificate certificate_;
  /**
   *
   *
   * <pre>
   * Required. A definition of the certificate to update.
   * </pre>
   *
   * <code>
   * .google.cloud.certificatemanager.v1.Certificate certificate = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the certificate field is set.
   */
  @java.lang.Override
  public boolean hasCertificate() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Required. A definition of the certificate to update.
   * </pre>
   *
   * <code>
   * .google.cloud.certificatemanager.v1.Certificate certificate = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The certificate.
   */
  @java.lang.Override
  public com.google.cloud.certificatemanager.v1.Certificate getCertificate() {
    return certificate_ == null
        ? com.google.cloud.certificatemanager.v1.Certificate.getDefaultInstance()
        : certificate_;
  }
  /**
   *
   *
   * <pre>
   * Required. A definition of the certificate to update.
   * </pre>
   *
   * <code>
   * .google.cloud.certificatemanager.v1.Certificate certificate = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.certificatemanager.v1.CertificateOrBuilder getCertificateOrBuilder() {
    return certificate_ == null
        ? com.google.cloud.certificatemanager.v1.Certificate.getDefaultInstance()
        : certificate_;
  }
  public static final int UPDATE_MASK_FIELD_NUMBER = 2;
  // null means "unset"; accessors substitute the default instance.
  private com.google.protobuf.FieldMask updateMask_;
  /**
   *
   *
   * <pre>
   * Required. The update mask applies to the resource. For the `FieldMask`
   * definition, see
   * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the updateMask field is set.
   */
  @java.lang.Override
  public boolean hasUpdateMask() {
    return ((bitField0_ & 0x00000002) != 0);
  }
  /**
   *
   *
   * <pre>
   * Required. The update mask applies to the resource. For the `FieldMask`
   * definition, see
   * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The updateMask.
   */
  @java.lang.Override
  public com.google.protobuf.FieldMask getUpdateMask() {
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }
  /**
   *
   *
   * <pre>
   * Required. The update mask applies to the resource. For the `FieldMask`
   * definition, see
   * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }
  // Memoized initialization state: -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // Proto3 messages have no required fields at the wire level, so always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes only the fields whose presence bit is set, then any unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getCertificate());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(2, getUpdateMask());
    }
    getUnknownFields().writeTo(output);
  }
  // Computes the serialized byte size; memoized in memoizedSize (-1 = not yet computed).
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getCertificate());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.certificatemanager.v1.UpdateCertificateRequest)) {
return super.equals(obj);
}
com.google.cloud.certificatemanager.v1.UpdateCertificateRequest other =
(com.google.cloud.certificatemanager.v1.UpdateCertificateRequest) obj;
if (hasCertificate() != other.hasCertificate()) return false;
if (hasCertificate()) {
if (!getCertificate().equals(other.getCertificate())) return false;
}
if (hasUpdateMask() != other.hasUpdateMask()) return false;
if (hasUpdateMask()) {
if (!getUpdateMask().equals(other.getUpdateMask())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
  // Hash is derived from the descriptor, each present field, and unknown fields;
  // memoized in memoizedHashCode (0 = not yet computed).
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasCertificate()) {
      hash = (37 * hash) + CERTIFICATE_FIELD_NUMBER;
      hash = (53 * hash) + getCertificate().hashCode();
    }
    if (hasUpdateMask()) {
      hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
      hash = (53 * hash) + getUpdateMask().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Static parse entry points. All overloads delegate to the shared PARSER; the
  // ExtensionRegistryLite variants resolve extensions during parsing.
  public static com.google.cloud.certificatemanager.v1.UpdateCertificateRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.certificatemanager.v1.UpdateCertificateRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.certificatemanager.v1.UpdateCertificateRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.certificatemanager.v1.UpdateCertificateRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.certificatemanager.v1.UpdateCertificateRequest parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.certificatemanager.v1.UpdateCertificateRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  // Stream variants wrap IOExceptions via the GeneratedMessageV3 helpers.
  public static com.google.cloud.certificatemanager.v1.UpdateCertificateRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.certificatemanager.v1.UpdateCertificateRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a varint length prefix before the message bytes.
  public static com.google.cloud.certificatemanager.v1.UpdateCertificateRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.certificatemanager.v1.UpdateCertificateRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.certificatemanager.v1.UpdateCertificateRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.certificatemanager.v1.UpdateCertificateRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.certificatemanager.v1.UpdateCertificateRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request for the `UpdateCertificate` method.
* </pre>
*
* Protobuf type {@code google.cloud.certificatemanager.v1.UpdateCertificateRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.certificatemanager.v1.UpdateCertificateRequest)
com.google.cloud.certificatemanager.v1.UpdateCertificateRequestOrBuilder {
    // Descriptor for this message type; shared with the enclosing message class.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.certificatemanager.v1.CertificateManagerProto
          .internal_static_google_cloud_certificatemanager_v1_UpdateCertificateRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.certificatemanager.v1.CertificateManagerProto
          .internal_static_google_cloud_certificatemanager_v1_UpdateCertificateRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.certificatemanager.v1.UpdateCertificateRequest.class,
              com.google.cloud.certificatemanager.v1.UpdateCertificateRequest.Builder.class);
    }

    // Construct using com.google.cloud.certificatemanager.v1.UpdateCertificateRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    // Eagerly creates nested-message field builders when the runtime requires it.
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getCertificateFieldBuilder();
        getUpdateMaskFieldBuilder();
      }
    }

    // Resets both message fields (disposing any sub-builders) and clears the presence bits.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      certificate_ = null;
      if (certificateBuilder_ != null) {
        certificateBuilder_.dispose();
        certificateBuilder_ = null;
      }
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.certificatemanager.v1.CertificateManagerProto
          .internal_static_google_cloud_certificatemanager_v1_UpdateCertificateRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.certificatemanager.v1.UpdateCertificateRequest
        getDefaultInstanceForType() {
      return com.google.cloud.certificatemanager.v1.UpdateCertificateRequest.getDefaultInstance();
    }

    // Builds the message, throwing if it is not fully initialized.
    @java.lang.Override
    public com.google.cloud.certificatemanager.v1.UpdateCertificateRequest build() {
      com.google.cloud.certificatemanager.v1.UpdateCertificateRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    // Builds the message without the initialization check.
    @java.lang.Override
    public com.google.cloud.certificatemanager.v1.UpdateCertificateRequest buildPartial() {
      com.google.cloud.certificatemanager.v1.UpdateCertificateRequest result =
          new com.google.cloud.certificatemanager.v1.UpdateCertificateRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
private void buildPartial0(
com.google.cloud.certificatemanager.v1.UpdateCertificateRequest result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.certificate_ =
certificateBuilder_ == null ? certificate_ : certificateBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.certificatemanager.v1.UpdateCertificateRequest) {
return mergeFrom((com.google.cloud.certificatemanager.v1.UpdateCertificateRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.certificatemanager.v1.UpdateCertificateRequest other) {
if (other
== com.google.cloud.certificatemanager.v1.UpdateCertificateRequest.getDefaultInstance())
return this;
if (other.hasCertificate()) {
mergeCertificate(other.getCertificate());
}
if (other.hasUpdateMask()) {
mergeUpdateMask(other.getUpdateMask());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(getCertificateFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.cloud.certificatemanager.v1.Certificate certificate_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.certificatemanager.v1.Certificate,
com.google.cloud.certificatemanager.v1.Certificate.Builder,
com.google.cloud.certificatemanager.v1.CertificateOrBuilder>
certificateBuilder_;
/**
*
*
* <pre>
* Required. A definition of the certificate to update.
* </pre>
*
* <code>
* .google.cloud.certificatemanager.v1.Certificate certificate = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the certificate field is set.
*/
public boolean hasCertificate() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. A definition of the certificate to update.
* </pre>
*
* <code>
* .google.cloud.certificatemanager.v1.Certificate certificate = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The certificate.
*/
public com.google.cloud.certificatemanager.v1.Certificate getCertificate() {
if (certificateBuilder_ == null) {
return certificate_ == null
? com.google.cloud.certificatemanager.v1.Certificate.getDefaultInstance()
: certificate_;
} else {
return certificateBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. A definition of the certificate to update.
* </pre>
*
* <code>
* .google.cloud.certificatemanager.v1.Certificate certificate = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setCertificate(com.google.cloud.certificatemanager.v1.Certificate value) {
if (certificateBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
certificate_ = value;
} else {
certificateBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. A definition of the certificate to update.
* </pre>
*
* <code>
* .google.cloud.certificatemanager.v1.Certificate certificate = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setCertificate(
com.google.cloud.certificatemanager.v1.Certificate.Builder builderForValue) {
if (certificateBuilder_ == null) {
certificate_ = builderForValue.build();
} else {
certificateBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. A definition of the certificate to update.
* </pre>
*
* <code>
* .google.cloud.certificatemanager.v1.Certificate certificate = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeCertificate(com.google.cloud.certificatemanager.v1.Certificate value) {
if (certificateBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& certificate_ != null
&& certificate_
!= com.google.cloud.certificatemanager.v1.Certificate.getDefaultInstance()) {
getCertificateBuilder().mergeFrom(value);
} else {
certificate_ = value;
}
} else {
certificateBuilder_.mergeFrom(value);
}
if (certificate_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. A definition of the certificate to update.
* </pre>
*
* <code>
* .google.cloud.certificatemanager.v1.Certificate certificate = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearCertificate() {
bitField0_ = (bitField0_ & ~0x00000001);
certificate_ = null;
if (certificateBuilder_ != null) {
certificateBuilder_.dispose();
certificateBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. A definition of the certificate to update.
* </pre>
*
* <code>
* .google.cloud.certificatemanager.v1.Certificate certificate = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.certificatemanager.v1.Certificate.Builder getCertificateBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getCertificateFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. A definition of the certificate to update.
* </pre>
*
* <code>
* .google.cloud.certificatemanager.v1.Certificate certificate = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.certificatemanager.v1.CertificateOrBuilder getCertificateOrBuilder() {
if (certificateBuilder_ != null) {
return certificateBuilder_.getMessageOrBuilder();
} else {
return certificate_ == null
? com.google.cloud.certificatemanager.v1.Certificate.getDefaultInstance()
: certificate_;
}
}
/**
*
*
* <pre>
* Required. A definition of the certificate to update.
* </pre>
*
* <code>
* .google.cloud.certificatemanager.v1.Certificate certificate = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.certificatemanager.v1.Certificate,
com.google.cloud.certificatemanager.v1.Certificate.Builder,
com.google.cloud.certificatemanager.v1.CertificateOrBuilder>
getCertificateFieldBuilder() {
if (certificateBuilder_ == null) {
certificateBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.certificatemanager.v1.Certificate,
com.google.cloud.certificatemanager.v1.Certificate.Builder,
com.google.cloud.certificatemanager.v1.CertificateOrBuilder>(
getCertificate(), getParentForChildren(), isClean());
certificate_ = null;
}
return certificateBuilder_;
}
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
updateMaskBuilder_;
/**
*
*
* <pre>
* Required. The update mask applies to the resource. For the `FieldMask`
* definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The update mask applies to the resource. For the `FieldMask`
* definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
public com.google.protobuf.FieldMask getUpdateMask() {
if (updateMaskBuilder_ == null) {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
} else {
return updateMaskBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The update mask applies to the resource. For the `FieldMask`
* definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateMask_ = value;
} else {
updateMaskBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The update mask applies to the resource. For the `FieldMask`
* definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
if (updateMaskBuilder_ == null) {
updateMask_ = builderForValue.build();
} else {
updateMaskBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The update mask applies to the resource. For the `FieldMask`
* definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& updateMask_ != null
&& updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
getUpdateMaskBuilder().mergeFrom(value);
} else {
updateMask_ = value;
}
} else {
updateMaskBuilder_.mergeFrom(value);
}
if (updateMask_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The update mask applies to the resource. For the `FieldMask`
* definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearUpdateMask() {
bitField0_ = (bitField0_ & ~0x00000002);
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The update mask applies to the resource. For the `FieldMask`
* definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getUpdateMaskFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The update mask applies to the resource. For the `FieldMask`
* definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
if (updateMaskBuilder_ != null) {
return updateMaskBuilder_.getMessageOrBuilder();
} else {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
}
}
/**
*
*
* <pre>
* Required. The update mask applies to the resource. For the `FieldMask`
* definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
getUpdateMaskFieldBuilder() {
if (updateMaskBuilder_ == null) {
updateMaskBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>(
getUpdateMask(), getParentForChildren(), isClean());
updateMask_ = null;
}
return updateMaskBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.certificatemanager.v1.UpdateCertificateRequest)
}
  // @@protoc_insertion_point(class_scope:google.cloud.certificatemanager.v1.UpdateCertificateRequest)
  // Shared immutable all-fields-unset instance returned by getDefaultInstance().
  private static final com.google.cloud.certificatemanager.v1.UpdateCertificateRequest
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.certificatemanager.v1.UpdateCertificateRequest();
  }
  /** Returns the shared default (empty) instance of this message type. */
  public static com.google.cloud.certificatemanager.v1.UpdateCertificateRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser used by the protobuf runtime; delegates to Builder.mergeFrom and attaches the
  // partially-built message to any parse failure so callers can inspect what was read.
  private static final com.google.protobuf.Parser<UpdateCertificateRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateCertificateRequest>() {
        @java.lang.Override
        public UpdateCertificateRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  /** Returns the shared parser for this message type. */
  public static com.google.protobuf.Parser<UpdateCertificateRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<UpdateCertificateRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.certificatemanager.v1.UpdateCertificateRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/twill | 36,202 | twill-yarn/src/main/java/org/apache/twill/yarn/YarnTwillPreparer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.twill.yarn;
import com.google.common.base.Function;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.base.Throwables;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Multimap;
import com.google.common.collect.Sets;
import com.google.common.hash.Hasher;
import com.google.common.hash.Hashing;
import com.google.common.io.ByteStreams;
import com.google.common.io.OutputSupplier;
import com.google.common.reflect.TypeToken;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import joptsimple.OptionSpec;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.twill.api.ClassAcceptor;
import org.apache.twill.api.Configs;
import org.apache.twill.api.EventHandlerSpecification;
import org.apache.twill.api.LocalFile;
import org.apache.twill.api.RunId;
import org.apache.twill.api.RuntimeSpecification;
import org.apache.twill.api.SecureStore;
import org.apache.twill.api.TwillController;
import org.apache.twill.api.TwillPreparer;
import org.apache.twill.api.TwillSpecification;
import org.apache.twill.api.logging.LogEntry;
import org.apache.twill.api.logging.LogHandler;
import org.apache.twill.filesystem.Location;
import org.apache.twill.internal.ApplicationBundler;
import org.apache.twill.internal.Arguments;
import org.apache.twill.internal.Constants;
import org.apache.twill.internal.DefaultLocalFile;
import org.apache.twill.internal.DefaultRuntimeSpecification;
import org.apache.twill.internal.DefaultTwillSpecification;
import org.apache.twill.internal.EnvKeys;
import org.apache.twill.internal.JvmOptions;
import org.apache.twill.internal.LogOnlyEventHandler;
import org.apache.twill.internal.ProcessController;
import org.apache.twill.internal.ProcessLauncher;
import org.apache.twill.internal.TwillRuntimeSpecification;
import org.apache.twill.internal.appmaster.ApplicationMasterInfo;
import org.apache.twill.internal.appmaster.ApplicationMasterMain;
import org.apache.twill.internal.container.TwillContainerMain;
import org.apache.twill.internal.io.LocationCache;
import org.apache.twill.internal.json.ArgumentsCodec;
import org.apache.twill.internal.json.LocalFileCodec;
import org.apache.twill.internal.json.TwillRuntimeSpecificationAdapter;
import org.apache.twill.internal.utils.Dependencies;
import org.apache.twill.internal.utils.Paths;
import org.apache.twill.internal.utils.Resources;
import org.apache.twill.internal.yarn.VersionDetectYarnAppClientFactory;
import org.apache.twill.internal.yarn.YarnAppClient;
import org.apache.twill.internal.yarn.YarnApplicationReport;
import org.apache.twill.internal.yarn.YarnUtils;
import org.apache.twill.launcher.FindFreePort;
import org.apache.twill.launcher.TwillLauncher;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.TimeUnit;
import java.util.jar.JarEntry;
import java.util.jar.JarOutputStream;
import java.util.stream.Collectors;
import javax.annotation.Nullable;
/**
* Implementation for {@link TwillPreparer} to prepare and launch distributed application on Hadoop YARN.
*/
final class YarnTwillPreparer implements TwillPreparer {
  private static final Logger LOG = LoggerFactory.getLogger(YarnTwillPreparer.class);
  // Immutable collaborators supplied at construction time.
  private final Configuration config;
  private final TwillSpecification twillSpec;
  private final String zkConnectString;
  private final Location appLocation;
  private final YarnTwillControllerFactory controllerFactory;
  private final RunId runId;
  // State accumulated through the fluent TwillPreparer setters before start() is called.
  private final List<LogHandler> logHandlers = Lists.newArrayList();
  private final List<String> arguments = Lists.newArrayList();
  private final Set<Class<?>> dependencies = Sets.newIdentityHashSet();
  private final List<URI> resources = Lists.newArrayList();
  private final List<String> classPaths = Lists.newArrayList();
  // Per-runnable settings, keyed by runnable name.
  private final ListMultimap<String, String> runnableArgs = ArrayListMultimap.create();
  private final Map<String, Map<String, String>> environments = Maps.newHashMap();
  private final List<String> applicationClassPaths = Lists.newArrayList();
  private final Credentials credentials;
  private final Map<String, Map<String, String>> logLevels = Maps.newHashMap();
  private final LocationCache locationCache;
  private final Map<String, Integer> maxRetries = Maps.newHashMap();
  private final Map<String, Map<String, String>> runnableConfigs = Maps.newHashMap();
  private final Map<String, String> runnableExtraOptions = Maps.newHashMap();
  // Optional JVM / debug / scheduling configuration; mutable until start().
  private String extraOptions;
  private JvmOptions.DebugOptions debugOptions = JvmOptions.DebugOptions.NO_DEBUG;
  private String schedulerQueue;
  private ClassAcceptor classAcceptor;
  private String classLoaderClassName;
  /**
   * Creates a preparer for launching the given application specification on YARN.
   *
   * @param config Hadoop/YARN configuration used for submission
   * @param twillSpec specification of the application to launch
   * @param runId run id assigned to this launch
   * @param zkConnectString ZooKeeper connection string used by the runtime
   * @param appLocation location for staging application files
   * @param extraOptions extra JVM options for containers; {@code null} is treated as empty
   * @param locationCache cache of localized files shared across launches
   * @param controllerFactory factory for creating the controller of the launched application
   */
  YarnTwillPreparer(Configuration config, TwillSpecification twillSpec, RunId runId,
                    String zkConnectString, Location appLocation, @Nullable String extraOptions,
                    LocationCache locationCache, YarnTwillControllerFactory controllerFactory) {
    this.config = config;
    this.twillSpec = twillSpec;
    this.runId = runId;
    this.zkConnectString = zkConnectString;
    this.appLocation = appLocation;
    this.controllerFactory = controllerFactory;
    this.credentials = createCredentials();
    // Normalize a null option string to empty so later concatenation never NPEs.
    this.extraOptions = extraOptions == null ? "" : extraOptions;
    this.classAcceptor = new ClassAcceptor();
    this.locationCache = locationCache;
  }
  /**
   * Validates that the given runnable name is non-null and declared by the application
   * specification; throws {@link IllegalArgumentException} otherwise.
   */
  private void confirmRunnableName(String runnableName) {
    Preconditions.checkNotNull(runnableName);
    Preconditions.checkArgument(twillSpec.getRunnables().containsKey(runnableName),
                                "Runnable %s is not defined in the application.", runnableName);
  }
  /** Merges the given key/value pairs into the submission-wide Hadoop configuration. */
  @Override
  public TwillPreparer withConfiguration(Map<String, String> config) {
    for (Map.Entry<String, String> entry : config.entrySet()) {
      this.config.set(entry.getKey(), entry.getValue());
    }
    return this;
  }
  /** Records a configuration override for a single runnable; replaces any earlier override. */
  @Override
  public TwillPreparer withConfiguration(String runnableName, Map<String, String> config) {
    confirmRunnableName(runnableName);
    runnableConfigs.put(runnableName, Maps.newHashMap(config));
    return this;
  }
  /** Registers a handler that will receive log entries collected from the application. */
  @Override
  public TwillPreparer addLogHandler(LogHandler handler) {
    logHandlers.add(handler);
    return this;
  }
  // Intentionally a no-op: the submitting user is determined by the Hadoop security context.
  @Override
  public TwillPreparer setUser(String user) {
    return this;
  }
  /** Sets the YARN scheduler queue to submit the application to. */
  @Override
  public TwillPreparer setSchedulerQueue(String name) {
    this.schedulerQueue = name;
    return this;
  }
  /** Replaces the extra JVM options applied to all runnables. */
  @Override
  public TwillPreparer setJVMOptions(String options) {
    Preconditions.checkArgument(options != null, "JVM options cannot be null.");
    this.extraOptions = options;
    return this;
  }
  /** Replaces the extra JVM options for a single runnable, overriding the application-wide ones. */
  @Override
  public TwillPreparer setJVMOptions(String runnableName, String options) {
    confirmRunnableName(runnableName);
    Preconditions.checkArgument(options != null, "JVM options cannot be null.");
    runnableExtraOptions.put(runnableName, options);
    return this;
  }
@Override
public TwillPreparer addJVMOptions(String options) {
Preconditions.checkArgument(options != null, "JVM options cannot be null.");
this.extraOptions = extraOptions.isEmpty() ? options : extraOptions + " " + options;
return this;
}
  /** Enables remote JVM debugging (without suspend-on-start) for the given runnables. */
  @Override
  public TwillPreparer enableDebugging(String... runnables) {
    return enableDebugging(false, runnables);
  }
  /**
   * Enables remote JVM debugging for the given runnables, optionally suspending the JVM
   * until a debugger attaches. Each name must be declared by the application specification.
   */
  @Override
  public TwillPreparer enableDebugging(boolean doSuspend, String... runnables) {
    for (String runnableName : runnables) {
      confirmRunnableName(runnableName);
    }
    this.debugOptions = new JvmOptions.DebugOptions(true, doSuspend, ImmutableSet.copyOf(runnables));
    return this;
  }
  /** Adds application-level arguments passed to every runnable. */
  @Override
  public TwillPreparer withApplicationArguments(String... args) {
    return withApplicationArguments(ImmutableList.copyOf(args));
  }
  /** Adds application-level arguments passed to every runnable. */
  @Override
  public TwillPreparer withApplicationArguments(Iterable<String> args) {
    Iterables.addAll(arguments, args);
    return this;
  }
  /** Adds arguments delivered only to the named runnable. */
  @Override
  public TwillPreparer withArguments(String runnableName, String... args) {
    return withArguments(runnableName, ImmutableList.copyOf(args));
  }
  /** Adds arguments delivered only to the named runnable. */
  @Override
  public TwillPreparer withArguments(String runnableName, Iterable<String> args) {
    confirmRunnableName(runnableName);
    runnableArgs.putAll(runnableName, args);
    return this;
  }
  /** Adds classes whose transitive dependencies should be bundled with the application. */
  @Override
  public TwillPreparer withDependencies(Class<?>... classes) {
    return withDependencies(ImmutableList.copyOf(classes));
  }
  /** Adds classes whose transitive dependencies should be bundled with the application. */
  @Override
  public TwillPreparer withDependencies(Iterable<Class<?>> classes) {
    Iterables.addAll(dependencies, classes);
    return this;
  }
  /** Adds resource files to be localized to each container. */
  @Override
  public TwillPreparer withResources(URI... resources) {
    return withResources(ImmutableList.copyOf(resources));
  }
  /** Adds resource files to be localized to each container. */
  @Override
  public TwillPreparer withResources(Iterable<URI> resources) {
    Iterables.addAll(this.resources, resources);
    return this;
  }
  /** Adds extra classpath entries for the runnable containers. */
  @Override
  public TwillPreparer withClassPaths(String... classPaths) {
    return withClassPaths(ImmutableList.copyOf(classPaths));
  }
  /** Adds extra classpath entries for the runnable containers. */
  @Override
  public TwillPreparer withClassPaths(Iterable<String> classPaths) {
    Iterables.addAll(this.classPaths, classPaths);
    return this;
  }
  /**
   * Applies the given environment variables to every runnable. Existing per-runnable
   * values are not overwritten (overwrite flag is {@code false}).
   */
  @Override
  public TwillPreparer withEnv(Map<String, String> env) {
    // Add the given environments to all runnables
    for (String runnableName : twillSpec.getRunnables().keySet()) {
      setEnv(runnableName, env, false);
    }
    return this;
  }
  /** Applies (and overwrites) environment variables for a single runnable. */
  @Override
  public TwillPreparer withEnv(String runnableName, Map<String, String> env) {
    confirmRunnableName(runnableName);
    setEnv(runnableName, env, true);
    return this;
  }
  /** Adds classpath entries resolved against the application environment (e.g. Hadoop jars). */
  @Override
  public TwillPreparer withApplicationClassPaths(String... classPaths) {
    return withApplicationClassPaths(ImmutableList.copyOf(classPaths));
  }
  /** Adds classpath entries resolved against the application environment (e.g. Hadoop jars). */
  @Override
  public TwillPreparer withApplicationClassPaths(Iterable<String> classPaths) {
    Iterables.addAll(this.applicationClassPaths, classPaths);
    return this;
  }
  /** Replaces the acceptor that decides which classes the application bundler packages. */
  @Override
  public TwillPreparer withBundlerClassAcceptor(ClassAcceptor classAcceptor) {
    this.classAcceptor = classAcceptor;
    return this;
  }
  /** Sets the maximum number of times the named runnable will be retried on failure. */
  @Override
  public TwillPreparer withMaxRetries(String runnableName, int maxRetries) {
    confirmRunnableName(runnableName);
    this.maxRetries.put(runnableName, maxRetries);
    return this;
  }
  /**
   * Merges the given secure store into the submission credentials. Only Hadoop
   * {@link Credentials}-backed stores are supported.
   */
  @Override
  public TwillPreparer addSecureStore(SecureStore secureStore) {
    Object store = secureStore.getStore();
    Preconditions.checkArgument(store instanceof Credentials, "Only Hadoop Credentials is supported.");
    this.credentials.mergeAll((Credentials) store);
    return this;
  }
  /** Sets the root-logger level for all runnables. */
  @Override
  public TwillPreparer setLogLevel(LogEntry.Level logLevel) {
    return setLogLevels(ImmutableMap.of(Logger.ROOT_LOGGER_NAME, logLevel));
  }
  /** Applies the given logger-name to level mapping to every runnable. */
  @Override
  public TwillPreparer setLogLevels(Map<String, LogEntry.Level> logLevels) {
    Preconditions.checkNotNull(logLevels);
    for (String runnableName : twillSpec.getRunnables().keySet()) {
      saveLogLevels(runnableName, logLevels);
    }
    return this;
  }
@Override
public TwillPreparer setLogLevels(String runnableName, Map<String, LogEntry.Level> runnableLogLevels) {
confirmRunnableName(runnableName);
Preconditions.checkNotNull(runnableLogLevels);
Preconditions.checkArgument(!(logLevels.containsKey(Logger.ROOT_LOGGER_NAME)
&& logLevels.get(Logger.ROOT_LOGGER_NAME) == null));
saveLogLevels(runnableName, runnableLogLevels);
return this;
}
  /** Sets the classloader class used inside the launched containers. */
  @Override
  public TwillPreparer setClassLoader(String classLoaderClassName) {
    this.classLoaderClassName = classLoaderClassName;
    return this;
  }
  /** Starts the application with the default submission timeout. */
  @Override
  public TwillController start() {
    return start(Constants.APPLICATION_MAX_START_SECONDS, TimeUnit.SECONDS);
  }
  /**
   * Builds all localized files (launcher, twill, application, resources and runtime config
   * jars), then submits the ApplicationMaster container to YARN and returns a controller for
   * the launched application.
   *
   * @param timeout maximum time to wait for the application to start
   * @param timeoutUnit unit of {@code timeout}
   * @throws RuntimeException (propagated) if preparing or submitting the application fails
   */
  @Override
  public TwillController start(long timeout, TimeUnit timeoutUnit) {
    try {
      final YarnAppClient yarnAppClient = new VersionDetectYarnAppClientFactory().create(config);
      final ProcessLauncher<ApplicationMasterInfo> launcher = yarnAppClient.createLauncher(twillSpec, schedulerQueue);
      final ApplicationMasterInfo appMasterInfo = launcher.getContainerInfo();
      // Deferred so the controller decides when the actual YARN submission happens.
      Callable<ProcessController<YarnApplicationReport>> submitTask =
        new Callable<ProcessController<YarnApplicationReport>>() {
        @Override
        public ProcessController<YarnApplicationReport> call() throws Exception {
          // Local files needed by AM
          Map<String, LocalFile> localFiles = Maps.newHashMap();
          createLauncherJar(localFiles);
          createTwillJar(createBundler(classAcceptor), yarnAppClient, localFiles);
          createApplicationJar(createBundler(classAcceptor), localFiles);
          createResourcesJar(createBundler(classAcceptor), localFiles);
          TwillRuntimeSpecification twillRuntimeSpec;
          JvmOptions jvmOptions;
          // Stage the runtime config files in a temp dir, jar them up, then clean up.
          Path runtimeConfigDir = Files.createTempDirectory(getLocalStagingDir().toPath(),
                                                            Constants.Files.RUNTIME_CONFIG_JAR);
          try {
            twillRuntimeSpec = saveSpecification(twillSpec, runtimeConfigDir.resolve(Constants.Files.TWILL_SPEC));
            saveLogback(runtimeConfigDir.resolve(Constants.Files.LOGBACK_TEMPLATE));
            saveClassPaths(runtimeConfigDir);
            jvmOptions = saveJvmOptions(runtimeConfigDir.resolve(Constants.Files.JVM_OPTIONS));
            saveArguments(new Arguments(arguments, runnableArgs),
                          runtimeConfigDir.resolve(Constants.Files.ARGUMENTS));
            saveEnvironments(runtimeConfigDir.resolve(Constants.Files.ENVIRONMENTS));
            createRuntimeConfigJar(runtimeConfigDir, localFiles);
          } finally {
            Paths.deleteRecursively(runtimeConfigDir);
          }
          createLocalizeFilesJson(localFiles);
          LOG.debug("Submit AM container spec: {}", appMasterInfo);
          // java -Djava.io.tmpdir=tmp -cp launcher.jar:$HADOOP_CONF_DIR -XmxMemory
          //     org.apache.twill.internal.TwillLauncher
          //     appMaster.jar
          //     org.apache.twill.internal.appmaster.ApplicationMasterMain
          //     false
          int memory = Resources.computeMaxHeapSize(appMasterInfo.getMemoryMB(),
                                                    twillRuntimeSpec.getAMReservedMemory(),
                                                    twillRuntimeSpec.getAMMinHeapRatio());
          return launcher.prepareLaunch(ImmutableMap.<String, String>of(), localFiles.values(),
                                        createSubmissionCredentials())
            .addCommand(
              "$JAVA_HOME/bin/java",
              "-Djava.io.tmpdir=tmp",
              "-Dyarn.appId=$" + EnvKeys.YARN_APP_ID_STR,
              "-Dtwill.app=$" + Constants.TWILL_APP_NAME,
              "-cp", Constants.Files.LAUNCHER_JAR + ":$HADOOP_CONF_DIR",
              "-Xmx" + memory + "m",
              jvmOptions.getAMExtraOptions(),
              TwillLauncher.class.getName(),
              ApplicationMasterMain.class.getName(),
              Boolean.FALSE.toString())
            .launch();
        }
      };
      boolean logCollectionEnabled = config.getBoolean(Configs.Keys.LOG_COLLECTION_ENABLED,
                                                       Configs.Defaults.LOG_COLLECTION_ENABLED);
      YarnTwillController controller = controllerFactory.create(runId, logCollectionEnabled,
                                                                logHandlers, submitTask, timeout, timeoutUnit);
      controller.start();
      return controller;
    } catch (Exception e) {
      LOG.error("Failed to submit application {}", twillSpec.getName(), e);
      throw Throwables.propagate(e);
    }
  }
/**
 * Resolves the local staging directory from the configuration, falling back to
 * the built-in default when the key is not set.
 */
private File getLocalStagingDir() {
  String dir = config.get(Configs.Keys.LOCAL_STAGING_DIRECTORY, Configs.Defaults.LOCAL_STAGING_DIRECTORY);
  return new File(dir);
}
/**
 * Appends a {@code -D} system property naming the custom container classloader
 * to the given JVM extra options, when such a classloader has been configured.
 *
 * @param extraOptions existing JVM options (may be empty, never null)
 * @return the options string, possibly extended with the classloader property
 */
private String addClassLoaderClassName(String extraOptions) {
  if (classLoaderClassName != null) {
    String property = "-D" + Constants.TWILL_CONTAINER_CLASSLOADER + "=" + classLoaderClassName;
    if (extraOptions.isEmpty()) {
      return property;
    }
    return extraOptions + " " + property;
  }
  return extraOptions;
}
/**
 * Records environment variables for the given runnable.
 *
 * @param runnableName name of the runnable the variables apply to
 * @param env variables to record
 * @param overwrite when {@code true}, existing entries are replaced; when
 *                  {@code false}, only missing keys are added
 */
private void setEnv(String runnableName, Map<String, String> env, boolean overwrite) {
  Map<String, String> existing = environments.get(runnableName);
  if (existing == null) {
    // First time seeing this runnable: keep insertion order of the provided map.
    environments.put(runnableName, new LinkedHashMap<>(env));
    return;
  }
  env.forEach((key, value) -> {
    if (overwrite || !existing.containsKey(key)) {
      existing.put(key, value);
    }
  });
}
/**
 * Stores the log levels for the given runnable, converting each level to its
 * enum name for later serialization.
 *
 * @param runnableName name of the runnable the levels apply to
 * @param logLevels map from logger name to level; values must be non-null
 * @throws IllegalArgumentException if any level is null
 */
private void saveLogLevels(String runnableName, Map<String, LogEntry.Level> logLevels) {
  Map<String, String> newLevels = new HashMap<>();
  for (Map.Entry<String, LogEntry.Level> entry : logLevels.entrySet()) {
    // Fix: Guava Preconditions uses %s placeholders, not SLF4J-style {};
    // the logger name was never substituted into the error message before.
    Preconditions.checkArgument(entry.getValue() != null, "Log level cannot be null for logger %s", entry.getKey());
    newLevels.put(entry.getKey(), entry.getValue().name());
  }
  this.logLevels.put(runnableName, newLevels);
}
/**
 * Builds a {@link Credentials} populated with a copy of the current user's
 * credentials. On failure to resolve the current user, returns an empty
 * Credentials and logs a warning (best-effort by design).
 */
private Credentials createCredentials() {
  Credentials result = new Credentials();
  try {
    UserGroupInformation currentUser = UserGroupInformation.getCurrentUser();
    result.addAll(currentUser.getCredentials());
  } catch (IOException e) {
    LOG.warn("Failed to get current user UGI. Current user credentials not added.", e);
  }
  return result;
}
/**
 * Creates the {@link Credentials} used for the YARN application submission.
 * Contains delegation tokens acquired for the application location, overlaid
 * with any user-provided credentials (user entries win on key collision).
 */
private Credentials createSubmissionCredentials() {
  Credentials credentials = new Credentials();
  try {
    // Acquires delegation token for the location
    List<Token<?>> tokens = YarnUtils.addDelegationTokens(config, appLocation.getLocationFactory(), credentials);
    if (LOG.isDebugEnabled()) {
      for (Token<?> token : tokens) {
        LOG.debug("Delegation token acquired for {}, {}", appLocation, token);
      }
    }
  } catch (IOException e) {
    // Fix: pass the exception to the logger; previously the cause was silently
    // dropped, making token-acquisition failures impossible to diagnose.
    LOG.warn("Failed to acquire delegation token for location {}", appLocation, e);
  }
  // Copy the user provided credentials.
  // It will override the location delegation tokens acquired above if user supplies it.
  credentials.addAll(this.credentials);
  return credentials;
}
/**
 * Creates a non-archive {@link LocalFile} for the given location.
 * Convenience overload of {@link #createLocalFile(String, Location, boolean)}.
 */
private LocalFile createLocalFile(String name, Location location) throws IOException {
return createLocalFile(name, location, false);
}
/**
 * Creates a {@link LocalFile} descriptor for the given location, capturing its
 * URI, last-modified time and size. No pattern is attached (last arg is null).
 *
 * @param archive whether YARN should expand the file as an archive on localization
 */
private LocalFile createLocalFile(String name, Location location, boolean archive) throws IOException {
return new DefaultLocalFile(name, location.toURI(), location.lastModified(), location.length(), archive, null);
}
/**
 * Creates (or reuses from the location cache) the jar containing the Twill
 * framework classes needed by the AM and containers, and registers it as a
 * localized archive.
 */
private void createTwillJar(final ApplicationBundler bundler,
final YarnAppClient yarnAppClient,
Map<String, LocalFile> localFiles) throws IOException {
LOG.debug("Create and copy {}", Constants.Files.TWILL_JAR);
// The loader only runs on cache miss; otherwise the cached location is reused.
Location location = locationCache.get(Constants.Files.TWILL_JAR, new LocationCache.Loader() {
@Override
public void load(String name, Location targetLocation) throws IOException {
// Stuck in the yarnAppClient class to make bundler being able to pickup the right yarn-client version
bundler.createBundle(targetLocation, ApplicationMasterMain.class,
yarnAppClient.getClass(), TwillContainerMain.class, OptionSpec.class);
}
});
LOG.debug("Done {}", Constants.Files.TWILL_JAR);
localFiles.put(Constants.Files.TWILL_JAR, createLocalFile(Constants.Files.TWILL_JAR, location, true));
}
/**
 * Creates (or reuses from the location cache) the application jar containing the
 * user's runnable classes, event handler, explicit dependencies and the optional
 * custom classloader class, and registers it as a localized archive.
 * <p>
 * The cache key is an MD5 over the sorted class names, so identical class sets
 * share one jar across launches.
 */
private void createApplicationJar(final ApplicationBundler bundler,
Map<String, LocalFile> localFiles) throws IOException {
try {
// Identity set: dedupe by Class object identity, not equals().
final Set<Class<?>> classes = Sets.newIdentityHashSet();
classes.addAll(dependencies);
ClassLoader classLoader = getClassLoader();
for (RuntimeSpecification spec : twillSpec.getRunnables().values()) {
classes.add(classLoader.loadClass(spec.getRunnableSpecification().getClassName()));
}
// Add the TwillRunnableEventHandler class
if (twillSpec.getEventHandler() != null) {
classes.add(classLoader.loadClass(twillSpec.getEventHandler().getClassName()));
}
// Optionally add the custom classloader class
if (classLoaderClassName != null) {
try {
classes.add(classLoader.loadClass(classLoaderClassName));
} catch (ClassNotFoundException e) {
// Don't throw if the classloader class is not found, as it can be available
// in the target cluster with appropriate classpath setting
LOG.debug("Cannot load custom classloader class '{}' when preparing for application launch",
classLoaderClassName);
}
}
// The location name is computed from the MD5 of all the classes names
// The localized name is always APPLICATION_JAR
// Sorting makes the hash deterministic regardless of set iteration order.
List<String> classList = classes.stream().map(Class::getName).sorted().collect(Collectors.toList());
Hasher hasher = Hashing.md5().newHasher();
for (String name : classList) {
hasher.putString(name);
}
// Only depends on class list so that it can be reused across different launches
String name = hasher.hash().toString() + "-" + Constants.Files.APPLICATION_JAR;
LOG.debug("Create and copy {}", Constants.Files.APPLICATION_JAR);
Location location = locationCache.get(name, new LocationCache.Loader() {
@Override
public void load(String name, Location targetLocation) throws IOException {
bundler.createBundle(targetLocation, classes);
}
});
LOG.debug("Done {}", Constants.Files.APPLICATION_JAR);
localFiles.put(Constants.Files.APPLICATION_JAR, createLocalFile(Constants.Files.APPLICATION_JAR, location, true));
} catch (ClassNotFoundException e) {
// Runnable/event-handler classes must be loadable locally; rethrow unchecked.
throw Throwables.propagate(e);
}
}
/**
 * Bundles the user-supplied resource URIs into a jar and registers it as a
 * localized archive. No-op when no resources were added.
 */
private void createResourcesJar(ApplicationBundler bundler, Map<String, LocalFile> localFiles) throws IOException {
// If there is no resources, no need to create the jar file.
if (resources.isEmpty()) {
return;
}
LOG.debug("Create and copy {}", Constants.Files.RESOURCES_JAR);
// Not cached: resources can differ per launch, unlike the class-based jars.
Location location = createTempLocation(Constants.Files.RESOURCES_JAR);
bundler.createBundle(location, Collections.<Class<?>>emptyList(), resources);
LOG.debug("Done {}", Constants.Files.RESOURCES_JAR);
localFiles.put(Constants.Files.RESOURCES_JAR, createLocalFile(Constants.Files.RESOURCES_JAR, location, true));
}
/**
 * Jars every file directly under {@code dir} (the staged runtime configuration)
 * into a temp location and registers it as a localized archive.
 * Only the top level of the directory is included (no recursion).
 */
private void createRuntimeConfigJar(Path dir, Map<String, LocalFile> localFiles) throws IOException {
LOG.debug("Create and copy {}", Constants.Files.RUNTIME_CONFIG_JAR);
// Jar everything under the given directory, which contains different files needed by AM/runnable containers
Location location = createTempLocation(Constants.Files.RUNTIME_CONFIG_JAR);
try (
JarOutputStream jarOutput = new JarOutputStream(location.getOutputStream());
DirectoryStream<Path> stream = Files.newDirectoryStream(dir)
) {
for (Path path : stream) {
jarOutput.putNextEntry(new JarEntry(path.getFileName().toString()));
Files.copy(path, jarOutput);
jarOutput.closeEntry();
}
}
LOG.debug("Done {}", Constants.Files.RUNTIME_CONFIG_JAR);
localFiles.put(Constants.Files.RUNTIME_CONFIG_JAR,
createLocalFile(Constants.Files.RUNTIME_CONFIG_JAR, location, true));
}
/**
 * Based on the given {@link TwillSpecification}, upload LocalFiles to Yarn Cluster.
 * Files whose URI scheme already matches the application location are referenced
 * in place; all others are copied into a temp location first.
 *
 * @param twillSpec The {@link TwillSpecification} for populating resource.
 * @return multimap from runnable name to the (possibly relocated) local files
 */
private Multimap<String, LocalFile> populateRunnableLocalFiles(TwillSpecification twillSpec) throws IOException {
Multimap<String, LocalFile> localFiles = HashMultimap.create();
LOG.debug("Populating Runnable LocalFiles");
for (Map.Entry<String, RuntimeSpecification> entry: twillSpec.getRunnables().entrySet()) {
String runnableName = entry.getKey();
for (LocalFile localFile : entry.getValue().getLocalFiles()) {
Location location;
URI uri = localFile.getURI();
if (appLocation.toURI().getScheme().equals(uri.getScheme())) {
// If the source file location is having the same scheme as the target location, no need to copy
location = appLocation.getLocationFactory().create(uri);
} else {
URL url = uri.toURL();
LOG.debug("Create and copy {} : {}", runnableName, url);
// Preserves original suffix for expansion.
location = copyFromURL(url, createTempLocation(Paths.addExtension(url.getFile(), localFile.getName())));
LOG.debug("Done {} : {}", runnableName, url);
}
// Re-wrap with the resolved location but keep the original archive/pattern flags.
localFiles.put(runnableName,
new DefaultLocalFile(localFile.getName(), location.toURI(), location.lastModified(),
location.length(), localFile.isArchive(), localFile.getPattern()));
}
}
LOG.debug("Done Runnable LocalFiles");
return localFiles;
}
/**
 * Uploads the runnables' local files, rewrites the specification to point at the
 * uploaded locations, and serializes the resulting runtime specification as JSON
 * into {@code targetFile}.
 *
 * @return the {@link TwillRuntimeSpecification} that was serialized
 */
private TwillRuntimeSpecification saveSpecification(TwillSpecification spec, Path targetFile) throws IOException {
final Multimap<String, LocalFile> runnableLocalFiles = populateRunnableLocalFiles(spec);
// Rewrite LocalFiles inside twillSpec
// Note: transformEntries returns a lazy view; entries are transformed when the
// map is read during serialization below.
Map<String, RuntimeSpecification> runtimeSpec = Maps.transformEntries(
spec.getRunnables(), new Maps.EntryTransformer<String, RuntimeSpecification, RuntimeSpecification>() {
@Override
public RuntimeSpecification transformEntry(String key, RuntimeSpecification value) {
return new DefaultRuntimeSpecification(value.getName(), value.getRunnableSpecification(),
value.getResourceSpecification(), runnableLocalFiles.get(key));
}
});
// Serialize into a local temp file.
LOG.debug("Creating {}", targetFile);
try (Writer writer = Files.newBufferedWriter(targetFile, StandardCharsets.UTF_8)) {
EventHandlerSpecification eventHandler = spec.getEventHandler();
if (eventHandler == null) {
// Default to log-only handling when no event handler was configured.
eventHandler = new LogOnlyEventHandler().configure();
}
TwillSpecification newTwillSpec = new DefaultTwillSpecification(spec.getName(), runtimeSpec, spec.getOrders(),
spec.getPlacementPolicies(), eventHandler);
// Only "twill."-prefixed configuration entries are shipped to the cluster.
Map<String, String> configMap = Maps.newHashMap();
for (Map.Entry<String, String> entry : config) {
if (entry.getKey().startsWith("twill.")) {
configMap.put(entry.getKey(), entry.getValue());
}
}
TwillRuntimeSpecification twillRuntimeSpec = new TwillRuntimeSpecification(
newTwillSpec, appLocation.getLocationFactory().getHomeLocation().getName(),
appLocation.toURI(), zkConnectString, runId, twillSpec.getName(),
config.get(YarnConfiguration.RM_SCHEDULER_ADDRESS),
logLevels, maxRetries, configMap, runnableConfigs);
TwillRuntimeSpecificationAdapter.create().toJson(twillRuntimeSpec, writer);
LOG.debug("Done {}", targetFile);
return twillRuntimeSpec;
}
}
/**
 * Copies the bundled logback template from the classpath into {@code targetFile}.
 * Silently does nothing when the template resource is absent.
 */
private void saveLogback(Path targetFile) throws IOException {
  URL templateUrl = getClass().getClassLoader().getResource(Constants.Files.LOGBACK_TEMPLATE);
  if (templateUrl == null) {
    // No template on the classpath; the AM/containers run without it.
    return;
  }
  LOG.debug("Creating {}", targetFile);
  try (InputStream input = templateUrl.openStream()) {
    Files.copy(input, targetFile);
  }
  LOG.debug("Done {}", targetFile);
}
/**
 * Creates the launcher.jar for launch the main application.
 * The jar contains {@link TwillLauncher}, {@link FindFreePort} and their
 * transitive class dependencies, streamed directly into the jar as they are
 * discovered. The result is cached in the location cache.
 */
private void createLauncherJar(Map<String, LocalFile> localFiles) throws URISyntaxException, IOException {
LOG.debug("Create and copy {}", Constants.Files.LAUNCHER_JAR);
Location location = locationCache.get(Constants.Files.LAUNCHER_JAR, new LocationCache.Loader() {
@Override
public void load(String name, Location targetLocation) throws IOException {
// Create a jar file with the TwillLauncher and FindFreePort and dependent classes inside.
try (JarOutputStream jarOut = new JarOutputStream(targetLocation.getOutputStream())) {
ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
if (classLoader == null) {
classLoader = getClass().getClassLoader();
}
Dependencies.findClassDependencies(classLoader, new ClassAcceptor() {
@Override
public boolean accept(String className, URL classUrl, URL classPathUrl) {
try {
// Write each discovered class file straight into the jar.
jarOut.putNextEntry(new JarEntry(className.replace('.', '/') + ".class"));
try (InputStream is = classUrl.openStream()) {
ByteStreams.copy(is, jarOut);
}
} catch (IOException e) {
// ClassAcceptor.accept cannot throw IOException; wrap as unchecked.
throw Throwables.propagate(e);
}
return true;
}
}, TwillLauncher.class.getName(), FindFreePort.class.getName());
}
}
});
LOG.debug("Done {}", Constants.Files.LAUNCHER_JAR);
localFiles.put(Constants.Files.LAUNCHER_JAR, createLocalFile(Constants.Files.LAUNCHER_JAR, location));
}
/**
 * Writes the application and container classpaths as colon-joined UTF-8 text
 * files under {@code targetDir}.
 */
private void saveClassPaths(Path targetDir) throws IOException {
  byte[] appClassPath = Joiner.on(':').join(applicationClassPaths).getBytes(StandardCharsets.UTF_8);
  Files.write(targetDir.resolve(Constants.Files.APPLICATION_CLASSPATH), appClassPath);

  byte[] containerClassPath = Joiner.on(':').join(classPaths).getBytes(StandardCharsets.UTF_8);
  Files.write(targetDir.resolve(Constants.Files.CLASSPATH), containerClassPath);
}
/**
 * Builds the effective {@link JvmOptions} (global extra options prepended to each
 * runnable's options, with the optional custom-classloader property appended) and
 * serializes them to {@code targetPath} unless everything is empty/default.
 *
 * @return the effective {@link JvmOptions}, whether or not a file was written
 */
private JvmOptions saveJvmOptions(final Path targetPath) throws IOException {
  // Append runnable specific extra options.
  Map<String, String> runnableExtraOptions = Maps.newHashMap(
    Maps.transformValues(this.runnableExtraOptions, new Function<String, String>() {
      @Override
      public String apply(String options) {
        return addClassLoaderClassName(extraOptions.isEmpty() ? options : extraOptions + " " + options);
      }
    }));

  String globalOptions = addClassLoaderClassName(extraOptions);
  JvmOptions jvmOptions = new JvmOptions(globalOptions, runnableExtraOptions, debugOptions);
  if (globalOptions.isEmpty() && runnableExtraOptions.isEmpty()
      && JvmOptions.DebugOptions.NO_DEBUG.equals(debugOptions)) {
    // If no vm options, no need to localize the file.
    return jvmOptions;
  }
  LOG.debug("Creating {}", targetPath);
  try (Writer writer = Files.newBufferedWriter(targetPath, StandardCharsets.UTF_8)) {
    // Fix: serialize the instance built above instead of constructing a second,
    // identical JvmOptions (redundant duplication of the same arguments).
    new Gson().toJson(jvmOptions, writer);
  }
  LOG.debug("Done {}", targetPath);
  return jvmOptions;
}
/**
 * Serializes the application and runnable arguments to {@code targetPath}
 * as UTF-8 text via {@link ArgumentsCodec}.
 */
private void saveArguments(Arguments arguments, final Path targetPath) throws IOException {
  LOG.debug("Creating {}", targetPath);
  // OutputSupplier is a single-method interface, so a lambda suffices here.
  ArgumentsCodec.encode(arguments,
                        () -> Files.newBufferedWriter(targetPath, StandardCharsets.UTF_8));
  LOG.debug("Done {}", targetPath);
}
/**
 * Serializes the per-runnable environment variable maps as JSON to
 * {@code targetPath}. No file is written when no environments were set.
 */
private void saveEnvironments(Path targetPath) throws IOException {
if (environments.isEmpty()) {
return;
}
LOG.debug("Creating {}", targetPath);
try (Writer writer = Files.newBufferedWriter(targetPath, StandardCharsets.UTF_8)) {
new Gson().toJson(environments, writer);
}
LOG.debug("Done {}", targetPath);
}
/**
 * Serializes the information for files that are localized to all YARN containers.
 * The resulting JSON is itself added to {@code localFiles} (after serialization,
 * so the entry does not describe itself).
 */
private void createLocalizeFilesJson(Map<String, LocalFile> localFiles) throws IOException {
LOG.debug("Create and copy {}", Constants.Files.LOCALIZE_FILES);
Location location = createTempLocation(Constants.Files.LOCALIZE_FILES);
// Serialize the list of LocalFiles, except the one we are generating here, as this file is used by AM only.
// This file should never use LocationCache.
try (Writer writer = new OutputStreamWriter(location.getOutputStream(), StandardCharsets.UTF_8)) {
new GsonBuilder().registerTypeAdapter(LocalFile.class, new LocalFileCodec())
.create().toJson(localFiles.values(), new TypeToken<List<LocalFile>>() {
}.getType(), writer);
}
LOG.debug("Done {}", Constants.Files.LOCALIZE_FILES);
localFiles.put(Constants.Files.LOCALIZE_FILES, createLocalFile(Constants.Files.LOCALIZE_FILES, location));
}
/**
 * Copies the content behind {@code url} into {@code target}.
 *
 * @return the target location, for convenient chaining
 */
private Location copyFromURL(URL url, Location target) throws IOException {
  try (InputStream input = url.openStream()) {
    try (OutputStream output = new BufferedOutputStream(target.getOutputStream())) {
      ByteStreams.copy(input, output);
    }
  }
  return target;
}
/**
 * Creates a temp location under the application location, preserving the file's
 * extension as the temp-file suffix.
 * NOTE(review): assumes {@code fileName} always contains an extension; with an
 * extensionless name the substring below would drop the last character — all
 * current callers pass names like "*.jar"/"*.json", but confirm before reusing.
 */
private Location createTempLocation(String fileName) {
String name;
String suffix = Paths.getExtension(fileName);
name = fileName.substring(0, fileName.length() - suffix.length() - 1);
try {
return appLocation.append(name).getTempFile('.' + suffix);
} catch (IOException e) {
// Location API failures are unrecoverable here; rethrow unchecked.
throw Throwables.propagate(e);
}
}
/**
 * Returns the thread context ClassLoader when one is set; otherwise falls back
 * to the ClassLoader that loaded this class.
 */
private ClassLoader getClassLoader() {
  ClassLoader contextLoader = Thread.currentThread().getContextClassLoader();
  if (contextLoader != null) {
    return contextLoader;
  }
  return getClass().getClassLoader();
}
/**
 * Creates an {@link ApplicationBundler} that filters classes through the given
 * acceptor and stages its work in the configured local staging directory.
 */
private ApplicationBundler createBundler(ClassAcceptor classAcceptor) {
return new ApplicationBundler(classAcceptor).setTempDir(getLocalStagingDir());
}
}
|
google/j2objc | 35,748 | xalan/third_party/android/platform/external/apache-xml/src/main/java/org/apache/xpath/NodeSet.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* $Id: NodeSet.java 468655 2006-10-28 07:12:06Z minchau $
*/
package org.apache.xpath;
import org.apache.xalan.res.XSLMessages;
import org.apache.xml.utils.DOM2Helper;
import org.apache.xpath.axes.ContextNodeList;
import org.apache.xpath.res.XPATHErrorResources;
import org.w3c.dom.DOMException;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.w3c.dom.traversal.NodeFilter;
import org.w3c.dom.traversal.NodeIterator;
/**
* <p>The NodeSet class can act as either a NodeVector,
* NodeList, or NodeIterator. However, in order for it to
* act as a NodeVector or NodeList, it's required that
* setShouldCacheNodes(true) be called before the first
* nextNode() is called, in order that nodes can be added
* as they are fetched. Derived classes that implement iterators
* must override runTo(int index), in order that they may
* run the iteration to the given index. </p>
*
* <p>Note that we directly implement the DOM's NodeIterator
* interface. We do not emulate all the behavior of the
* standard NodeIterator. In particular, we do not guarantee
* to present a "live view" of the document ... but in XSLT,
* the source document should never be mutated, so this should
* never be an issue.</p>
*
* <p>Thought: Should NodeSet really implement NodeList and NodeIterator,
* or should there be specific subclasses of it which do so? The
* advantage of doing it all here is that all NodeSets will respond
* to the same calls; the disadvantage is that some of them may return
* less-than-enlightening results when you do so.</p>
* @xsl.usage advanced
*/
public class NodeSet
implements NodeList, NodeIterator, Cloneable, ContextNodeList
{
/**
 * Create an empty nodelist.
 */
public NodeSet()
{
m_blocksize = 32;   // default growth increment for the backing node vector
m_mapSize = 0;      // backing array not yet allocated
}
/**
 * Create an empty, using the given block size.
 *
 * @param blocksize Size of blocks to allocate (growth increment for the
 * backing node vector)
 */
public NodeSet(int blocksize)
{
m_blocksize = blocksize;
m_mapSize = 0;   // backing array allocated lazily on first add
}
/**
 * Create a NodeSet, and copy the members of the
 * given nodelist into it. Null entries in the source list are skipped.
 *
 * @param nodelist List of Nodes to be made members of the new set.
 */
public NodeSet(NodeList nodelist)
{
this(32);   // default block size
addNodes(nodelist);
}
/**
 * Create a NodeSet, and copy the members of the
 * given NodeSet into it.
 *
 * @param nodelist Set of Nodes to be made members of the new set.
 */
public NodeSet(NodeSet nodelist)
{
this(32);
// Cast disambiguates between the NodeList and NodeIterator overloads of addNodes.
addNodes((NodeIterator) nodelist);
}
/**
 * Create a NodeSet, and copy the members of the
 * given NodeIterator into it. The iterator is consumed to exhaustion.
 *
 * @param ni Iterator which yields Nodes to be made members of the new set.
 */
public NodeSet(NodeIterator ni)
{
this(32);
addNodes(ni);
}
/**
 * Create a NodeSet which contains the given Node.
 *
 * @param node Single node to be added to the new set.
 */
public NodeSet(Node node)
{
this(32);
addNode(node);
}
/**
 * @return The root node of the Iterator, as specified when it was created.
 * For non-Iterator NodeSets, this will be null.
 */
public Node getRoot()
{
// A plain NodeSet is not rooted at any document node.
return null;
}
/**
 * Produces a copy of this NodeSet whose iteration position has been rewound
 * to the start, leaving this instance untouched.
 *
 * @return a new NodeSet of the same type with <code>reset()</code> applied.
 *
 * @throws CloneNotSupportedException if this subclass of NodeSet
 * does not support the clone() operation.
 */
public NodeIterator cloneWithReset() throws CloneNotSupportedException
{

  NodeSet copy = (NodeSet) clone();

  copy.reset();

  return copy;
}
/**
 * Reset the iterator. May have no effect on non-iterator Nodesets.
 */
public void reset()
{
// Rewind the cursor to the "fresh" position before the first node.
m_next = 0;
}
/**
 * This attribute determines which node types are presented via the
 * iterator. The available set of constants is defined in the
 * <code>NodeFilter</code> interface. For NodeSets, the mask has been
 * hardcoded to show all nodes except EntityReference nodes, which have
 * no equivalent in the XPath data model.
 *
 * @return integer used as a bit-array, containing flags defined in
 * the DOM's NodeFilter class. The value will be
 * <code>SHOW_ALL & ~SHOW_ENTITY_REFERENCE</code>, meaning that
 * only entity references are suppressed.
 */
public int getWhatToShow()
{
// Constant: the mask never varies per instance.
return NodeFilter.SHOW_ALL & ~NodeFilter.SHOW_ENTITY_REFERENCE;
}
/**
 * The filter object used to screen nodes. Filters are applied to
 * further reduce (and restructure) the NodeIterator's view of the
 * document. In our case, we will be using hardcoded filters built
 * into our iterators... but getFilter() is part of the DOM's
 * NodeIterator interface, so we have to support it.
 *
 * @return null, which is slightly misleading. True, there is no
 * user-written filter object, but in fact we are doing some very
 * sophisticated custom filtering. A DOM purist might suggest
 * returning a placeholder object just to indicate that this is
 * not going to return all nodes selected by whatToShow.
 */
public NodeFilter getFilter()
{
// No user-installable filter is supported; filtering is internal.
return null;
}
/**
 * The value of this flag determines whether the children of entity
 * reference nodes are visible to the iterator. If false, they will be
 * skipped over.
 * <br> To produce a view of the document that has entity references
 * expanded and does not expose the entity reference node itself, use the
 * whatToShow flags to hide the entity reference node and set
 * expandEntityReferences to true when creating the iterator. To produce
 * a view of the document that has entity reference nodes but no entity
 * expansion, use the whatToShow flags to show the entity reference node
 * and set expandEntityReferences to false.
 *
 * @return true for all iterators based on NodeSet, meaning that the
 * contents of EntityReference nodes may be returned (though whatToShow
 * says that the EntityReferences themselves are not shown.)
 */
public boolean getExpandEntityReferences()
{
// Constant: entity reference contents are always visible.
return true;
}
/**
 * Returns the next node in the set and advances the position of the
 * iterator in the set. After a NodeIterator is created, the first call
 * to nextNode() returns the first node in the set.
 *
 * @return The next <code>Node</code> in the set being iterated over, or
 * <code>null</code> if there are no more members in that set.
 * @throws DOMException
 * INVALID_STATE_ERR: Raised if this method is called after the
 * <code>detach</code> method was invoked.
 */
public Node nextNode() throws DOMException
{

  // Guard clause: past the end of the set.
  if (m_next >= this.size())
    return null;

  Node result = this.elementAt(m_next);

  m_next++;

  return result;
}
/**
 * Returns the previous node in the set and moves the position of the
 * iterator backwards in the set.
 *
 * @return The previous <code>Node</code> in the set being iterated over,
 * or <code>null</code> if the iterator is already before the first node.
 * @throws DOMException
 * INVALID_STATE_ERR: Raised if this method is called after the
 * <code>detach</code> method was invoked.
 * @throws RuntimeException thrown if this NodeSet is not of
 * a cached type, and hence doesn't know what the previous node was.
 */
public Node previousNode() throws DOMException
{

  if (!m_cacheNodes)
    throw new RuntimeException(
      XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESET_CANNOT_ITERATE, null)); //"This NodeSet can not iterate to a previous node!");

  // Bug fix: the old condition ((m_next - 1) > 0) could never step back to
  // index 0, so the first node was unreachable via previousNode(). Per DOM
  // traversal semantics, stepping back from position 1 must yield element 0.
  if (m_next > 0)
  {
    m_next--;

    return this.elementAt(m_next);
  }
  else
    return null;
}
/**
 * Detaches the iterator from the set which it iterated over, releasing
 * any computational resources and placing the iterator in the INVALID
 * state. After<code>detach</code> has been invoked, calls to
 * <code>nextNode</code> or<code>previousNode</code> will raise the
 * exception INVALID_STATE_ERR.
 * <p>
 * This operation is a no-op in NodeSet, and will not cause
 * INVALID_STATE_ERR to be raised by later operations.
 * </p>
 */
public void detach(){}  // intentionally empty: nothing to release
/**
 * Tells if this NodeSet is "fresh", in other words, if
 * the first nextNode() that is called will return the
 * first node in the set.
 *
 * @return true if nextNode() would return the first node in the set,
 * false if it would return a later one.
 */
public boolean isFresh()
{
// Fresh means the cursor is at the position before the first node.
return (m_next == 0);
}
/**
 * If an index is requested, NodeSet will call this method
 * to run the iterator to the index. By default this sets
 * m_next to the index. If the index argument is -1, this
 * signals that the iterator should be run to the end.
 *
 * @param index Position to advance (or retreat) to, with
 * 0 requesting the reset ("fresh") position and -1 (or indeed
 * any out-of-bounds value) requesting the final position.
 * @throws RuntimeException thrown if this NodeSet is not
 * one of the types which supports indexing/counting.
 */
public void runTo(int index)
{

if (!m_cacheNodes)
throw new RuntimeException(
XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESET_CANNOT_INDEX, null)); //"This NodeSet can not do indexing or counting functions!");

// NOTE(review): the bounds check compares m_next (the current cursor) rather
// than index against m_firstFree, which looks suspicious; preserved as-is
// because item()/getLength() depend on the existing behavior — verify upstream.
if ((index >= 0) && (m_next < m_firstFree))
m_next = index;
else
m_next = m_firstFree - 1;
}
/**
 * Returns the <code>index</code>th item in the collection. If
 * <code>index</code> is greater than or equal to the number of nodes in
 * the list, this returns <code>null</code>.
 *
 * TODO: What happens if index is out of range?
 *
 * @param index Index into the collection.
 * @return The node at the <code>index</code>th position in the
 * <code>NodeList</code>, or <code>null</code> if that is not a valid
 * index.
 * @throws RuntimeException (from runTo) if this NodeSet does not cache nodes.
 */
public Node item(int index)
{

// Ensure the iteration has progressed at least to the requested index.
runTo(index);

return (Node) this.elementAt(index);
}
/**
 * The number of nodes in the list. The range of valid child node indices is
 * 0 to <code>length-1</code> inclusive. Note that this operation requires
 * finding all the matching nodes, which may defeat attempts to defer
 * that work.
 *
 * @return integer indicating how many nodes are represented by this list.
 * @throws RuntimeException (from runTo) if this NodeSet does not cache nodes.
 */
public int getLength()
{

// Run the iteration to the end so size() reflects the full set.
runTo(-1);

return this.size();
}
/**
 * Add a node to the NodeSet. Not all types of NodeSets support this
 * operation
 *
 * @param n Node to be added
 * @throws RuntimeException thrown if this NodeSet is not of
 * a mutable type.
 */
public void addNode(Node n)
{

if (!m_mutable)
throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESET_NOT_MUTABLE, null)); //"This NodeSet is not mutable!");

// Appends to the end; no document-order or duplicate checking here.
this.addElement(n);
}
/**
 * Insert a node at a given position.
 *
 * @param n Node to be added
 * @param pos Offset at which the node is to be inserted,
 * with 0 being the first position.
 * @throws RuntimeException thrown if this NodeSet is not of
 * a mutable type.
 */
public void insertNode(Node n, int pos)
{

if (!m_mutable)
throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESET_NOT_MUTABLE, null)); //"This NodeSet is not mutable!");

// Shifts subsequent elements right; no duplicate checking.
insertElementAt(n, pos);
}
/**
 * Remove a node.
 *
 * @param n Node to be removed (first occurrence only)
 * @throws RuntimeException thrown if this NodeSet is not of
 * a mutable type.
 */
public void removeNode(Node n)
{

if (!m_mutable)
throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESET_NOT_MUTABLE, null)); //"This NodeSet is not mutable!");

this.removeElement(n);
}
/**
 * Appends every non-null member of the given NodeList to this set, preserving
 * the list's order. A null list is tolerated and treated as empty.
 *
 * @param nodelist List of nodes which should now be referenced by
 * this NodeSet.
 * @throws RuntimeException thrown if this NodeSet is not of
 * a mutable type.
 */
public void addNodes(NodeList nodelist)
{

  if (!m_mutable)
    throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESET_NOT_MUTABLE, null)); //"This NodeSet is not mutable!");

  // Tolerate a null list (defensive; see historical bug report).
  if (null == nodelist)
    return;

  int count = nodelist.getLength();

  for (int i = 0; i < count; i++)
  {
    Node candidate = nodelist.item(i);

    if (null != candidate)
      addElement(candidate);
  }

  // checkDups();
}
/**
 * <p>Copy NodeList members into this nodelist, adding in
 * document order. Only genuine node references will be copied;
 * nulls appearing in the source NodeSet will
 * not be added to this one. </p>
 *
 * <p> In case you're wondering why this function is needed: NodeSet
 * implements both NodeIterator and NodeList. If this method isn't
 * provided, Java can't decide which of those to use when addNodes()
 * is invoked. Providing the more-explicit match avoids that
 * ambiguity.)</p>
 *
 * @param ns NodeSet whose members should be merged into this NodeSet.
 * @throws RuntimeException thrown if this NodeSet is not of
 * a mutable type.
 */
public void addNodes(NodeSet ns)
{

if (!m_mutable)
throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESET_NOT_MUTABLE, null)); //"This NodeSet is not mutable!");

// The cast selects the NodeIterator overload, consuming ns's iteration state.
addNodes((NodeIterator) ns);
}
/**
 * Drains the given NodeIterator into this set, appending each yielded node.
 * Null iterators are tolerated and treated as empty.
 *
 * @param iterator NodeIterator which yields the nodes to be added.
 * @throws RuntimeException thrown if this NodeSet is not of
 * a mutable type.
 */
public void addNodes(NodeIterator iterator)
{

  if (!m_mutable)
    throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESET_NOT_MUTABLE, null)); //"This NodeSet is not mutable!");

  // Tolerate a null iterator (defensive; see historical bug report).
  if (null == iterator)
    return;

  for (Node node = iterator.nextNode(); null != node; node = iterator.nextNode())
  {
    addElement(node);
  }

  // checkDups();
}
/**
 * Merges every non-null member of the given NodeList into this set, inserting
 * each one at its document-order position.
 *
 * @param nodelist List of nodes to be added
 * @param support The XPath runtime context.
 * @throws RuntimeException thrown if this NodeSet is not of
 * a mutable type.
 */
public void addNodesInDocOrder(NodeList nodelist, XPathContext support)
{

  if (!m_mutable)
    throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESET_NOT_MUTABLE, null)); //"This NodeSet is not mutable!");

  int count = nodelist.getLength();

  for (int i = 0; i < count; i++)
  {
    Node candidate = nodelist.item(i);

    if (null != candidate)
      addNodeInDocOrder(candidate, support);
  }
}
/**
 * Drains the given NodeIterator, inserting each yielded node at its
 * document-order position in this set.
 *
 * @param iterator NodeIterator which yields the nodes to be added.
 * @param support The XPath runtime context.
 * @throws RuntimeException thrown if this NodeSet is not of
 * a mutable type.
 */
public void addNodesInDocOrder(NodeIterator iterator, XPathContext support)
{

  if (!m_mutable)
    throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESET_NOT_MUTABLE, null)); //"This NodeSet is not mutable!");

  for (Node node = iterator.nextNode(); null != node; node = iterator.nextNode())
  {
    addNodeInDocOrder(node, support);
  }
}
/**
 * Add the node at nodelist[testIndex] to this set in document order,
 * scanning the slice [start, end] of the current contents backwards to
 * find the insertion point. Private helper for the public
 * addNodesInDocOrder methods; the recursion re-positions the earlier
 * nodelist entries once a home for the test node has been found.
 *
 * @param start     low index (inclusive) of the slice to scan.
 * @param end       high index (inclusive) of the slice to scan.
 * @param testIndex index into nodelist of the node being placed.
 * @param nodelist  The nodelist to add.
 * @param support   The XPath runtime context.
 *
 * @return false always (see note on foundit below).
 * @throws RuntimeException thrown if this NodeSet is not of
 * a mutable type.
 */
private boolean addNodesInDocOrder(int start, int end, int testIndex,
NodeList nodelist, XPathContext support)
{
  if (!m_mutable)
    throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESET_NOT_MUTABLE, null)); //"This NodeSet is not mutable!");

  // NOTE(review): foundit is never assigned true anywhere in this method,
  // so it always returns false; the "foundPrev" recursion below therefore
  // always falls through to the second addNodesInDocOrder call.
  boolean foundit = false;
  int i;
  Node node = nodelist.item(testIndex);

  // Walk the slice backwards; stop at the first element the new node does
  // NOT come after in document order (that is its insertion point).
  for (i = end; i >= start; i--)
  {
    Node child = (Node) elementAt(i);

    if (child == node)
    {
      i = -2; // Duplicate, suppress insert
      break;
    }

    if (!DOM2Helper.isNodeAfter(node, child))
    {
      insertElementAt(node, i + 1);
      testIndex--;

      // Recursively place the preceding nodelist entries relative to the
      // slot just filled: first in [0, i], then (if not found there) in
      // the tail of the grown list.
      if (testIndex > 0)
      {
        boolean foundPrev = addNodesInDocOrder(0, i, testIndex, nodelist,
support);

        if (!foundPrev)
        {
          addNodesInDocOrder(i, size() - 1, testIndex, nodelist, support);
        }
      }

      break;
    }
  }

  // Fell off the front of the slice (i == -1): the node precedes
  // everything scanned, so it goes to position 0.
  if (i == -1)
  {
    insertElementAt(node, 0);
  }

  return foundit;
}
/**
 * Add the node into a vector of nodes where it should occur in
 * document order.
 *
 * @param node    The node to be added.
 * @param test    true to insert at the document-order position; false to
 *                append (only if not already present) without ordering.
 * @param support The XPath runtime context.
 * @return the index the node was inserted at; in unordered mode, the
 *         pre-append size (even if the node was already present); -1 if
 *         an ordered insert was suppressed as a duplicate.
 * @throws RuntimeException thrown if this NodeSet is not of
 * a mutable type.
 */
public int addNodeInDocOrder(Node node, boolean test, XPathContext support)
{
  if (!m_mutable)
    throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESET_NOT_MUTABLE, null)); //"This NodeSet is not mutable!");

  int insertIndex = -1;

  if (test)
  {
    // This needs to do a binary search, but a binary search
    // is somewhat tough because the sequence test involves
    // two nodes.
    int size = size(), i;

    // Walk backwards from the tail until we hit an element the new node
    // does not follow in document order, or a duplicate (identity compare).
    for (i = size - 1; i >= 0; i--)
    {
      Node child = (Node) elementAt(i);

      if (child == node)
      {
        i = -2; // Duplicate, suppress insert
        break;
      }

      if (!DOM2Helper.isNodeAfter(node, child))
      {
        break;
      }
    }

    if (i != -2)
    {
      insertIndex = i + 1;

      insertElementAt(node, insertIndex);
    }
  }
  else
  {
    // Unordered mode: append only if not already present (equals compare,
    // unlike the identity compare used above).
    insertIndex = this.size();

    boolean foundit = false;

    for (int i = 0; i < insertIndex; i++)
    {
      if (this.item(i).equals(node))
      {
        foundit = true;

        break;
      }
    }

    if (!foundit)
      addElement(node);
  }

  // checkDups();
  return insertIndex;
} // end addNodeInDocOrder(Vector v, Object obj)
/**
 * Add the node into a vector of nodes where it should occur in
 * document order. Convenience overload that always performs the
 * document-order test.
 *
 * @param node    The node to be added.
 * @param support The XPath runtime context.
 * @return The index where it was inserted.
 * @throws RuntimeException thrown if this NodeSet is not of a mutable type.
 */
public int addNodeInDocOrder(Node node, XPathContext support)
{
  if (!m_mutable)
  {
    throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESET_NOT_MUTABLE, null));
  }

  return addNodeInDocOrder(node, true, support);
} // end addNodeInDocOrder(Vector v, Object obj)
/** If this node is being used as an iterator, the next index that nextNode()
 * will return. Reset/advanced by the iteration methods; exposed via
 * getCurrentPos()/setCurrentPos(). */
transient protected int m_next = 0;
/**
 * Get the current position, which is one less than
 * the next nextNode() call will retrieve. i.e. if
 * you call getCurrentPos() and the return is 0, the next
 * fetch will take place at index 1.
 *
 * @return The current position index.
 */
public int getCurrentPos()
{
  return m_next;
}
/**
 * Set the current position in the node set.
 *
 * @param i Must be a valid index into the set.
 * @throws RuntimeException thrown if this NodeSet is not of
 * a cached type, and thus doesn't permit indexed access.
 */
public void setCurrentPos(int i)
{
  if (!m_cacheNodes)
  {
    throw new RuntimeException(
        XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESET_CANNOT_INDEX, null));
  }

  m_next = i;
}
/**
 * Return the node at the current iteration position without advancing.
 * Needed to support the UnionPathIterator.
 *
 * @return the node at position m_next, or null if the position is past
 *         the last stored node.
 * @throws RuntimeException thrown if this NodeSet is not of
 * a cached type, and thus doesn't permit indexed access.
 */
public Node getCurrentNode()
{
  if (!m_cacheNodes)
    throw new RuntimeException(
        XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESET_CANNOT_INDEX, null));

  // The original saved and restored m_next around this read ("HACK" per
  // its own comment), but nothing between the save and restore mutates
  // m_next — elementAt() is a pure array read — so the save/restore was
  // dead code and has been removed.
  return (m_next < m_firstFree) ? elementAt(m_next) : null;
}
/** True if this list can be mutated. Guards every structural modifier in
 * this class. */
transient protected boolean m_mutable = true;

/** True if this list is cached. Note the field is transient, so the
 * "@serial" tag is historical: the value does not actually serialize.
 * @serial */
transient protected boolean m_cacheNodes = true;
/**
 * Get whether or not this is a cached node set.
 *
 * @return True if this list is cached (defaults to true in this class).
 */
public boolean getShouldCacheNodes()
{
  return m_cacheNodes;
}
/**
 * If setShouldCacheNodes(true) is called, then nodes will
 * be cached. (Note: m_cacheNodes initializes to true in this class, so
 * this call is only needed to change the setting — the original claim
 * that nodes "are not cached by default" does not match the field's
 * initializer here.) This switch must be set before the first call to
 * nextNode is made, to ensure that all nodes are cached.
 *
 * Side effect: unconditionally re-enables mutability, regardless of b.
 *
 * @param b true if this node set should be cached.
 * @throws RuntimeException thrown if an attempt is made to
 * request caching after we've already begun stepping through the
 * nodes in this set.
 */
public void setShouldCacheNodes(boolean b)
{
  if (!isFresh())
    throw new RuntimeException(
        XSLMessages.createXPATHMessage(XPATHErrorResources.ER_CANNOT_CALL_SETSHOULDCACHENODE, null)); //"Can not call setShouldCacheNodes after nextNode has been called!");

  m_cacheNodes = b;
  m_mutable = true;
}
transient private int m_last = 0;
/**
 * Get the index of the last node, as previously recorded by setLast().
 *
 * @return the cached last-node index (0 if never set).
 */
public int getLast()
{
  return m_last;
}
/**
 * Record the index of the last node, for later retrieval via getLast().
 *
 * @param last the last-node index to cache.
 */
public void setLast(int last)
{
  m_last = last;
}
/** Size of blocks to allocate: the backing array grows in increments of
 * this many slots.
 * @serial */
private int m_blocksize;

/** Array of nodes this points to. Allocated lazily; grown by m_blocksize.
 * @serial */
Node m_map[];

/** Number of nodes in this NodeVector; equivalently, the index of the
 * first unused slot in m_map.
 * @serial */
protected int m_firstFree = 0;

/** Size of the array this points to.
 * @serial */
private int m_mapSize; // lazy initialization
/**
 * Produce a copy of this NodeSet whose backing array is independent of
 * the original's, so mutations of one cannot affect the other.
 *
 * @return A clone of this NodeSet.
 * @throws CloneNotSupportedException if the superclass refuses to clone.
 */
public Object clone() throws CloneNotSupportedException
{
  NodeSet copy = (NodeSet) super.clone();

  // super.clone() is shallow: detach the shared backing array.
  if ((null != this.m_map) && (this.m_map == copy.m_map))
  {
    Node[] detached = new Node[this.m_map.length];

    System.arraycopy(this.m_map, 0, detached, 0, this.m_map.length);

    copy.m_map = detached;
  }

  return copy;
}
/**
 * Get the length of the list.
 *
 * @return Number of nodes in this NodeVector (m_firstFree doubles as the
 *         element count and the first free slot index).
 */
public int size()
{
  return m_firstFree;
}
/**
 * Append a Node onto the vector, growing the backing array by one block
 * when necessary.
 *
 * @param value Node to add to the vector.
 * @throws RuntimeException thrown if this NodeSet is not of a mutable type.
 */
public void addElement(Node value)
{
  if (!m_mutable)
  {
    throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESET_NOT_MUTABLE, null));
  }

  if ((m_firstFree + 1) >= m_mapSize)
  {
    if (m_map == null)
    {
      // First use: allocate one block.
      m_map = new Node[m_blocksize];
      m_mapSize = m_blocksize;
    }
    else
    {
      // Grow by one block and carry the existing contents over.
      int grownSize = m_mapSize + m_blocksize;
      Node[] grown = new Node[grownSize];

      System.arraycopy(m_map, 0, grown, 0, m_firstFree + 1);

      m_map = grown;
      m_mapSize = grownSize;
    }
  }

  m_map[m_firstFree++] = value;
}
/**
 * Append a Node onto the vector. Identical in effect to addElement(),
 * but final, unguarded (no mutability check) and tuned for stack use.
 *
 * @param value Node to add to the vector.
 */
public final void push(Node value)
{
  int tail = m_firstFree;

  if ((tail + 1) >= m_mapSize)
  {
    if (m_map == null)
    {
      m_map = new Node[m_blocksize];
      m_mapSize = m_blocksize;
    }
    else
    {
      int grownSize = m_mapSize + m_blocksize;
      Node[] grown = new Node[grownSize];

      System.arraycopy(m_map, 0, grown, 0, tail + 1);

      m_map = grown;
      m_mapSize = grownSize;
    }
  }

  m_map[tail] = value;
  m_firstFree = tail + 1;
}
/**
 * Pop a node from the tail of the vector and return the result.
 *
 * @return the node that was at the tail of the vector.
 */
public final Node pop()
{
  final int top = --m_firstFree;
  final Node popped = m_map[top];

  m_map[top] = null; // release the slot for GC

  return popped;
}
/**
 * Pop a node from the tail of the vector and return the
 * new top of the stack after the pop.
 *
 * @return The top of the stack after it's been popped, or null if the
 *         stack became empty.
 */
public final Node popAndTop()
{
  final int top = --m_firstFree;

  m_map[top] = null;

  return (top == 0) ? null : m_map[top - 1];
}
/**
 * Pop a node from the tail of the vector, discarding it.
 */
public final void popQuick()
{
  m_map[--m_firstFree] = null;
}
/**
 * Return the node at the top of the stack without popping the stack.
 * Special purpose method for TransformerImpl, pushElemTemplateElement.
 * Performance critical.
 *
 * @return Node at the top of the stack or null if stack is empty.
 */
public final Node peepOrNull()
{
  if (m_map == null || m_firstFree <= 0)
    return null;

  return m_map[m_firstFree - 1];
}
/**
 * Push a pair of nodes into the stack.
 * Special purpose method for TransformerImpl, pushElemTemplateElement.
 * Performance critical.
 *
 * NOTE(review): the first-allocation branch assumes m_blocksize >= 2 —
 * with a smaller block size the two stores below would overflow the
 * freshly allocated array. Confirm against the constructors (not visible
 * in this chunk).
 *
 * @param v1 First node to add to vector
 * @param v2 Second node to add to vector
 */
public final void pushPair(Node v1, Node v2)
{
  if (null == m_map)
  {
    m_map = new Node[m_blocksize];
    m_mapSize = m_blocksize;
  }
  else
  {
    // Grow by one block when fewer than two free slots remain.
    if ((m_firstFree + 2) >= m_mapSize)
    {
      m_mapSize += m_blocksize;

      Node newMap[] = new Node[m_mapSize];

      System.arraycopy(m_map, 0, newMap, 0, m_firstFree);

      m_map = newMap;
    }
  }

  m_map[m_firstFree] = v1;
  m_map[m_firstFree + 1] = v2;
  m_firstFree += 2;
}
/**
 * Pop a pair of nodes from the tail of the stack.
 * Special purpose method for TransformerImpl, pushElemTemplateElement.
 * Performance critical.
 */
public final void popPair()
{
  final int base = m_firstFree - 2;

  m_map[base] = null;
  m_map[base + 1] = null;

  m_firstFree = base;
}
/**
 * Set the tail of the stack to the given node.
 * Special purpose method for TransformerImpl, pushElemTemplateElement.
 * Performance critical: no mutability or bounds checks are performed,
 * so the stack must be non-empty when called.
 *
 * @param n Node to set at the tail of vector
 */
public final void setTail(Node n)
{
  m_map[m_firstFree - 1] = n;
}
/**
 * Set the given node one position from the tail.
 * Special purpose method for TransformerImpl, pushElemTemplateElement.
 * Performance critical: no checks are performed, so the stack must hold
 * at least two entries when called.
 *
 * @param n Node to set
 */
public final void setTailSub1(Node n)
{
  m_map[m_firstFree - 2] = n;
}
/**
 * Return the node at the tail of the vector without popping.
 * Special purpose method for TransformerImpl, pushElemTemplateElement.
 * Performance critical: no checks are performed, so the stack must be
 * non-empty when called.
 *
 * @return Node at the tail of the vector
 */
public final Node peepTail()
{
  return m_map[m_firstFree - 1];
}
/**
 * Return the node one position from the tail without popping.
 * Special purpose method for TransformerImpl, pushElemTemplateElement.
 * Performance critical: no checks are performed, so the stack must hold
 * at least two entries when called.
 *
 * @return Node one away from the tail
 */
public final Node peepTailSub1()
{
  return m_map[m_firstFree - 2];
}
/**
 * Inserts the specified node in this vector at the specified index.
 * Each component in this vector with an index greater or equal to
 * the specified index is shifted upward to have an index one greater
 * than the value it had previously.
 *
 * @param value Node to insert
 * @param at    Position where to insert
 * @throws RuntimeException thrown if this NodeSet is not of a mutable type.
 */
public void insertElementAt(Node value, int at)
{
  if (!m_mutable)
  {
    throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESET_NOT_MUTABLE, null));
  }

  if (m_map == null)
  {
    m_map = new Node[m_blocksize];
    m_mapSize = m_blocksize;
  }
  else if ((m_firstFree + 1) >= m_mapSize)
  {
    int grownSize = m_mapSize + m_blocksize;
    Node[] grown = new Node[grownSize];

    System.arraycopy(m_map, 0, grown, 0, m_firstFree + 1);

    m_map = grown;
    m_mapSize = grownSize;
  }

  // Shift the tail up one slot to open a hole at 'at'.
  if (at <= (m_firstFree - 1))
  {
    System.arraycopy(m_map, at, m_map, at + 1, m_firstFree - at);
  }

  m_map[at] = value;
  m_firstFree++;
}
/**
 * Append the nodes of another NodeSet to the end of this list, growing
 * the backing array as needed.
 *
 * @param nodes NodeVector to append to this list.
 */
public void appendNodes(NodeSet nodes)
{
  int nNodes = nodes.size();

  if (null == m_map)
  {
    m_mapSize = nNodes + m_blocksize;
    m_map = new Node[m_mapSize];
  }
  else if ((m_firstFree + nNodes) >= m_mapSize)
  {
    m_mapSize += (nNodes + m_blocksize);

    Node newMap[] = new Node[m_mapSize];

    // BUG FIX: copy only the m_firstFree nodes actually stored. The
    // original copied m_firstFree + nNodes entries out of the OLD array,
    // which throws IndexOutOfBoundsException whenever
    // m_firstFree + nNodes exceeds the old capacity — exactly the case
    // this growth branch exists for.
    System.arraycopy(m_map, 0, newMap, 0, m_firstFree);

    m_map = newMap;
  }

  System.arraycopy(nodes.m_map, 0, m_map, m_firstFree, nNodes);

  m_firstFree += nNodes;
}
/**
 * Remove all nodes from this set: null out every occupied slot (so the
 * Nodes can be garbage-collected) and reset the size to zero. The
 * backing array itself is retained for reuse. (The previous javadoc
 * here was a copy-paste of insertElementAt's and described the wrong
 * operation.)
 */
public void removeAllElements()
{
  if (null == m_map)
    return;

  for (int i = 0; i < m_firstFree; i++)
  {
    m_map[i] = null;
  }

  m_firstFree = 0;
}
/**
 * Removes the first occurrence of the argument from this vector
 * (matched via equals). Components after the removed element are
 * shifted down one position.
 *
 * @param s Node to remove from the list.
 * @return True if the node was successfully removed.
 * @throws RuntimeException thrown if this NodeSet is not of a mutable type.
 */
public boolean removeElement(Node s)
{
  if (!m_mutable)
  {
    throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESET_NOT_MUTABLE, null));
  }

  if (m_map == null)
    return false;

  for (int idx = 0; idx < m_firstFree; idx++)
  {
    Node candidate = m_map[idx];

    if ((candidate != null) && candidate.equals(s))
    {
      // Close the gap, then clear the now-duplicated tail slot.
      int trailing = m_firstFree - idx - 1;

      if (trailing > 0)
        System.arraycopy(m_map, idx + 1, m_map, idx, trailing);

      m_firstFree--;
      m_map[m_firstFree] = null;

      return true;
    }
  }

  return false;
}
/**
 * Deletes the component at the specified index. Each component in
 * this vector with an index greater or equal to the specified
 * index is shifted downward to have an index one smaller than
 * the value it had previously. A no-op if the array was never allocated.
 *
 * @param i Index of node to remove.
 * @throws ArrayIndexOutOfBoundsException if i is negative or >= size().
 */
public void removeElementAt(int i)
{
  if (m_map == null)
    return;

  if (i >= m_firstFree)
    throw new ArrayIndexOutOfBoundsException(i + " >= " + m_firstFree);
  if (i < 0)
    throw new ArrayIndexOutOfBoundsException(i);

  int trailing = m_firstFree - i - 1;

  if (trailing > 0)
    System.arraycopy(m_map, i + 1, m_map, i, trailing);

  m_firstFree--;
  m_map[m_firstFree] = null;
}
/**
 * Sets the component at the specified index of this vector to be the
 * specified object. The previous component at that position is discarded.
 *
 * The index must be a value greater than or equal to 0 and less
 * than the current size of the vector. No bounds check is performed
 * here, so a bad index surfaces as ArrayIndexOutOfBoundsException.
 * NOTE(review): if the backing array has not been allocated yet, only
 * one fresh block is allocated, so in that case index must also be
 * less than m_blocksize.
 *
 * @param node Node to set
 * @param index Index of where to set the node
 * @throws RuntimeException thrown if this NodeSet is not of a mutable type.
 */
public void setElementAt(Node node, int index)
{
  if (!m_mutable)
    throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESET_NOT_MUTABLE, null)); //"This NodeSet is not mutable!");

  if (null == m_map)
  {
    m_map = new Node[m_blocksize];
    m_mapSize = m_blocksize;
  }

  m_map[index] = node;
}
/**
 * Get the nth element.
 *
 * @param i Index of node to get
 *
 * @return Node at specified index, or null if the backing array has
 *         never been allocated. No bounds check against size() is
 *         performed, so indices in [size(), capacity) read as null and
 *         larger ones throw ArrayIndexOutOfBoundsException.
 */
public Node elementAt(int i)
{
  if (null == m_map)
    return null;

  return m_map[i];
}
/**
 * Tell if the table contains the given node (matched via equals).
 *
 * @param s Node to look for.
 * @return True if the given node was found.
 */
public boolean contains(Node s)
{
  runTo(-1); // make sure the set is fully populated before scanning

  if (m_map == null)
    return false;

  for (int i = 0; i < m_firstFree; i++)
  {
    Node candidate = m_map[i];

    if ((candidate != null) && candidate.equals(s))
      return true;
  }

  return false;
}
/**
 * Searches for the first occurrence of the given argument,
 * beginning the search at index, and testing for equality
 * using the equals method.
 *
 * @param elem  Node to look for.
 * @param index Index of where to start the search.
 * @return the index of the first occurrence of the object
 * argument in this vector at position index or later in the
 * vector; returns -1 if the object is not found.
 */
public int indexOf(Node elem, int index)
{
  runTo(-1); // ensure the set is fully populated before scanning

  if (m_map == null)
    return -1;

  for (int pos = index; pos < m_firstFree; pos++)
  {
    Node candidate = m_map[pos];

    if ((candidate != null) && candidate.equals(elem))
      return pos;
  }

  return -1;
}
/**
 * Searches for the first occurrence of the given argument,
 * beginning the search at position 0, and testing for equality
 * using the equals method.
 *
 * @param elem Node to look for.
 * @return the index of the first occurrence of the object
 * argument in this vector; returns -1 if the object is not found.
 */
public int indexOf(Node elem)
{
  // Delegate to the two-argument overload starting at position 0. The
  // original duplicated that method's body verbatim (including the
  // runTo(-1) completion step); behavior is identical.
  return indexOf(elem, 0);
}
}
|
googleapis/google-cloud-java | 35,957 | java-cloudquotas/proto-google-cloud-cloudquotas-v1/src/main/java/com/google/api/cloudquotas/v1/ListQuotaInfosResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/api/cloudquotas/v1/cloudquotas.proto
// Protobuf Java Version: 3.25.8
package com.google.api.cloudquotas.v1;
/**
*
*
* <pre>
* Message for response to listing QuotaInfos
* </pre>
*
* Protobuf type {@code google.api.cloudquotas.v1.ListQuotaInfosResponse}
*/
public final class ListQuotaInfosResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.api.cloudquotas.v1.ListQuotaInfosResponse)
ListQuotaInfosResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListQuotaInfosResponse.newBuilder() to construct.
private ListQuotaInfosResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListQuotaInfosResponse() {
quotaInfos_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListQuotaInfosResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.api.cloudquotas.v1.CloudquotasProto
.internal_static_google_api_cloudquotas_v1_ListQuotaInfosResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.api.cloudquotas.v1.CloudquotasProto
.internal_static_google_api_cloudquotas_v1_ListQuotaInfosResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.api.cloudquotas.v1.ListQuotaInfosResponse.class,
com.google.api.cloudquotas.v1.ListQuotaInfosResponse.Builder.class);
}
public static final int QUOTA_INFOS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.api.cloudquotas.v1.QuotaInfo> quotaInfos_;
/**
*
*
* <pre>
* The list of QuotaInfo
* </pre>
*
* <code>repeated .google.api.cloudquotas.v1.QuotaInfo quota_infos = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.api.cloudquotas.v1.QuotaInfo> getQuotaInfosList() {
return quotaInfos_;
}
/**
*
*
* <pre>
* The list of QuotaInfo
* </pre>
*
* <code>repeated .google.api.cloudquotas.v1.QuotaInfo quota_infos = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.api.cloudquotas.v1.QuotaInfoOrBuilder>
getQuotaInfosOrBuilderList() {
return quotaInfos_;
}
/**
*
*
* <pre>
* The list of QuotaInfo
* </pre>
*
* <code>repeated .google.api.cloudquotas.v1.QuotaInfo quota_infos = 1;</code>
*/
@java.lang.Override
public int getQuotaInfosCount() {
return quotaInfos_.size();
}
/**
*
*
* <pre>
* The list of QuotaInfo
* </pre>
*
* <code>repeated .google.api.cloudquotas.v1.QuotaInfo quota_infos = 1;</code>
*/
@java.lang.Override
public com.google.api.cloudquotas.v1.QuotaInfo getQuotaInfos(int index) {
return quotaInfos_.get(index);
}
/**
*
*
* <pre>
* The list of QuotaInfo
* </pre>
*
* <code>repeated .google.api.cloudquotas.v1.QuotaInfo quota_infos = 1;</code>
*/
@java.lang.Override
public com.google.api.cloudquotas.v1.QuotaInfoOrBuilder getQuotaInfosOrBuilder(int index) {
return quotaInfos_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < quotaInfos_.size(); i++) {
output.writeMessage(1, quotaInfos_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < quotaInfos_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, quotaInfos_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.api.cloudquotas.v1.ListQuotaInfosResponse)) {
return super.equals(obj);
}
com.google.api.cloudquotas.v1.ListQuotaInfosResponse other =
(com.google.api.cloudquotas.v1.ListQuotaInfosResponse) obj;
if (!getQuotaInfosList().equals(other.getQuotaInfosList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getQuotaInfosCount() > 0) {
hash = (37 * hash) + QUOTA_INFOS_FIELD_NUMBER;
hash = (53 * hash) + getQuotaInfosList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.api.cloudquotas.v1.ListQuotaInfosResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.api.cloudquotas.v1.ListQuotaInfosResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.api.cloudquotas.v1.ListQuotaInfosResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.api.cloudquotas.v1.ListQuotaInfosResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.api.cloudquotas.v1.ListQuotaInfosResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.api.cloudquotas.v1.ListQuotaInfosResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.api.cloudquotas.v1.ListQuotaInfosResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.api.cloudquotas.v1.ListQuotaInfosResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.api.cloudquotas.v1.ListQuotaInfosResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.api.cloudquotas.v1.ListQuotaInfosResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.api.cloudquotas.v1.ListQuotaInfosResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.api.cloudquotas.v1.ListQuotaInfosResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.api.cloudquotas.v1.ListQuotaInfosResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Message for response to listing QuotaInfos
* </pre>
*
* Protobuf type {@code google.api.cloudquotas.v1.ListQuotaInfosResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.api.cloudquotas.v1.ListQuotaInfosResponse)
com.google.api.cloudquotas.v1.ListQuotaInfosResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.api.cloudquotas.v1.CloudquotasProto
.internal_static_google_api_cloudquotas_v1_ListQuotaInfosResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.api.cloudquotas.v1.CloudquotasProto
.internal_static_google_api_cloudquotas_v1_ListQuotaInfosResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.api.cloudquotas.v1.ListQuotaInfosResponse.class,
com.google.api.cloudquotas.v1.ListQuotaInfosResponse.Builder.class);
}
// Construct using com.google.api.cloudquotas.v1.ListQuotaInfosResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (quotaInfosBuilder_ == null) {
quotaInfos_ = java.util.Collections.emptyList();
} else {
quotaInfos_ = null;
quotaInfosBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.api.cloudquotas.v1.CloudquotasProto
.internal_static_google_api_cloudquotas_v1_ListQuotaInfosResponse_descriptor;
}
@java.lang.Override
public com.google.api.cloudquotas.v1.ListQuotaInfosResponse getDefaultInstanceForType() {
return com.google.api.cloudquotas.v1.ListQuotaInfosResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.api.cloudquotas.v1.ListQuotaInfosResponse build() {
com.google.api.cloudquotas.v1.ListQuotaInfosResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.api.cloudquotas.v1.ListQuotaInfosResponse buildPartial() {
com.google.api.cloudquotas.v1.ListQuotaInfosResponse result =
new com.google.api.cloudquotas.v1.ListQuotaInfosResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.api.cloudquotas.v1.ListQuotaInfosResponse result) {
if (quotaInfosBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
quotaInfos_ = java.util.Collections.unmodifiableList(quotaInfos_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.quotaInfos_ = quotaInfos_;
} else {
result.quotaInfos_ = quotaInfosBuilder_.build();
}
}
private void buildPartial0(com.google.api.cloudquotas.v1.ListQuotaInfosResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.api.cloudquotas.v1.ListQuotaInfosResponse) {
return mergeFrom((com.google.api.cloudquotas.v1.ListQuotaInfosResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
    // Typed merge: repeated fields are concatenated, singular fields are overwritten
    // when set in `other` (standard protobuf merge semantics).
    public Builder mergeFrom(com.google.api.cloudquotas.v1.ListQuotaInfosResponse other) {
      if (other == com.google.api.cloudquotas.v1.ListQuotaInfosResponse.getDefaultInstance())
        return this;
      if (quotaInfosBuilder_ == null) {
        // Plain-list mode: if our list is still empty we can adopt the other
        // message's (immutable) list directly — the cleared mutability bit records
        // that it must be copied before any future modification. Otherwise append.
        if (!other.quotaInfos_.isEmpty()) {
          if (quotaInfos_.isEmpty()) {
            quotaInfos_ = other.quotaInfos_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureQuotaInfosIsMutable();
            quotaInfos_.addAll(other.quotaInfos_);
          }
          onChanged();
        }
      } else {
        // Field-builder mode: an empty builder is discarded and the other message's
        // list adopted wholesale (cheaper); a non-empty builder gets the messages
        // appended. The alwaysUseFieldBuilders flag forces immediate re-creation of
        // the builder in some runtime configurations.
        if (!other.quotaInfos_.isEmpty()) {
          if (quotaInfosBuilder_.isEmpty()) {
            quotaInfosBuilder_.dispose();
            quotaInfosBuilder_ = null;
            quotaInfos_ = other.quotaInfos_;
            bitField0_ = (bitField0_ & ~0x00000001);
            quotaInfosBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getQuotaInfosFieldBuilder()
                    : null;
          } else {
            quotaInfosBuilder_.addAllMessages(other.quotaInfos_);
          }
        }
      }
      // Singular string: proto3 treats the empty string as "unset", so only a
      // non-empty token overwrites ours.
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    // Always true: this message declares no `required` fields (proto3 has none).
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Parses wire-format bytes directly into this builder.
    // Tag = (field_number << 3) | wire_type, hence:
    //   10 → field 1 (quota_infos, length-delimited message),
    //   18 → field 2 (next_page_token, length-delimited string),
    //    0 → end of input.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.api.cloudquotas.v1.QuotaInfo m =
                    input.readMessage(
                        com.google.api.cloudquotas.v1.QuotaInfo.parser(), extensionRegistry);
                if (quotaInfosBuilder_ == null) {
                  ensureQuotaInfosIsMutable();
                  quotaInfos_.add(m);
                } else {
                  quotaInfosBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                // Unknown fields are preserved (proto3 behavior since v3.5);
                // parseUnknownField returns false on an end-group tag.
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify parent builders even on failure, since fields may be partially set.
        onChanged();
      } // finally
      return this;
    }
    // Bit mask tracking which fields have been set (and, for the repeated field,
    // whether the backing list is privately owned and therefore mutable).
    private int bitField0_;

    // Backing storage for quota_infos while no field builder exists; starts as the
    // shared immutable empty list and is copied on first write.
    private java.util.List<com.google.api.cloudquotas.v1.QuotaInfo> quotaInfos_ =
        java.util.Collections.emptyList();

    // Copy-on-write guard: replaces a shared/immutable list with a private
    // ArrayList before the first mutation, recording ownership in bit 0x00000001.
    private void ensureQuotaInfosIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        quotaInfos_ = new java.util.ArrayList<com.google.api.cloudquotas.v1.QuotaInfo>(quotaInfos_);
        bitField0_ |= 0x00000001;
      }
    }

    // Lazily-created helper that manages per-element builders for quota_infos.
    // Null until getQuotaInfosFieldBuilder() is first invoked; once created, it —
    // not quotaInfos_ — is the authoritative storage.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.api.cloudquotas.v1.QuotaInfo,
            com.google.api.cloudquotas.v1.QuotaInfo.Builder,
            com.google.api.cloudquotas.v1.QuotaInfoOrBuilder>
        quotaInfosBuilder_;
    // ------------------------------------------------------------------------
    // Accessors for the repeated `quota_infos` field (field number 1).
    // Every method has two code paths: before getQuotaInfosFieldBuilder() is
    // first invoked the elements live in the plain list `quotaInfos_`;
    // afterwards they are managed by `quotaInfosBuilder_`.
    // ------------------------------------------------------------------------
    /**
     *
     *
     * <pre>
     * The list of QuotaInfo
     * </pre>
     *
     * <code>repeated .google.api.cloudquotas.v1.QuotaInfo quota_infos = 1;</code>
     */
    public java.util.List<com.google.api.cloudquotas.v1.QuotaInfo> getQuotaInfosList() {
      if (quotaInfosBuilder_ == null) {
        return java.util.Collections.unmodifiableList(quotaInfos_);
      } else {
        return quotaInfosBuilder_.getMessageList();
      }
    }

    /**
     *
     *
     * <pre>
     * The list of QuotaInfo
     * </pre>
     *
     * <code>repeated .google.api.cloudquotas.v1.QuotaInfo quota_infos = 1;</code>
     */
    public int getQuotaInfosCount() {
      if (quotaInfosBuilder_ == null) {
        return quotaInfos_.size();
      } else {
        return quotaInfosBuilder_.getCount();
      }
    }

    /**
     *
     *
     * <pre>
     * The list of QuotaInfo
     * </pre>
     *
     * <code>repeated .google.api.cloudquotas.v1.QuotaInfo quota_infos = 1;</code>
     */
    public com.google.api.cloudquotas.v1.QuotaInfo getQuotaInfos(int index) {
      if (quotaInfosBuilder_ == null) {
        return quotaInfos_.get(index);
      } else {
        return quotaInfosBuilder_.getMessage(index);
      }
    }

    /**
     *
     *
     * <pre>
     * The list of QuotaInfo
     * </pre>
     *
     * <code>repeated .google.api.cloudquotas.v1.QuotaInfo quota_infos = 1;</code>
     */
    public Builder setQuotaInfos(int index, com.google.api.cloudquotas.v1.QuotaInfo value) {
      if (quotaInfosBuilder_ == null) {
        // Explicit null check on the list path only: RepeatedFieldBuilderV3
        // performs its own null validation on the builder path.
        if (value == null) {
          throw new NullPointerException();
        }
        ensureQuotaInfosIsMutable();
        quotaInfos_.set(index, value);
        onChanged();
      } else {
        quotaInfosBuilder_.setMessage(index, value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of QuotaInfo
     * </pre>
     *
     * <code>repeated .google.api.cloudquotas.v1.QuotaInfo quota_infos = 1;</code>
     */
    public Builder setQuotaInfos(
        int index, com.google.api.cloudquotas.v1.QuotaInfo.Builder builderForValue) {
      if (quotaInfosBuilder_ == null) {
        ensureQuotaInfosIsMutable();
        quotaInfos_.set(index, builderForValue.build());
        onChanged();
      } else {
        quotaInfosBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of QuotaInfo
     * </pre>
     *
     * <code>repeated .google.api.cloudquotas.v1.QuotaInfo quota_infos = 1;</code>
     */
    public Builder addQuotaInfos(com.google.api.cloudquotas.v1.QuotaInfo value) {
      if (quotaInfosBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureQuotaInfosIsMutable();
        quotaInfos_.add(value);
        onChanged();
      } else {
        quotaInfosBuilder_.addMessage(value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of QuotaInfo
     * </pre>
     *
     * <code>repeated .google.api.cloudquotas.v1.QuotaInfo quota_infos = 1;</code>
     */
    public Builder addQuotaInfos(int index, com.google.api.cloudquotas.v1.QuotaInfo value) {
      if (quotaInfosBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureQuotaInfosIsMutable();
        quotaInfos_.add(index, value);
        onChanged();
      } else {
        quotaInfosBuilder_.addMessage(index, value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of QuotaInfo
     * </pre>
     *
     * <code>repeated .google.api.cloudquotas.v1.QuotaInfo quota_infos = 1;</code>
     */
    public Builder addQuotaInfos(com.google.api.cloudquotas.v1.QuotaInfo.Builder builderForValue) {
      if (quotaInfosBuilder_ == null) {
        ensureQuotaInfosIsMutable();
        quotaInfos_.add(builderForValue.build());
        onChanged();
      } else {
        quotaInfosBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of QuotaInfo
     * </pre>
     *
     * <code>repeated .google.api.cloudquotas.v1.QuotaInfo quota_infos = 1;</code>
     */
    public Builder addQuotaInfos(
        int index, com.google.api.cloudquotas.v1.QuotaInfo.Builder builderForValue) {
      if (quotaInfosBuilder_ == null) {
        ensureQuotaInfosIsMutable();
        quotaInfos_.add(index, builderForValue.build());
        onChanged();
      } else {
        quotaInfosBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of QuotaInfo
     * </pre>
     *
     * <code>repeated .google.api.cloudquotas.v1.QuotaInfo quota_infos = 1;</code>
     */
    public Builder addAllQuotaInfos(
        java.lang.Iterable<? extends com.google.api.cloudquotas.v1.QuotaInfo> values) {
      if (quotaInfosBuilder_ == null) {
        ensureQuotaInfosIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, quotaInfos_);
        onChanged();
      } else {
        quotaInfosBuilder_.addAllMessages(values);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of QuotaInfo
     * </pre>
     *
     * <code>repeated .google.api.cloudquotas.v1.QuotaInfo quota_infos = 1;</code>
     */
    public Builder clearQuotaInfos() {
      if (quotaInfosBuilder_ == null) {
        // Drop back to the shared empty list and clear the ownership bit.
        quotaInfos_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        quotaInfosBuilder_.clear();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of QuotaInfo
     * </pre>
     *
     * <code>repeated .google.api.cloudquotas.v1.QuotaInfo quota_infos = 1;</code>
     */
    public Builder removeQuotaInfos(int index) {
      if (quotaInfosBuilder_ == null) {
        ensureQuotaInfosIsMutable();
        quotaInfos_.remove(index);
        onChanged();
      } else {
        quotaInfosBuilder_.remove(index);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of QuotaInfo
     * </pre>
     *
     * <code>repeated .google.api.cloudquotas.v1.QuotaInfo quota_infos = 1;</code>
     */
    public com.google.api.cloudquotas.v1.QuotaInfo.Builder getQuotaInfosBuilder(int index) {
      // Note: forces creation of the field builder (switches storage mode).
      return getQuotaInfosFieldBuilder().getBuilder(index);
    }

    /**
     *
     *
     * <pre>
     * The list of QuotaInfo
     * </pre>
     *
     * <code>repeated .google.api.cloudquotas.v1.QuotaInfo quota_infos = 1;</code>
     */
    public com.google.api.cloudquotas.v1.QuotaInfoOrBuilder getQuotaInfosOrBuilder(int index) {
      if (quotaInfosBuilder_ == null) {
        return quotaInfos_.get(index);
      } else {
        return quotaInfosBuilder_.getMessageOrBuilder(index);
      }
    }

    /**
     *
     *
     * <pre>
     * The list of QuotaInfo
     * </pre>
     *
     * <code>repeated .google.api.cloudquotas.v1.QuotaInfo quota_infos = 1;</code>
     */
    public java.util.List<? extends com.google.api.cloudquotas.v1.QuotaInfoOrBuilder>
        getQuotaInfosOrBuilderList() {
      if (quotaInfosBuilder_ != null) {
        return quotaInfosBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(quotaInfos_);
      }
    }

    /**
     *
     *
     * <pre>
     * The list of QuotaInfo
     * </pre>
     *
     * <code>repeated .google.api.cloudquotas.v1.QuotaInfo quota_infos = 1;</code>
     */
    public com.google.api.cloudquotas.v1.QuotaInfo.Builder addQuotaInfosBuilder() {
      return getQuotaInfosFieldBuilder()
          .addBuilder(com.google.api.cloudquotas.v1.QuotaInfo.getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * The list of QuotaInfo
     * </pre>
     *
     * <code>repeated .google.api.cloudquotas.v1.QuotaInfo quota_infos = 1;</code>
     */
    public com.google.api.cloudquotas.v1.QuotaInfo.Builder addQuotaInfosBuilder(int index) {
      return getQuotaInfosFieldBuilder()
          .addBuilder(index, com.google.api.cloudquotas.v1.QuotaInfo.getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * The list of QuotaInfo
     * </pre>
     *
     * <code>repeated .google.api.cloudquotas.v1.QuotaInfo quota_infos = 1;</code>
     */
    public java.util.List<com.google.api.cloudquotas.v1.QuotaInfo.Builder>
        getQuotaInfosBuilderList() {
      return getQuotaInfosFieldBuilder().getBuilderList();
    }

    // Lazily creates the field builder, transferring ownership of the current list
    // to it; from then on quotaInfos_ is null and the builder is authoritative.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.api.cloudquotas.v1.QuotaInfo,
            com.google.api.cloudquotas.v1.QuotaInfo.Builder,
            com.google.api.cloudquotas.v1.QuotaInfoOrBuilder>
        getQuotaInfosFieldBuilder() {
      if (quotaInfosBuilder_ == null) {
        quotaInfosBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.api.cloudquotas.v1.QuotaInfo,
                com.google.api.cloudquotas.v1.QuotaInfo.Builder,
                com.google.api.cloudquotas.v1.QuotaInfoOrBuilder>(
                quotaInfos_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        quotaInfos_ = null;
      }
      return quotaInfosBuilder_;
    }
    // Holds either a java.lang.String or a ByteString: protobuf caches whichever
    // representation was produced last, converting lazily on demand.
    private java.lang.Object nextPageToken_ = "";

    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        // Decode the cached ByteString once and memoize the String form.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        // Encode the cached String once and memoize the ByteString form.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Proto3 strings must be valid UTF-8; reject malformed bytes eagerly.
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    // Final overrides narrowing the return type; behavior is inherited unchanged.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.api.cloudquotas.v1.ListQuotaInfosResponse)
}
// @@protoc_insertion_point(class_scope:google.api.cloudquotas.v1.ListQuotaInfosResponse)
  // Singleton default instance; shared by getDefaultInstance() and used as the
  // "all fields unset" sentinel throughout the generated code.
  private static final com.google.api.cloudquotas.v1.ListQuotaInfosResponse DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.api.cloudquotas.v1.ListQuotaInfosResponse();
  }

  public static com.google.api.cloudquotas.v1.ListQuotaInfosResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Stateless parser: delegates to the Builder's mergeFrom and reports partial
  // results through setUnfinishedMessage so callers can inspect what was read
  // before a failure.
  private static final com.google.protobuf.Parser<ListQuotaInfosResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListQuotaInfosResponse>() {
        @java.lang.Override
        public ListQuotaInfosResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures so the caller sees a protobuf-typed exception.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ListQuotaInfosResponse> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListQuotaInfosResponse> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.api.cloudquotas.v1.ListQuotaInfosResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/sis | 35,843 | endorsed/src/org.apache.sis.util/main/org/apache/sis/io/TableAppender.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sis.io;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import java.io.Flushable;
import java.io.IOException;
import java.io.UncheckedIOException;
import org.apache.sis.pending.jdk.JDK21;
import org.apache.sis.util.ArraysExt;
import org.apache.sis.util.CharSequences;
import org.apache.sis.util.resources.Errors;
import org.apache.sis.util.internal.shared.X364;
import static org.apache.sis.util.Characters.isLineOrParagraphSeparator;
/**
* An {@link Appendable} which formats the text as a table suitable for displaying in devices using
* a monospaced font. Columns are separated by tabulations ({@code '\t'}) and rows are separated by
* {@linkplain org.apache.sis.util.Characters#isLineOrParagraphSeparator(int) line or paragraph separators}.
* The content of every table cells are stored in memory until the {@link #flush()} method is invoked.
* When invoked, {@code flush()} copies the cell contents to the {@linkplain #out underlying stream
* or buffer} while replacing tabulations by some number of spaces and drawing borders.
* The exact number of spaces is computed from the cell widths.
*
* <p>For example, the following code:</p>
*
* {@snippet lang="java" :
* var table = new TableAppender(System.out);
* table.nextLine('═');
* table.append("English\tFrench\tr.e.d.\n");
* table.nextLine('-');
* table.append("Mercury\tMercure\t0.382\n")
* .append("Venus\tVénus\t0.949\n")
* .append("Earth\tTerre\t1.00\n")
* .append("Mars\tMars\t0.532\n");
* table.nextLine('═');
* table.flush();
* }
*
* produces the following output:
*
* <pre class="text">
* ╔═════════╤═════════╤════════╗
* ║ English │ French │ r.e.d. ║
* ╟─────────┼─────────┼────────╢
* ║ Mercury │ Mercure │ 0.382 ║
* ║ Venus │ Vénus │ 0.949 ║
* ║ Earth │ Terre │ 1.00 ║
* ║ Mars │ Mars │ 0.532 ║
* ╚═════════╧═════════╧════════╝</pre>
*
* @author Martin Desruisseaux (MPO, IRD, Geomatys)
* @version 1.0
*
* @see org.apache.sis.util.collection.TreeTableFormat
*
* @since 0.3
*/
public class TableAppender extends Appender implements Flushable {
/**
* A possible value for cell alignment. This specifies that the text is aligned
* to the left indent and extra whitespace should be placed on the right.
*/
public static final byte ALIGN_LEFT = -1;
/**
* A possible value for cell alignment. This specifies that the text is aligned
* to the center and extra whitespace should be placed equally on the left and right.
*/
public static final byte ALIGN_CENTER = 0;
/**
* A possible value for cell alignment. This specifies that the text is aligned
* to the right indent and extra whitespace should be placed on the left.
*/
public static final byte ALIGN_RIGHT = +1;
/**
* Drawing-box characters. The last two characters
* are horizontal and vertical line respectively.
*/
private static final char[][] BOX = new char[][] {
{// [0000]: single horizontal, single vertical
'┌','┬','┐',
'├','┼','┤',
'└','┴','┘',
'─','│'
},
{// [0001]: single horizontal, double vertical
'╓','╥','╖',
'╟','╫','╢',
'╙','╨','╜',
'─','║'
},
{// [0010]: double horizontal, single vertical
'╒','╤','╕',
'╞','╪','╡',
'╘','╧','╛',
'═','│'
},
{// [0011]: double horizontal, double vertical
'╔','╦','╗',
'╠','╬','╣',
'╚','╩','╝',
'═','║'
}
};
/**
* The character for empty spaces to insert between columns.
*/
private static final char SPACE = ' ';
/**
* Temporary string buffer. This buffer contains only one cell content.
*/
private final StringBuilder buffer = new StringBuilder(64);
/**
* List of {@link Cell} objects, from left to right and top to bottom.
* By convention, a {@code null} value or a {@link Cell} object with
* <code>{@linkplain Cell#text} == null</code> means that we need to move
* to the next line.
*/
private final List<Cell> cells = new ArrayList<>();
/**
* Alignment for current and next cells.
*
* @see #getCellAlignment()
* @see #setCellAlignment(byte)
*/
private byte alignment = ALIGN_LEFT;
/**
* Column position of the cell currently being written. The field
* is incremented every time {@link #nextColumn()} is invoked.
*/
private int currentColumn;
/**
* Line position of the cell currently being written. The field
* is incremented every time {@link #nextLine()} is invoked.
*/
private int currentRow;
/**
* Maximum width for each columns. This array length must
* be equal to the number of columns in this table.
*/
private int[] maximalColumnWidths = ArraysExt.EMPTY_INT;
/**
* The line separator. We will use the first line separator found in the
     * text provided by the user, or the system default if none.
*/
private String lineSeparator;
/**
* The column separator, or an empty string if none.
*/
private final String columnSeparator;
/**
* The left table border, or an empty string if none.
*/
private final String leftBorder;
/**
* The right table border, or an empty string if none.
*/
private final String rightBorder;
/**
* Tells if cells can span more than one line. If {@code true}, then EOL characters likes
* {@code '\n'} move to the next line <em>inside</em> the current cell. If {@code false},
* then EOL characters move to the next table row. Default value is {@code false}.
*/
private boolean multiLinesCells;
/**
* {@code true} if the next character needs to be skipped if equals to {@code '\n'}.
*/
private boolean skipLF;
/**
* Sets to {@code true} at construction time if {@link #out} has been created by the
* constructor rather than supplied by the user.
*/
private boolean ownOut;
    /**
     * Creates a new table formatter writing in an internal buffer with a default column separator.
     * The default is a vertical double line for the left and right table borders, and a single
     * line between the columns.
     */
    public TableAppender() {
        this(new StringBuilder(256));
        ownOut = true;      // Record that this instance created (and owns) the output buffer.
    }
    /**
     * Creates a new table formatter writing in an internal buffer with the specified column separator.
     *
     * @param separator  string to write between columns.
     */
    public TableAppender(final String separator) {
        this(new StringBuilder(256), separator);
        ownOut = true;      // Record that this instance created (and owns) the output buffer.
    }
    /**
     * Creates a new table formatter writing in the given output with a default column separator.
     * The default is a vertical double line for the left and right table borders, and a single
     * line between the columns.
     *
     * @param out  the underlying stream or buffer to write to.
     */
    public TableAppender(final Appendable out) {
        super(out);
        // Default drawing characters: double vertical lines for the outer borders,
        // a single vertical line between columns, padded with one space on each side.
        leftBorder = "║ ";
        rightBorder = " ║" ;
        columnSeparator = " │ ";
    }
    /**
     * Creates a new table formatter writing in the given output with the specified column separator.
     * The left border is the separator with leading whitespace removed, and the right border is
     * the separator with trailing whitespace removed.
     *
     * @param out        the underlying stream or buffer to write to.
     * @param separator  string to write between columns.
     */
    public TableAppender(final Appendable out, final String separator) {
        super(out);
        /*
         * Following methods use Character.isWhitespace(…) instead of Character.isSpaceChar(…).
         * This has the effect of removing some ISO control characters (line feeds, tabulation,
         * etc.) from the border. If this policy is changed, search for other occurrences of
         * `isWhitespace` in this class for ensuring consistency. Note however that the same
         * policy is not necessarily applied everywhere.
         */
        final int length = separator.length();
        leftBorder = separator.substring( CharSequences.skipLeadingWhitespaces (separator, 0, length));
        rightBorder = separator.substring(0, CharSequences.skipTrailingWhitespaces(separator, 0, length));
        columnSeparator = separator;
    }
    /**
     * Creates a new table formatter writing in the given output with the specified column separator and border.
     * All three separator strings are used verbatim (no whitespace trimming is applied).
     *
     * @param out          the underlying stream or buffer to write to.
     * @param leftBorder   string to write on the left side of the table.
     * @param separator    string to write between columns.
     * @param rightBorder  string to write on the right side of the table.
     * @throws NullPointerException if any of the border or separator arguments is null.
     *
     * @since 0.8
     */
    public TableAppender(final Appendable out, final String leftBorder, final String separator, final String rightBorder) {
        super(out);
        this.leftBorder = Objects.requireNonNull(leftBorder);
        this.rightBorder = Objects.requireNonNull(rightBorder);
        this.columnSeparator = Objects.requireNonNull(separator);
    }
    /**
     * Writes a border or a corner to the underlying stream or buffer.
     * The drawing characters are looked up in the {@link #BOX} table, selected
     * by matching the requested horizontal line character against each row's
     * horizontal-line entry (index 9).
     *
     * @param horizontalBorder -1 for left border, +1 for right border, 0 for center.
     * @param verticalBorder -1 for top border, +1 for bottom border, 0 for center.
     * @param horizontalChar character to use for horizontal line.
     * @throws IOException if the writing operation failed.
     */
    private void writeBorder(final int horizontalBorder,
                             final int verticalBorder,
                             final char horizontalChar) throws IOException
    {
        /*
         * Get the set of characters to use for the horizontal line.
         */
        int boxCount = 0;
        final char[][] box = new char[BOX.length][];
        for (final char[] row : BOX) {
            if (row[9] == horizontalChar) {
                box[boxCount++] = row;
            }
        }
        /*
         * Get a string which contains the vertical lines to draw
         * on the left, on the right or in the center of the table.
         */
        final String border;
        switch (horizontalBorder) {
            case -1: border = leftBorder; break;
            case +1: border = rightBorder; break;
            case 0: border = columnSeparator; break;
            default: throw new AssertionError(horizontalBorder);
        }
        assert (verticalBorder >= -1) && (verticalBorder <= +1) : verticalBorder;
        /*
         * Replaces spaces by the horizontal lines, and vertical lines by an intersection.
         * Use Character.isWhitespace(…) instead of Character.isSpaceChar(…) for consistency
         * with the policy used in the constructor, since we work on the same object (namely
         * the border strings).
         */
        // Index into a BOX row: column selected by horizontal position, row by vertical.
        final int index = (horizontalBorder+1) + (verticalBorder+1)*3;
        final int borderLength = border.length();
        for (int i=0; i<borderLength;) {
            int c = border.codePointAt(i);
            i += Character.charCount(c);
            if (Character.isWhitespace(c)) {
                c = horizontalChar;
            } else {
                // A vertical-line character (index 10) becomes the matching corner
                // or intersection character; anything else is copied unchanged.
                for (int j=0; j<boxCount; j++) {
                    if (box[j][10] == c) {
                        c = box[j][index];
                        break;
                    }
                }
            }
            appendCodePoint(c);
        }
    }
/**
* Returns {@code true} if EOL characters are used for line feeds inside current cells.
*
* @return {@code true} if EOL characters are to be write inside the cell.
*/
public boolean isMultiLinesCells() {
return multiLinesCells;
}
/**
* Sets the desired behavior for EOL and tabulations characters.
*
* <ul>
* <li>If {@code true}, then tabulations,
* {@linkplain org.apache.sis.util.Characters#isLineOrParagraphSeparator(int)
* line and paragraph separator} characters are copied into the current cell.
* Subsequent writing operations will continue inside the same cell.</li>
* <li>If {@code false}, then tabulations move to next column and EOL move
* to the first cell of next row (i.e. tabulation and EOL are equivalent to
* {@link #nextColumn()} and {@link #nextLine()} calls respectively).</li>
* </ul>
*
* The default value is {@code false}.
*
* @param multiLines {@code true} true if EOL are used for line feeds inside current cells,
* or {@code false} if EOL move to the next row.
*/
public void setMultiLinesCells(final boolean multiLines) {
multiLinesCells = multiLines;
}
/**
* Returns the alignment of the text inside the current cell.
* The default value is {@link #ALIGN_LEFT}.
*
* @return current cell alignment as one of the {@link #ALIGN_LEFT},
* {@link #ALIGN_RIGHT} or {@link #ALIGN_CENTER} constants.
*/
public byte getCellAlignment() {
return alignment;
}
/**
* Sets the alignment of the text inside the current cell. The alignments of any cell
* written prior this method call are left unchanged. The new alignment will apply to
* the next cells too until this {@code setCellAlignment(…)} method is invoked again
* with a different value.
*
* <p>If this method is never invoked, then the default alignment is {@link #ALIGN_LEFT}.</p>
*
* @param alignment the new cell alignment as one of the {@link #ALIGN_LEFT},
* {@link #ALIGN_RIGHT} or {@link #ALIGN_CENTER} constants.
*/
public void setCellAlignment(final byte alignment) {
if (alignment < ALIGN_LEFT || alignment > ALIGN_RIGHT) {
throw new IllegalArgumentException(Errors.format(
Errors.Keys.IllegalArgumentValue_2, "alignment", alignment));
}
this.alignment = alignment;
}
/**
* Returns the line separator between table rows. This is the first line separator found in the
* text formatted as a table, or the {@linkplain System#lineSeparator() system default} if no
* line separator was found in the text to format.
*
* @return the line separator between table rows.
*
* @since 1.0
*/
public String getLineSeparator() {
if (lineSeparator == null) {
lineSeparator = System.lineSeparator();
}
return lineSeparator;
}
/**
* Returns the number of rows in this table. This count is reset to 0 by {@link #flush()}.
*
* @return the number of rows in this table.
*/
public int getRowCount() {
int count = currentRow;
if (currentColumn != 0) {
count++; // Some writting has begun in the current row.
}
return count;
}
    /**
     * Returns the number of columns in this table.
     *
     * @return the number of columns in this table.
     */
    public int getColumnCount() {
        // The widths array is grown by nextColumn(char) as columns are written,
        // so its length is always the widest row seen so far.
        return maximalColumnWidths.length;
    }
    /**
     * Writes a single character.
     * If {@link #isMultiLinesCells()} is {@code false} (which is the default), then:
     *
     * <ul>
     *   <li>Tabulations ({@code '\t'}) are replaced by calls to {@link #nextColumn()}.</li>
     *   <li>{@linkplain org.apache.sis.util.Characters#isLineOrParagraphSeparator(int)
     *       line or paragraph separators} are replaced by calls to {@link #nextLine()}.</li>
     * </ul>
     *
     * @param  c  character to write.
     * @return {@code this}.
     */
    @Override
    public TableAppender append(final char c) {
        // toCodePoint(…) is inherited from Appender; presumably it merges surrogate
        // pairs across calls (see the cp >= 0 checks elsewhere) — TODO confirm.
        final int cp = toCodePoint(c);
        if (!multiLinesCells) {
            if (cp == '\t') {
                nextColumn();
                skipLF = false;
                return this;
            }
            if (isLineOrParagraphSeparator(cp)) {
                if (cp == '\n') {
                    // A '\n' right after '\r' is the second half of a "\r\n" pair:
                    // the row change was already done, so swallow it.
                    if (!skipLF) {
                        nextLine();
                    }
                    skipLF = false;
                } else {
                    // '\r' or another separator: change row now and arm skipLF so
                    // that a following '\n' does not produce a second row change.
                    nextLine();
                    skipLF = true;
                }
                return this;
            }
        }
        buffer.appendCodePoint(cp);
        skipLF = false;
        return this;
    }
/**
* Appends the specified character sequence.
*
* @param sequence the character sequence to append, or {@code null}.
* @return a reference to this {@code Appendable}.
*/
@Override
public TableAppender append(CharSequence sequence) {
if (sequence == null) {
sequence = "null";
}
return append(sequence, 0, sequence.length());
}
    /**
     * Writes a portion of a character sequence. Tabulations and line separators are
     * interpreted as by {@link #append(char)}.
     *
     * @param  sequence  the character sequence to be written.
     * @param  start     index from which to start reading characters.
     * @param  end       index of the character following the last character to read.
     * @return {@code this}.
     */
    @Override
    @SuppressWarnings("fallthrough")
    public TableAppender append(final CharSequence sequence, int start, int end) {
        Objects.checkFromToIndex(start, end, sequence.length());
        // Capture the first line separator seen in user text; it becomes the
        // separator used between table rows (see getLineSeparator()).
        if (lineSeparator == null) {
            lineSeparator = lineSeparator(sequence, start, end);
        }
        try {
            start = appendSurrogate(sequence, start, end);
        } catch (IOException e) {
            /*
             * Should never happen, because appendSurrogate(…) delegates to append(char)
             * which is overriden without `throws IOException` clause in this class.
             */
            throw new UncheckedIOException(e);
        }
        if (start != end) {
            // Honor a pending CR-LF collapse armed by a previous call.
            if (skipLF && sequence.charAt(start) == '\n') {
                start++;
            }
            if (!multiLinesCells) {
                // Scan for cell/row delimiters, flushing the text between them into
                // the cell buffer. `cp` may be negative while a surrogate is pending.
                int cp = 0;
                int upper = start;
                while (upper != end) {
                    cp = toCodePoint(sequence.charAt(upper++));
                    if (cp >= 0 && (cp == '\t' || isLineOrParagraphSeparator(cp))) {
                        buffer.append(sequence, start, upper - Character.charCount(cp));
                        switch (cp) {
                            case '\r': if (upper < end && sequence.charAt(upper) == '\n') upper++;
                            default: nextLine(); break; // Applies also to the above '\r' case.
                            case '\t': nextColumn(); break;
                        }
                        start = upper;
                    }
                }
                skipLF = (cp == '\r'); // Check the last character.
            } else {
                /*
                 * The call to `toCodePoint` is for forcing the initialization of
                 * super.highSurrogate field value. Even if we fall in the middle
                 * of a surrogate pair, it should not hurt because in this context,
                 * toCodePoint should either returns -1 or its argument unchanged.
                 */
                assert !isHighSurrogate();
                skipLF = (toCodePoint(sequence.charAt(end - 1)) == '\r');
            }
            // Keep a trailing high surrogate pending for the next call.
            if (isHighSurrogate()) {
                end--;
            }
            buffer.append(sequence, start, end);
        }
        return this;
    }
/**
* Writes an horizontal separator using the {@code '─'} character.
*
* @see #nextLine(char)
*/
public void appendHorizontalSeparator() {
if (currentColumn != 0 || buffer.length() != 0) {
nextLine();
}
nextLine('─');
}
    /**
     * Moves one column to the right.
     * The subsequent writing operations will occur in a new cell on the same row.
     * Equivalent to {@code nextColumn(' ')}.
     */
    public void nextColumn() {
        nextColumn(SPACE);
    }
    /**
     * Moves one column to the right, filling remaining space with the given character.
     * The subsequent writing operations will occur in a new cell on the same row.
     *
     * <p>Calling {@code nextColumn('*')} from the first character
     * in a cell is a convenient way to put a pad value in this cell.</p>
     *
     * @param fill character filling the cell (default to whitespace).
     */
    public void nextColumn(final char fill) {
        // Snapshot the pending text and register it as a completed cell.
        final String cellText = buffer.toString();
        cells.add(new Cell(cellText, alignment, fill));
        // Grow the per-column maximal width array lazily as new columns appear.
        if (currentColumn >= maximalColumnWidths.length) {
            maximalColumnWidths = Arrays.copyOf(maximalColumnWidths, currentColumn+1);
        }
        /*
         * Compute the width of the widest line in this cell. For each line, the scan
         * below skips positions holding ISO control characters and measures the plain
         * length up to the remaining positions, keeping the maximum. X364.lengthOfPlain
         * presumably returns the length ignoring ANSI X3.64 escape sequences (judging
         * by the class name) — confirm against the X364 class documentation.
         */
        int width = 0;
        int lineStart = 0;
        final int length = cellText.length();
        while (lineStart < length) {
            final int nextLine = CharSequences.indexOfLineStart(cellText, 1, lineStart);
            for (int i=nextLine; --i >= lineStart;) {
                if (!Character.isISOControl(cellText.charAt(i))) {
                    final int lg = X364.lengthOfPlain(cellText, lineStart, i+1);
                    if (lg > width) {
                        width = lg;
                    }
                }
            }
            lineStart = nextLine;
        }
        // Record the widest line seen so far for this column.
        if (width > maximalColumnWidths[currentColumn]) {
            maximalColumnWidths[currentColumn] = width;
        }
        currentColumn++;
        buffer.setLength(0);    // Ready to accumulate the next cell content.
    }
    /**
     * Moves to the first column on the next row.
     * The subsequent writing operations will occur on a new row.
     * Remaining cells on the current row are filled with whitespace;
     * see {@link #nextLine(char)} for using another fill character.
     */
    public void nextLine() {
        nextLine(SPACE);
    }
/**
* Moves to the first column on the next row, filling every remaining cell in the current
* row with the specified character. The subsequent writing operations will occur on a new
* row.
*
* <p>Calling {@code nextLine('-')} or {@code nextLine('═')} from the first column of a row
* is a convenient way to fill this row with a line separator.</p>
*
* @param fill character filling the rest of the line (default to whitespace).
* This character may be use as a row separator.
*
* @see #appendHorizontalSeparator()
*/
public void nextLine(final char fill) {
if (buffer.length() != 0) {
nextColumn(fill);
}
assert buffer.length() == 0;
cells.add((fill != SPACE) ? new Cell(null, alignment, fill) : null);
currentColumn = 0;
currentRow++;
}
/**
* Flushes the table content to the underlying stream or buffer. This method should not
* be called before the table is completed (otherwise, columns may have the wrong width).
*
* @throws IOException if an output operation failed.
*/
@Override
public void flush() throws IOException {
if (buffer.length() != 0) {
nextLine();
assert buffer.length() == 0;
}
if (!ownOut) {
writeTable();
}
cells.clear();
currentRow = 0;
currentColumn = 0;
if (!(out instanceof TableAppender)) {
/*
* Flush only if this table is not included in an outer (bigger) table.
* This is because flushing the outer table would break its formatting.
*/
IO.flush(out);
}
}
/**
* Returns the content of this {@code TableAppender} as a string if possible.
*
* <ul>
* <li>If this {@code TableAppender} has been created without explicit {@link Appendable},
* then this method always returns the current table content formatted as a string.</li>
* <li>Otherwise, if {@link #out} implements {@link CharSequence} or is directly or
* indirectly a wrapper around a {@code CharSequence}, returns its {@code toString()}
* representation. The string will contain this table content only if {@link #flush()}
* has been invoked prior this {@code toString()} method.</li>
* <li>Otherwise returns the localized <q>Unavailable content</q> string.</li>
* </ul>
*/
@Override
public String toString() {
if (ownOut) {
((StringBuilder) out).setLength(0);
try {
writeTable();
} catch (IOException e) {
// Should never happen because we are writing in a StringBuilder.
throw new UncheckedIOException(e);
}
}
return super.toString();
}
    /**
     * Writes the table without clearing the {@code TableAppender} content.
     * Invoking this method many time would result in the same table being
     * repeated.
     */
    private void writeTable() throws IOException {
        @SuppressWarnings("LocalVariableHidesMemberVariable")
        final String columnSeparator = this.columnSeparator;
        final Cell[] currentLine = new Cell[maximalColumnWidths.length];
        final int cellCount = cells.size();
        for (int cellIndex=0; cellIndex<cellCount; cellIndex++) {
            /*
             * Copies in `currentLine` every cells to write in the current table row.
             * Those elements exclude the last null sentinel value. The `currentLine`
             * array initially contains no null element, but some element will be set
             * to null as we progress in the writing process.
             */
            Cell lineFill = null;
            int currentCount = 0;
            do {
                final Cell cell = cells.get(cellIndex);
                if (cell == null) {
                    break;      // End of row: null sentinel appended by nextLine().
                }
                if (cell.text == null) {
                    // Text-less cell: the whole remainder of the row is filler.
                    lineFill = new Cell("", cell.alignment, cell.fill);
                    break;
                }
                currentLine[currentCount++] = cell;
            }
            while (++cellIndex < cellCount);
            Arrays.fill(currentLine, currentCount, currentLine.length, lineFill);
            /*
             * The loop below will be executed as long as we have some lines to write,
             * (i.e. as long as at least one element is non-null). If a cell contains
             * EOL characters, then we will need to format it as a multi-lines cell.
             */
            while (!isEmpty(currentLine)) {
                for (int j=0; j<currentLine.length; j++) {
                    final boolean isFirstColumn = (j == 0);
                    final boolean isLastColumn = (j+1 == currentLine.length);
                    final Cell cell = currentLine[j];
                    final int cellWidth = maximalColumnWidths[j];
                    // Trailing padding can be omitted on the last column when there is no right border.
                    final int cellPadding = isLastColumn && rightBorder.isEmpty() ? 0 : cellWidth;
                    if (cell == null) {
                        // This column has no more lines to write: emit blank filler.
                        if (isFirstColumn) {
                            out.append(leftBorder);
                        }
                        repeat(out, SPACE, cellPadding);
                        out.append(isLastColumn ? rightBorder : columnSeparator);
                        continue;
                    }
                    String cellText = cell.text;
                    int textLength = cellText.length();
                    Cell remaining = null;
                    for (int endOfFirstLine=0; endOfFirstLine < textLength;) {
                        int c = cellText.codePointAt(endOfFirstLine);
                        int next = endOfFirstLine + Character.charCount(c);
                        if (isLineOrParagraphSeparator(c)) {
                            /*
                             * If a EOL character has been found, write only the first line in the cell.
                             * The `currentLine[j]` element will be modified in order to contain only
                             * the remaining lines, which will be written in next loop iterations.
                             */
                            if (c == '\r' && (next < textLength) && cellText.charAt(next) == '\n') {
                                next++;
                            }
                            /*
                             * Verify if the remaining contains only white spaces. If so, those spaces
                             * will be ignored. But if there is at least one non-white character, then
                             * we will not skip those spaces. We use Character.isWhitespace(…) instead
                             * of Character.isSpaceChar(…) in order to consider non-breaking spaces as
                             * non-white characters. This is similar to the use of `&nbsp;` in HTML
                             * tables, which can be used for forcing the insertion of an otherwise
                             * ignored space.
                             */
                            for (int i=next; i<textLength; i += Character.charCount(c)) {
                                c = cellText.codePointAt(i);
                                if (!Character.isWhitespace(c)) {
                                    remaining = cell.substring(next);
                                    break;
                                }
                            }
                            cellText = cellText.substring(0, endOfFirstLine);
                            break;
                        }
                        endOfFirstLine = next;
                    }
                    currentLine[j] = remaining;
                    // Visible width: presumably excludes ANSI X3.64 escape sequences — see X364.
                    textLength = X364.lengthOfPlain(cellText, 0, cellText.length());
                    /*
                     * If the cell to write is actually a border, do a special processing
                     * in order to use the characters defined in the BOX static constant.
                     */
                    if (currentCount == 0) {
                        assert textLength == 0;
                        final int verticalBorder;
                        if (cellIndex == 0) verticalBorder = -1;                // Top row of the table.
                        else if (cellIndex >= cellCount-1) verticalBorder = +1; // Bottom row of the table.
                        else verticalBorder = 0;                                // Interior row.
                        if (isFirstColumn) {
                            writeBorder(-1, verticalBorder, cell.fill);
                        }
                        repeat(out, cell.fill, Character.isWhitespace(cell.fill) ? cellPadding : cellWidth);
                        writeBorder(isLastColumn ? +1 : 0, verticalBorder, cell.fill);
                        continue;
                    }
                    /*
                     * If the cell is not a border, it is a normal cell.
                     * Write a single line of this cell content.
                     */
                    if (isFirstColumn) {
                        out.append(leftBorder);
                    }
                    // Expand tabulations only when the text actually contains one.
                    final Appendable tabExpander = (cellText.indexOf('\t') >= 0)
                            ? new LineAppender(out, Integer.MAX_VALUE, true) : out;
                    switch (cell.alignment) {
                        default: {
                            throw new AssertionError(cell.alignment);
                        }
                        case ALIGN_LEFT: {
                            tabExpander.append(cellText);
                            repeat(tabExpander, cell.fill, cellPadding - textLength);
                            break;
                        }
                        case ALIGN_RIGHT: {
                            repeat(tabExpander, cell.fill, cellWidth - textLength);
                            tabExpander.append(cellText);
                            break;
                        }
                        case ALIGN_CENTER: {
                            final int leftPadding = (cellWidth - textLength) / 2;
                            repeat(tabExpander, cell.fill, leftPadding);
                            tabExpander.append(cellText);
                            repeat(tabExpander, cell.fill, (cellPadding - leftPadding) - textLength);
                            break;
                        }
                    }
                    out.append(isLastColumn ? rightBorder : columnSeparator);
                }
                // Use the caller's line separator when one was detected, the platform default otherwise.
                if (lineSeparator == null) {
                    lineSeparator = System.lineSeparator();
                }
                out.append(lineSeparator);
            }
        }
    }
/**
* Checks if {@code array} contains only {@code null} elements.
*/
private static boolean isEmpty(final Object[] array) {
for (int i=array.length; --i>=0;) {
if (array[i] != null) {
return false;
}
}
return true;
}
/**
* Repeats a character. The {@code count} value may be negative,
* which is handled as if it was zero.
*
* @param out the stream or buffer where to repeat the character.
* @param car character to write (usually ' ').
* @param count number of repetition, negative means 0.
*/
private static void repeat(final Appendable out, final char car, int count) throws IOException {
if (out instanceof StringBuilder) {
if (count > 0) {
JDK21.repeat((StringBuilder) out, car, count);
}
} else while (--count >= 0) {
out.append(car);
}
}
    /**
     * A class wrapping a cell content and its text alignment.
     * This class is for internal use only.
     *
     * @author Martin Desruisseaux (IRD, Geomatys)
     */
    private static final class Cell {
        /**
         * The text to write inside the cell, or {@code null} for a filler cell
         * (a cell whose whole content is the {@link #fill} character, as created
         * by {@code nextLine(char)}).
         */
        final String text;
        /**
         * The alignment for {@link #text} inside the cell.
         * Interpreted against the {@code ALIGN_*} constants by {@code writeTable()}.
         */
        byte alignment;
        /**
         * The fill character, used for filling space inside the cell.
         */
        final char fill;
        /**
         * Returns a new cell wrapping the specified string with the
         * specified alignment and fill character.
         */
        Cell(final String text, final byte alignment, final char fill) {
            this.text = text;
            this.alignment = alignment;
            this.fill = fill;
        }
        /**
         * Returns a new cell containing the tail of this cell's text,
         * starting at {@code lower}, with the same alignment and fill character.
         */
        Cell substring(final int lower) {
            return new Cell(text.substring(lower), alignment, fill);
        }
        /**
         * Returns the cell content (may be {@code null} for filler cells).
         */
        @Override
        public String toString() {
            return text;
        }
    }
}
|
hibernate/hibernate-search | 33,140 | integrationtest/mapper/orm/src/test/java/org/hibernate/search/integrationtest/mapper/orm/massindexing/MassIndexingInheritanceIT.java | /*
* SPDX-License-Identifier: Apache-2.0
* Copyright Red Hat Inc. and Hibernate Authors
*/
package org.hibernate.search.integrationtest.mapper.orm.massindexing;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Fail.fail;
import static org.hibernate.search.util.impl.integrationtest.mapper.orm.OrmUtils.with;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.io.Serializable;
import java.nio.charset.StandardCharsets;
import java.util.Locale;
import java.util.Set;
import java.util.stream.Collectors;
import jakarta.persistence.DiscriminatorColumn;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Inheritance;
import jakarta.persistence.InheritanceType;
import org.hibernate.SessionFactory;
import org.hibernate.resource.jdbc.spi.StatementInspector;
import org.hibernate.search.engine.backend.types.Aggregable;
import org.hibernate.search.engine.backend.types.Searchable;
import org.hibernate.search.engine.backend.types.Sortable;
import org.hibernate.search.engine.backend.work.execution.DocumentCommitStrategy;
import org.hibernate.search.engine.backend.work.execution.DocumentRefreshStrategy;
import org.hibernate.search.mapper.orm.Search;
import org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings;
import org.hibernate.search.mapper.orm.massindexing.MassIndexer;
import org.hibernate.search.mapper.orm.session.SearchSession;
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.DocumentId;
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.GenericField;
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.Indexed;
import org.hibernate.search.util.impl.integrationtest.common.extension.BackendMock;
import org.hibernate.search.util.impl.integrationtest.mapper.orm.OrmSetupHelper;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInstance;
import org.junit.jupiter.api.extension.RegisterExtension;
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
class MassIndexingInheritanceIT {
	// The real System.out, captured in setup() and restored in afterAll().
	private PrintStream sysOut;
	// Collects executed SQL for pattern assertions; presumably it parses the SQL
	// printed by `hibernate.show_sql` from the captured System.out buffer
	// (the statement_inspector property is commented out) — confirm in
	// MassIndexerStatementInspector.
	private MassIndexerStatementInspector statementInspector;
	private SessionFactory sessionFactory;
	@RegisterExtension
	public static BackendMock backendMock = BackendMock.create();
	@RegisterExtension
	public static OrmSetupHelper ormSetupHelper = OrmSetupHelper.withBackendMock( backendMock );
	/**
	 * One-time setup for the whole test class (lifecycle PER_CLASS):
	 * boots the ORM/Search mapping with indexing listeners disabled (only the
	 * mass indexer should write to the indexes), declares the expected index
	 * schemas on the backend mock, and persists the entity fixture shared by
	 * all test methods.
	 */
	@BeforeAll
	void setup() {
		// Capture the real System.out before setUp() redirects it per test.
		sysOut = System.out;
		backendMock.resetExpectations();
		statementInspector = new MassIndexerStatementInspector();
		OrmSetupHelper.SetupContext setupContext = ormSetupHelper.start()
				.dataClearing( c -> c.clearIndexData( false ).clearDatabaseData( false ) )
				.withPropertyRadical( HibernateOrmMapperSettings.Radicals.INDEXING_LISTENERS_ENABLED, false )
				// .withProperty( "hibernate.session_factory.statement_inspector", statementInspector )
				.withProperty( "hibernate.show_sql", true )
				.withAnnotatedTypes( Car.class, Truck.class, DooredVehicle.class, BedVehicle.class, BaseVehicle.class,
						BaseEntity.class, Van.class,
						A.class, AA.class, BA.class, ABA.class, BBA.class, AABA.class, AAABA.class,
						Root.class, ARoot.class, BRoot.class,
						RootTablePerClass.class, ARootTablePerClass.class, BRootTablePerClass.class,
						RootSingleTable.class, ARootSingleTable.class, BRootSingleTable.class
				);
		// We add the schema expectation as a part of a configuration, and as a last configuration.
		// this way we will only set the expectation only when the entire config was a success:
		setupContext.withConfiguration(
				ignored -> backendMock.expectAnySchema( Car.INDEX ).expectAnySchema( Truck.INDEX )
						.expectAnySchema( BedVehicle.INDEX )
						.expectAnySchema( A.INDEX )
						.expectAnySchema( AA.INDEX )
						.expectAnySchema( BA.INDEX )
						.expectAnySchema( BBA.INDEX )
						.expectAnySchema( AAABA.INDEX )
						.expectAnySchema( Root.INDEX )
						.expectAnySchema( ARoot.INDEX )
						.expectAnySchema( RootTablePerClass.INDEX )
						.expectAnySchema( ARootTablePerClass.INDEX )
						.expectAnySchema( BRootTablePerClass.INDEX )
						.expectAnySchema( RootSingleTable.INDEX )
						.expectAnySchema( ARootSingleTable.INDEX )
		);
		sessionFactory = setupContext.setup();
		// Fixture: id ranges identify the hierarchy each entity belongs to
		// (1-3 cars, 10 truck, 100-101 bed vehicles, 100_00x A-hierarchy, etc.).
		with( sessionFactory ).runInTransaction( session -> {
			session.persist( Car.create( 1L ) );
			session.persist( Car.create( 2L ) );
			session.persist( Car.create( 3L ) );
			session.persist( Truck.create( 10L ) );
			session.persist( BedVehicle.create( 100L ) );
			session.persist( BedVehicle.create( 101L ) );
			session.persist( Van.create( 10_000L ) );
			session.persist( A.create( 100_000L ) );
			session.persist( AA.create( 100_001L ) );
			session.persist( BA.create( 100_002L ) );
			session.persist( ABA.create( 100_003L ) );
			session.persist( BBA.create( 100_004L ) );
			session.persist( AAABA.create( 100_005L ) );
			session.persist( Root.create( 1_000_000L ) );
			session.persist( ARoot.create( 1_000_001L ) );
			session.persist( BRoot.create( 1_000_002L ) );
			session.persist( RootSingleTable.create( 2_000_000L ) );
			session.persist( ARootSingleTable.create( 2_000_001L ) );
			session.persist( BRootSingleTable.create( 2_000_002L ) );
			session.persist( RootTablePerClass.create( 3_000_000L ) );
			session.persist( ARootTablePerClass.create( 3_000_001L ) );
			session.persist( BRootTablePerClass.create( 3_000_002L ) );
		} );
	}
	/**
	 * Restores the original {@code System.out} captured in {@link #setup()},
	 * undoing the per-test redirection done in {@code setUp()}.
	 */
	@AfterAll
	void afterAll() {
		System.setOut( sysOut );
	}
	/**
	 * Per-test setup: redirects {@code System.out} to an in-memory buffer and
	 * hands that buffer to the statement inspector — presumably so the SQL
	 * printed by {@code hibernate.show_sql} can be asserted on later
	 * (confirm in MassIndexerStatementInspector).
	 */
	@BeforeEach
	void setUp() throws IOException {
		ByteArrayOutputStream testSysOut = new ByteArrayOutputStream();
		// autoFlush=false: the inspector reads the buffer contents itself.
		System.setOut( new PrintStream( testSysOut, false, StandardCharsets.UTF_8 ) );
		statementInspector.reset( testSysOut );
	}
	/**
	 * Per-test cleanup: closes the statement inspector, presumably finalizing
	 * the capture started in {@code setUp()} — confirm in MassIndexerStatementInspector.
	 */
	@AfterEach
	void tearDown() throws IOException {
		statementInspector.close();
	}
@Test
void singleEntity() {
with( sessionFactory ).runNoTransaction( session -> {
SearchSession searchSession = Search.session( session );
MassIndexer indexer = searchSession.massIndexer( Car.class );
// add operations on indexes can follow any random order,
// since they are executed by different threads
backendMock.expectWorks(
Car.INDEX, DocumentCommitStrategy.NONE, DocumentRefreshStrategy.NONE
)
.add( "1", b -> b
.field( "id", 1L )
)
.add( "2", b -> b
.field( "id", 2L )
)
.add( "3", b -> b
.field( "id", 3L )
);
// purgeAtStart and mergeSegmentsAfterPurge are enabled by default,
// so we expect 1 purge, 1 mergeSegments and 1 flush calls in this order:
backendMock.expectIndexScaleWorks( Car.INDEX )
.purge()
.mergeSegments()
.flush()
.refresh();
try {
indexer.startAndWait();
}
catch (InterruptedException e) {
fail( "Unexpected InterruptedException: " + e.getMessage() );
}
} );
statementInspector.hasSelects( 3 )
// select count(c1_0.id) from car c1_0
.anyMatch( "select count(_big)?\\([a-z0-9_.]+\\) from car [a-z0-9_.]+" )
// select c1_0.id from Car c1_0
.anyMatch( "select [a-z0-9_.]+ from car [a-z0-9_.]+" )
// select c1_0.id,c1_1.bodyType,c1_2.doorType,c1_0.carHood from Car c1_0 join BaseVehicle c1_1 on c1_0.id=c1_1.id join DooredVehicle c1_2 on c1_0.id=c1_2.id where c1_0.id in (?,?,?)
.anyMatch( "select [a-z0-9_.,]+ from car [a-z0-9_.]+ "
+ "join basevehicle [a-z0-9_.]+ on [a-z0-9_.=]+ "
+ "join dooredvehicle [a-z0-9_.]+ on [a-z0-9_.=]+ "
+ "where .+" );
backendMock.verifyExpectationsMet();
}
@Test
void singleEntity_notTopOne() {
with( sessionFactory ).runNoTransaction( session -> {
SearchSession searchSession = Search.session( session );
MassIndexer indexer = searchSession.massIndexer( BedVehicle.class );
backendMock.expectWorks(
Truck.INDEX, DocumentCommitStrategy.NONE, DocumentRefreshStrategy.NONE
)
.add( "10", b -> b
.field( "id", 10L )
);
backendMock.expectWorks(
BedVehicle.INDEX, DocumentCommitStrategy.NONE, DocumentRefreshStrategy.NONE
)
.add( "100", b -> b
.field( "id", 100L )
)
.add( "101", b -> b
.field( "id", 101L )
);
backendMock.expectIndexScaleWorks( BedVehicle.INDEX )
.purge()
.mergeSegments()
.flush()
.refresh();
backendMock.expectIndexScaleWorks( Truck.INDEX )
.purge()
.mergeSegments()
.flush()
.refresh();
try {
indexer.startAndWait();
}
catch (InterruptedException e) {
fail( "Unexpected InterruptedException: " + e.getMessage() );
}
} );
statementInspector.hasSelects( 3 )
// select count(bv1_0.id) from bedvehicle bv1_0
.anyMatch( "select count(_big)?\\([a-z0-9_.]+\\) from bedvehicle [a-z0-9_.]+" )
// there can be an additional join see https://hibernate.atlassian.net/browse/HHH-18503?focusedCommentId=116667
// select bv1_1.id from bedvehicle bv1_0 join baseentity bv1_1 on bv1_0.id=bv1_1.id
.anyMatch( "select [a-z0-9_.]+ from bedvehicle [a-z0-9_.]+ join baseentity [a-z0-9_.]+ on [a-z0-9_.=]+" )
// select bv1_0.id,bv1_1.type,bv1_2.bodytype,bv1_3.doortype,bv1_0.bedtype,bv1_4.truckroof
// from bedvehicle bv1_0 join baseentity bv1_1 on bv1_0.id=bv1_1.id join basevehicle bv1_2 on bv1_0.id=bv1_2.id join dooredvehicle bv1_3 on bv1_0.id=bv1_3.id left join truck bv1_4 on bv1_0.id=bv1_4.id where bv1_0.id=?
.anyMatch( "select [a-z0-9_.,]+ from bedvehicle [a-z0-9_.]+ "
+ "join baseentity [a-z0-9_.]+ on [a-z0-9_.=]+ "
+ "join basevehicle [a-z0-9_.]+ on [a-z0-9_.=]+ "
+ "join dooredvehicle [a-z0-9_.]+ on [a-z0-9_.=]+ "
+ "left join truck [a-z0-9_.]+ on [a-z0-9_.=]+ "
+ "where .+" );
backendMock.verifyExpectationsMet();
}
@Test
void multipleTopLevelEntities() {
with( sessionFactory ).runNoTransaction( session -> {
SearchSession searchSession = Search.session( session );
MassIndexer indexer = searchSession.massIndexer( Car.class, Truck.class );
// add operations on indexes can follow any random order,
// since they are executed by different threads
backendMock.expectWorks(
Car.INDEX, DocumentCommitStrategy.NONE, DocumentRefreshStrategy.NONE
)
.add( "1", b -> b
.field( "id", 1L )
)
.add( "2", b -> b
.field( "id", 2L )
)
.add( "3", b -> b
.field( "id", 3L )
);
backendMock.expectWorks(
Truck.INDEX, DocumentCommitStrategy.NONE, DocumentRefreshStrategy.NONE
)
.add( "10", b -> b
.field( "id", 10L )
);
// purgeAtStart and mergeSegmentsAfterPurge are enabled by default,
// so we expect 1 purge, 1 mergeSegments and 1 flush calls in this order:
backendMock.expectIndexScaleWorks( Car.INDEX )
.purge()
.mergeSegments()
.flush()
.refresh();
backendMock.expectIndexScaleWorks( Truck.INDEX )
.purge()
.mergeSegments()
.flush()
.refresh();
try {
indexer.startAndWait();
}
catch (InterruptedException e) {
fail( "Unexpected InterruptedException: " + e.getMessage() );
}
} );
statementInspector.hasSelects( 6 )
// select count(c1_0.id) from car c1_0
.anyMatch( "select count(_big)?\\([a-z0-9_.]+\\) from car [a-z0-9_.]+" )
// select c1_0.id from Car c1_0
.anyMatch( "select [a-z0-9_.]+ from car [a-z0-9_.]+" )
// select c1_0.id,c1_1.bodyType,c1_2.doorType,c1_0.carHood from Car c1_0 join BaseVehicle c1_1 on c1_0.id=c1_1.id join DooredVehicle c1_2 on c1_0.id=c1_2.id where c1_0.id in (?,?,?)
.anyMatch( "select [a-z0-9_.,]+ from car [a-z0-9_.]+ "
+ "join basevehicle [a-z0-9_.]+ on [a-z0-9_.=]+ "
+ "join dooredvehicle [a-z0-9_.]+ on [a-z0-9_.=]+ "
+ "where .+" )
// count(t1_0.id) from truck t1_0
.anyMatch( "select count(_big)?\\([a-z0-9_.]+\\) from truck [a-z0-9_.]+" )
// select t1_0.id from truck t1_0
.anyMatch( "select [a-z0-9_.]+ from truck [a-z0-9_.]+" )
// select t1_0.id,t1_1.bodytype,t1_2.doortype,t1_3.bedtype,t1_0.truckroof
// from truck t1_0 join basevehicle t1_1 on t1_0.id=t1_1.id join dooredvehicle t1_2 on t1_0.id=t1_2.id join bedvehicle t1_3 on t1_0.id=t1_3.id where t1_0.id=?
.anyMatch( "select [a-z0-9_.,]+ from truck [a-z0-9_.]+ "
+ "join basevehicle [a-z0-9_.]+ on [a-z0-9_.=]+ "
+ "join dooredvehicle [a-z0-9_.]+ on [a-z0-9_.=]+ "
+ "join bedvehicle [a-z0-9_.]+ on [a-z0-9_.=]+ "
+ "where .+" );
backendMock.verifyExpectationsMet();
}
@Test
void multipleSameBranch() {
with( sessionFactory ).runNoTransaction( session -> {
SearchSession searchSession = Search.session( session );
MassIndexer indexer = searchSession.massIndexer( BedVehicle.class, Truck.class );
// add operations on indexes can follow any random order,
// since they are executed by different threads
backendMock.expectWorks(
Truck.INDEX, DocumentCommitStrategy.NONE, DocumentRefreshStrategy.NONE
)
.add( "10", b -> b
.field( "id", 10L )
);
backendMock.expectWorks(
BedVehicle.INDEX, DocumentCommitStrategy.NONE, DocumentRefreshStrategy.NONE
)
.add( "100", b -> b
.field( "id", 100L )
)
.add( "101", b -> b
.field( "id", 101L )
);
// purgeAtStart and mergeSegmentsAfterPurge are enabled by default,
// so we expect 1 purge, 1 mergeSegments and 1 flush calls in this order:
backendMock.expectIndexScaleWorks( BedVehicle.INDEX )
.purge()
.mergeSegments()
.flush()
.refresh();
backendMock.expectIndexScaleWorks( Truck.INDEX )
.purge()
.mergeSegments()
.flush()
.refresh();
try {
indexer.startAndWait();
}
catch (InterruptedException e) {
fail( "Unexpected InterruptedException: " + e.getMessage() );
}
} );
statementInspector.hasSelects( 3 )
// select count(bv1_0.id) from bedvehicle bv1_0
.anyMatch( "select count(_big)?\\([a-z0-9_.]+\\) from bedvehicle [a-z0-9_.]+" )
// there can be an additional join see https://hibernate.atlassian.net/browse/HHH-18503?focusedCommentId=116667
// select bv1_1.id from bedvehicle bv1_0 join baseentity bv1_1 on bv1_0.id=bv1_1.id
.anyMatch( "select [a-z0-9_.]+ from bedvehicle [a-z0-9_.]+ join baseentity [a-z0-9_.]+ on [a-z0-9_.=]+" )
// select bv1_0.id,bv1_1.type,bv1_2.bodytype,bv1_3.doortype,bv1_0.bedtype,bv1_4.truckroof
// from bedvehicle bv1_0 join baseentity bv1_1 on bv1_0.id=bv1_1.id join basevehicle bv1_2 on bv1_0.id=bv1_2.id join dooredvehicle bv1_3 on bv1_0.id=bv1_3.id left join truck bv1_4 on bv1_0.id=bv1_4.id where bv1_0.id=?
.anyMatch( "select [a-z0-9_.,]+ from bedvehicle [a-z0-9_.]+ "
+ "join baseentity [a-z0-9_.]+ on [a-z0-9_.=]+ "
+ "join basevehicle [a-z0-9_.]+ on [a-z0-9_.=]+ "
+ "join dooredvehicle [a-z0-9_.]+ on [a-z0-9_.=]+ "
+ "left join truck [a-z0-9_.]+ on [a-z0-9_.=]+ "
+ "where .+" );
backendMock.verifyExpectationsMet();
}
@Test
void withMoreConcreteTypeDisabled() {
with( sessionFactory ).runNoTransaction( session -> {
SearchSession searchSession = Search.session( session );
MassIndexer indexer = searchSession.massIndexer( BA.class );
backendMock.expectWorks( BA.INDEX, DocumentCommitStrategy.NONE, DocumentRefreshStrategy.NONE )
.add( "100002", b -> b.field( "id", 100002L ) );
backendMock.expectWorks( BBA.INDEX, DocumentCommitStrategy.NONE, DocumentRefreshStrategy.NONE )
.add( "100004", b -> b.field( "id", 100004L ) );
backendMock.expectWorks( AAABA.INDEX, DocumentCommitStrategy.NONE, DocumentRefreshStrategy.NONE )
.add( "100005", b -> b.field( "id", 100005L ) );
backendMock.expectIndexScaleWorks( BA.INDEX )
.purge()
.mergeSegments()
.flush()
.refresh();
backendMock.expectIndexScaleWorks( BBA.INDEX )
.purge()
.mergeSegments()
.flush()
.refresh();
backendMock.expectIndexScaleWorks( AAABA.INDEX )
.purge()
.mergeSegments()
.flush()
.refresh();
try {
indexer.startAndWait();
}
catch (InterruptedException e) {
fail( "Unexpected InterruptedException: " + e.getMessage() );
}
} );
statementInspector.hasSelects( 9 )
// select count(b1_0.id) from bba b1_0
// select count(a1_0.id) from aaaba a1_0
// select count(b1_0.id) from ba b1_0
// join a b1_1 on b1_0.id=b1_1.id
// where b1_1.type in (?)
.anyMatch( "select count(_big)?\\([a-z0-9_.]+\\) from bba [a-z0-9_.]+" )
.anyMatch( "select count(_big)?\\([a-z0-9_.]+\\) from aaaba [a-z0-9_.]+" )
.anyMatch( "select count(_big)?\\([a-z0-9_.]+\\) from ba [a-z0-9_.]+ "
+ "join a [a-z0-9_.]+ on [a-z0-9_.=]+ "
+ "where .+"
)
// select b1_0.id from bba b1_0
// select a1_0.id from aaaba a1_0
// select b1_0.id from ba b1_0
// join a b1_1 on b1_0.id=b1_1.id
// where b1_1.type in (?)
.anyMatch( "select [a-z0-9_.]+ from bba [a-z0-9_.]+" )
.anyMatch( "select [a-z0-9_.]+ from aaaba [a-z0-9_.]+" )
.anyMatch( "select [a-z0-9_.]+ from ba [a-z0-9_.]+ "
+ "join a [a-z0-9_.]+ on [a-z0-9_.=]+ "
+ "where .+"
)
// select a1_0.id from aaaba a1_0 where a1_0.id=?
// select b1_0.id from bba b1_0 where b1_0.id=?
// select b1_0.id,b1_1.type from ba b1_0 join a b1_1 on b1_0.id=b1_1.id where b1_0.id=?
.anyMatch( "select [a-z0-9_.,]+ from bba [a-z0-9_.]+ where .+" )
.anyMatch( "select [a-z0-9_.,]+ from aaaba [a-z0-9_.]+ where .+" )
.anyMatch( "select [a-z0-9_.,]+ from ba [a-z0-9_.]+ "
+ "join a [a-z0-9_.]+ on [a-z0-9_.=]+ "
+ "where .+"
);
backendMock.verifyExpectationsMet();
}
@Test
void fromRoot() {
with( sessionFactory ).runNoTransaction( session -> {
SearchSession searchSession = Search.session( session );
MassIndexer indexer = searchSession.massIndexer( Root.class );
// add operations on indexes can follow any random order,
// since they are executed by different threads
backendMock.expectWorks(
Root.INDEX, DocumentCommitStrategy.NONE, DocumentRefreshStrategy.NONE
)
.add( "1000000", b -> b
.field( "id", 1000000L )
);
backendMock.expectWorks(
ARoot.INDEX, DocumentCommitStrategy.NONE, DocumentRefreshStrategy.NONE
)
.add( "1000001", b -> b
.field( "id", 1000001L )
);
// purgeAtStart and mergeSegmentsAfterPurge are enabled by default,
// so we expect 1 purge, 1 mergeSegments and 1 flush calls in this order:
backendMock.expectIndexScaleWorks( Root.INDEX )
.purge()
.mergeSegments()
.flush()
.refresh();
backendMock.expectIndexScaleWorks( ARoot.INDEX )
.purge()
.mergeSegments()
.flush()
.refresh();
try {
indexer.startAndWait();
}
catch (InterruptedException e) {
fail( "Unexpected InterruptedException: " + e.getMessage() );
}
} );
statementInspector.hasSelects( 6 )
// select count(a1_0.id) from aroot a1_0
// select count(r1_0.id) from root r1_0
// where r1_0.type in (?)
.anyMatch( "select count(_big)?\\([a-z0-9_.]+\\) from aroot [a-z0-9_.]+" )
.anyMatch( "select count(_big)?\\([a-z0-9_.]+\\) from root [a-z0-9_.]+ "
+ "where .+"
)
// select a1_0.id from aroot a1_0
// select r1_0.id from root r1_0 where r1_0.type in (?)
.anyMatch( "select [a-z0-9_.]+ from aroot [a-z0-9_.]+" )
.anyMatch( "select [a-z0-9_.]+ from root [a-z0-9_.]+ "
+ "where .+"
)
// select a1_0.id from aroot a1_0 where a1_0.id=?
// select r1_0.id,r1_0.type from root r1_0 where r1_0.id=?
.anyMatch( "select [a-z0-9_.,]+ from aroot [a-z0-9_.]+" )
.anyMatch( "select [a-z0-9_.,]+ from root [a-z0-9_.]+ "
+ "where .+"
);
backendMock.verifyExpectationsMet();
}
@Test
void tablePerClass() {
with( sessionFactory ).runNoTransaction( session -> {
SearchSession searchSession = Search.session( session );
MassIndexer indexer = searchSession.massIndexer( RootTablePerClass.class );
// add operations on indexes can follow any random order,
// since they are executed by different threads
backendMock.expectWorks(
RootTablePerClass.INDEX, DocumentCommitStrategy.NONE, DocumentRefreshStrategy.NONE )
.add( "3000000", b -> b.field( "id", 3000000L ) );
backendMock.expectWorks(
ARootTablePerClass.INDEX, DocumentCommitStrategy.NONE, DocumentRefreshStrategy.NONE )
.add( "3000001", b -> b.field( "id", 3000001L ) );
backendMock.expectWorks(
BRootTablePerClass.INDEX, DocumentCommitStrategy.NONE, DocumentRefreshStrategy.NONE )
.add( "3000002", b -> b.field( "id", 3000002L ) );
// purgeAtStart and mergeSegmentsAfterPurge are enabled by default,
// so we expect 1 purge, 1 mergeSegments and 1 flush calls in this order:
backendMock.expectIndexScaleWorks( RootTablePerClass.INDEX )
.purge()
.mergeSegments()
.flush()
.refresh();
backendMock.expectIndexScaleWorks( ARootTablePerClass.INDEX )
.purge()
.mergeSegments()
.flush()
.refresh();
backendMock.expectIndexScaleWorks( BRootTablePerClass.INDEX )
.purge()
.mergeSegments()
.flush()
.refresh();
try {
indexer.startAndWait();
}
catch (InterruptedException e) {
fail( "Unexpected InterruptedException: " + e.getMessage() );
}
} );
statementInspector.hasSelects( 9 )
// select count(atpc1_0.id) from aroottableperclass atpc1_0
// select count(btpc1_0.id) from broottableperclass btpc1_0
// select count(rtpc1_0.id) from (select id, 0 as clazz_ from roottableperclass union all select id, 1 as clazz_ from aroottableperclass union all select id, 2 as clazz_ from broottableperclass) rtpc1_0 where rtpc1_0.clazz_ in (?)
.anyMatch( "select count(_big)?\\([a-z0-9_.]+\\) from aroottableperclass [a-z0-9_.]+" )
.anyMatch( "select count(_big)?\\([a-z0-9_.]+\\) from broottableperclass [a-z0-9_.]+" )
.anyMatch( "select count(_big)?\\([a-z0-9_.]+\\) from \\(select .+union all.+\\) [a-z0-9_.]+ where .+" )
// select btpc1_0.id from broottableperclass btpc1_0
// select atpc1_0.id from aroottableperclass atpc1_0
// select rtpc1_0.id from (select id, 0 as clazz_ from roottableperclass union all select id, 1 as clazz_ from aroottableperclass union all select id, 2 as clazz_ from broottableperclass) rtpc1_0 where rtpc1_0.clazz_ in (?)
.anyMatch( "select [a-z0-9_.]+ from aroottableperclass [a-z0-9_.]+" )
.anyMatch( "select [a-z0-9_.]+ from broottableperclass [a-z0-9_.]+" )
.anyMatch( "select [a-z0-9_.]+ from \\(select .+union all.+\\) [a-z0-9_.]+ where .+" )
// select btpc1_0.id from broottableperclass btpc1_0 where btpc1_0.id=?
// select atpc1_0.id from aroottableperclass atpc1_0 where atpc1_0.id=?
// select rtpc1_0.id,rtpc1_0.clazz_ from (select id, 0 as clazz_ from roottableperclass union all select id, 1 as clazz_ from aroottableperclass union all select id, 2 as clazz_ from broottableperclass) rtpc1_0 where rtpc1_0.id=?
.anyMatch( "select [a-z0-9_.]+ from aroottableperclass [a-z0-9_.]+ where .+" )
.anyMatch( "select [a-z0-9_.]+ from broottableperclass [a-z0-9_.]+ where .+" )
.anyMatch( "select [a-z0-9_.]+ from \\(select .+union all.+\\) [a-z0-9_.]+ where .+" );
backendMock.verifyExpectationsMet();
}
@Test
void singleTable() {
with( sessionFactory ).runNoTransaction( session -> {
SearchSession searchSession = Search.session( session );
MassIndexer indexer = searchSession.massIndexer( RootSingleTable.class );
// add operations on indexes can follow any random order,
// since they are executed by different threads
backendMock.expectWorks(
RootSingleTable.INDEX, DocumentCommitStrategy.NONE, DocumentRefreshStrategy.NONE )
.add( "2000000", b -> b.field( "id", 2000000L ) );
backendMock.expectWorks(
ARootSingleTable.INDEX, DocumentCommitStrategy.NONE, DocumentRefreshStrategy.NONE )
.add( "2000001", b -> b.field( "id", 2000001L ) );
// purgeAtStart and mergeSegmentsAfterPurge are enabled by default,
// so we expect 1 purge, 1 mergeSegments and 1 flush calls in this order:
backendMock.expectIndexScaleWorks( RootSingleTable.INDEX )
.purge()
.mergeSegments()
.flush()
.refresh();
backendMock.expectIndexScaleWorks( ARootSingleTable.INDEX )
.purge()
.mergeSegments()
.flush()
.refresh();
try {
indexer.startAndWait();
}
catch (InterruptedException e) {
fail( "Unexpected InterruptedException: " + e.getMessage() );
}
} );
statementInspector.hasSelects( 3 )
// select count(rst1_0.id) from rootsingletable rst1_0 where rst1_0.dtype in (?,?)
.anyMatch( "select count(_big)?\\([a-z0-9_.]+\\) from rootsingletable [a-z0-9_.]+ where .+" )
// select rst1_0.id from rootsingletable rst1_0 where rst1_0.dtype in (?,?)
.anyMatch( "select [a-z0-9_.]+ from rootsingletable [a-z0-9_.]+ where .+" )
// select rst1_0.id,rst1_0.dtype from rootsingletable rst1_0 where rst1_0.id in (?,?)
.anyMatch( "select [a-z0-9_.,]+ from rootsingletable [a-z0-9_.]+ where .+" );
backendMock.verifyExpectationsMet();
}
@Entity(name = "Car")
@Indexed(index = Car.INDEX)
public static class Car extends DooredVehicle {
public static final String INDEX = "car";
public String carHood;
public static Car create(Long id) {
Car car = new Car();
car.id = id;
return car;
}
}
@Entity(name = "Truck")
@Indexed(index = Truck.INDEX)
public static class Truck extends BedVehicle {
public static final String INDEX = "truck";
public String truckRoof;
public static Truck create(Long id) {
Truck truck = new Truck();
truck.id = id;
return truck;
}
}
@Entity(name = "Van")
public static class Van extends DooredVehicle {
public static Van create(long id) {
Van van = new Van();
van.id = id;
return van;
}
}
	// Non-indexed intermediate vehicle type (no @Indexed annotation here).
	// NOTE(review): the private INDEX constant appears unused within this class
	// and the class is not annotated @Indexed — possibly a leftover; confirm
	// (the enclosing class could still reference it) before removing.
	@Entity(name = "DooredVehicle")
	public static class DooredVehicle extends BaseVehicle {
		private static final String INDEX = "DooredVehicle";
		public String doorType;
	}
@Entity(name = "BedVehicle")
@Indexed(index = BedVehicle.INDEX)
public static class BedVehicle extends DooredVehicle {
private static final String INDEX = "BedVehicle";
public String bedType;
public static BedVehicle create(Long id) {
BedVehicle vehicle = new BedVehicle();
vehicle.id = id;
return vehicle;
}
}
	// Intermediate vehicle type carrying the shared bodyType attribute.
	@Entity(name = "BaseVehicle")
	public static class BaseVehicle extends BaseEntity {
		public String bodyType;
	}
	// Root of the vehicle hierarchy: JOINED inheritance with a "type"
	// discriminator column and an indexed Long id shared by every subtype.
	@Entity(name = "BaseEntity")
	@DiscriminatorColumn(name = "type")
	@Inheritance(strategy = InheritanceType.JOINED)
	public static class BaseEntity implements Serializable {
		@Id
		@DocumentId
		@GenericField(aggregable = Aggregable.YES, sortable = Sortable.YES, searchable = Searchable.YES)
		public Long id;
	}
@Entity(name = A.INDEX)
@DiscriminatorColumn(name = "type")
@Inheritance(strategy = InheritanceType.JOINED)
@Indexed(index = A.INDEX)
public static class A implements Serializable {
private static final String INDEX = "A";
@Id
@DocumentId
@GenericField
public Long id;
public static A create(Long id) {
A e = new A();
e.id = id;
return e;
}
}
@Entity(name = "AA")
@Indexed(index = AA.INDEX)
public static class AA extends A {
private static final String INDEX = "AA";
public static AA create(Long id) {
AA e = new AA();
e.id = id;
return e;
}
}
@Entity(name = "BA")
@Indexed(index = BA.INDEX)
public static class BA extends A {
private static final String INDEX = "BA";
public static BA create(Long id) {
BA e = new BA();
e.id = id;
return e;
}
}
@Indexed(enabled = false)
@Entity(name = "ABA")
public static class ABA extends BA {
public static ABA create(Long id) {
ABA e = new ABA();
e.id = id;
return e;
}
}
	// Abstract intermediate type: it cannot be instantiated itself, so its
	// factory deliberately builds the concrete parent ABA instead.
	// NOTE(review): AABA.create returning an ABA looks intentional for these
	// inheritance tests — confirm.
	public abstract static class AABA extends ABA {
		public static ABA create(Long id) {
			ABA e = new ABA();
			e.id = id;
			return e;
		}
	}
@Indexed(index = AAABA.INDEX)
@Entity(name = "AAABA")
public static class AAABA extends AABA {
private static final String INDEX = "AAABA";
public static AABA create(Long id) {
AAABA e = new AAABA();
e.id = id;
return e;
}
}
@Entity(name = "BBA")
@Indexed(index = BBA.INDEX)
public static class BBA extends BA {
private static final String INDEX = "BBA";
public static BBA create(Long id) {
BBA e = new BBA();
e.id = id;
return e;
}
}
@Entity(name = Root.INDEX)
@DiscriminatorColumn(name = "type")
@Inheritance(strategy = InheritanceType.JOINED)
@Indexed(index = Root.INDEX)
public static class Root implements Serializable {
private static final String INDEX = "Root";
@Id
@DocumentId
@GenericField
public Long id;
public static Root create(Long id) {
Root e = new Root();
e.id = id;
return e;
}
}
@Entity(name = ARoot.INDEX)
@Indexed(index = ARoot.INDEX)
public static class ARoot extends Root {
private static final String INDEX = "ARoot";
public static Root create(Long id) {
Root e = new ARoot();
e.id = id;
return e;
}
}
@Entity(name = BRoot.INDEX)
@Indexed(enabled = false)
public static class BRoot extends Root {
private static final String INDEX = "BRoot";
public static Root create(Long id) {
Root e = new BRoot();
e.id = id;
return e;
}
}
@Entity(name = RootTablePerClass.INDEX)
@Inheritance(strategy = InheritanceType.TABLE_PER_CLASS)
@Indexed(index = RootTablePerClass.INDEX)
public static class RootTablePerClass implements Serializable {
private static final String INDEX = "RootTablePerClass";
@Id
@DocumentId
@GenericField
public Long id;
public static RootTablePerClass create(Long id) {
RootTablePerClass e = new RootTablePerClass();
e.id = id;
return e;
}
}
@Entity(name = ARootTablePerClass.INDEX)
@Indexed(index = ARootTablePerClass.INDEX)
public static class ARootTablePerClass extends RootTablePerClass {
private static final String INDEX = "ARootTablePerClass";
public static ARootTablePerClass create(Long id) {
ARootTablePerClass e = new ARootTablePerClass();
e.id = id;
return e;
}
}
@Entity(name = BRootTablePerClass.INDEX)
@Indexed(index = BRootTablePerClass.INDEX)
public static class BRootTablePerClass extends RootTablePerClass {
private static final String INDEX = "BRootTablePerClass";
public static BRootTablePerClass create(Long id) {
BRootTablePerClass e = new BRootTablePerClass();
e.id = id;
return e;
}
}
@Entity(name = RootSingleTable.INDEX)
@Inheritance(strategy = InheritanceType.SINGLE_TABLE)
@Indexed(index = RootSingleTable.INDEX)
public static class RootSingleTable implements Serializable {
private static final String INDEX = "RootSingleTable";
@Id
@DocumentId
@GenericField
public Long id;
public static RootSingleTable create(Long id) {
RootSingleTable e = new RootSingleTable();
e.id = id;
return e;
}
}
@Entity(name = ARootSingleTable.INDEX)
@Indexed(index = ARootSingleTable.INDEX)
public static class ARootSingleTable extends RootSingleTable {
private static final String INDEX = "ARootSingleTable";
public static ARootSingleTable create(Long id) {
ARootSingleTable e = new ARootSingleTable();
e.id = id;
return e;
}
}
@Entity(name = BRootSingleTable.INDEX)
@Indexed(enabled = false)
public static class BRootSingleTable extends RootSingleTable {
private static final String INDEX = "BRootSingleTable";
public static BRootSingleTable create(Long id) {
BRootSingleTable e = new BRootSingleTable();
e.id = id;
return e;
}
}
public static class MassIndexerStatementInspector implements StatementInspector {
private Set<String> selects;
private ByteArrayOutputStream outputStream;
@Override
public String inspect(String sql) {
if ( sql.toLowerCase( Locale.ROOT ).contains( "select" ) ) {
selects.add( sql );
}
return sql;
}
public void reset(ByteArrayOutputStream outputStream) throws IOException {
close();
this.outputStream = outputStream;
}
public MassIndexerStatementInspector hasSelects(int size) {
assertThat( selects() ).hasSize( size );
return this;
}
public MassIndexerStatementInspector anyMatch(String pattern) {
assertThat( selects() ).anyMatch( statement -> statement.matches( pattern ) );
return this;
}
private Set<String> selects() {
if ( selects == null ) {
String loggedQueries = outputStream.toString( StandardCharsets.UTF_8 );
selects = loggedQueries.lines()
.filter( log -> log.contains( "select" ) )
.map( log -> log.replace( "Hibernate: ", "" ).toLowerCase( Locale.ROOT ) )
.collect( Collectors.toSet() );
}
return selects;
}
public void close() throws IOException {
if ( selects != null ) {
selects.clear();
selects = null;
}
if ( outputStream != null ) {
outputStream.close();
}
}
}
}
|
hibernate/hibernate-ogm | 35,136 | core/src/main/java/org/hibernate/ogm/jdbc/impl/TupleAsMapResultSet.java | /*
* Hibernate OGM, Domain model persistence for NoSQL datastores
*
* License: GNU Lesser General Public License (LGPL), version 2.1 or later
* See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
*/
package org.hibernate.ogm.jdbc.impl;
import java.io.InputStream;
import java.io.Reader;
import java.math.BigDecimal;
import java.net.URL;
import java.sql.Array;
import java.sql.Blob;
import java.sql.Clob;
import java.sql.Date;
import java.sql.NClob;
import java.sql.Ref;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.RowId;
import java.sql.SQLException;
import java.sql.SQLWarning;
import java.sql.SQLXML;
import java.sql.Statement;
import java.sql.Time;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.List;
import java.util.Map;
import org.hibernate.ogm.model.spi.Tuple;
/**
* Implements JDBC's ResultSet interface but is essentially a wrapper for
 * propagating a list of {@code Map<String, Object>} that each represent a tuple.
* <p>
* The ResultSet implementation is close to none (don't use it as a regular ResultSet)
 * - it currently only implements next() for moving along the set of tuples
* - implements unwrap / isWrapperFor for TupleAsMapResultSet.class
* <p>
* Otherwise, to add a tuple, use addTuple.
* To read the current tuple, use getTuple()
* To move forward, use next() (throws a SQLException, I know that sucks)
*
* @author Emmanuel Bernard
*/
public class TupleAsMapResultSet implements ResultSet {
private List<Tuple> tuples = new ArrayList<Tuple>();
private int index = -1;
	/**
	 * Appends a tuple to the positional list exposed through
	 * {@link #next()} / {@link #getTuple()}.
	 *
	 * @param tuple the tuple to add
	 */
	public void addTuple(Tuple tuple) {
		this.tuples.add( tuple );
	}
	/**
	 * Replaces the whole tuple list.
	 * NOTE(review): the cursor position is not reset here — presumably callers
	 * set the tuples before iterating; confirm against callers.
	 *
	 * @param tuples the new tuples
	 */
	public void setTuples(List<Tuple> tuples) {
		this.tuples = tuples;
	}
	/**
	 * Retrieves the tuple at the current cursor position.
	 * Only valid after a successful {@link #next()}: before that the index is
	 * -1 and this throws {@link IndexOutOfBoundsException}.
	 *
	 * @return the current tuple
	 */
	public Tuple getTuple() {
		return tuples.get( index );
	}
@Override
public boolean next() throws SQLException {
int currentIndex = index + 1;
if ( currentIndex < tuples.size() ) {
index = currentIndex;
return true;
}
else {
return false;
}
}
	@Override
	public void beforeFirst() throws SQLException {
		// Rewind the cursor to before the first tuple (same state as creation).
		index = -1;
	}
@Override
public <T> T unwrap(Class<T> iface) throws SQLException {
if ( iface == this.getClass() ) {
return (T) this;
}
throw new SQLException( "Cannot convert to " + iface );
}
	@Override
	public void close() throws SQLException {
		// Releases the tuples; safe to call repeatedly. The cursor index is
		// deliberately left untouched.
		tuples.clear();
	}
@Override
public boolean isWrapperFor(Class<?> iface) throws SQLException {
return ( iface == this.getClass() ) ? true : false;
}
	@Override
	public boolean wasNull() throws SQLException {
		// Stub: always reports non-null. NOTE(review): a caller reading a SQL
		// NULL through this wrapper would be misled — confirm nothing relies
		// on wasNull() semantics here.
		return false; //To change body of implemented methods use File | Settings | File Templates.
	}
@Override
public String getString(int columnIndex) throws SQLException {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public boolean getBoolean(int columnIndex) throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public byte getByte(int columnIndex) throws SQLException {
return 0; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public short getShort(int columnIndex) throws SQLException {
return 0; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public int getInt(int columnIndex) throws SQLException {
return 0; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public long getLong(int columnIndex) throws SQLException {
return 0; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public float getFloat(int columnIndex) throws SQLException {
return 0; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public double getDouble(int columnIndex) throws SQLException {
return 0; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public BigDecimal getBigDecimal(int columnIndex, int scale) throws SQLException {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public byte[] getBytes(int columnIndex) throws SQLException {
return new byte[0]; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public Date getDate(int columnIndex) throws SQLException {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public Time getTime(int columnIndex) throws SQLException {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public Timestamp getTimestamp(int columnIndex) throws SQLException {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public InputStream getAsciiStream(int columnIndex) throws SQLException {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public InputStream getUnicodeStream(int columnIndex) throws SQLException {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public InputStream getBinaryStream(int columnIndex) throws SQLException {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public String getString(String columnLabel) throws SQLException {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public boolean getBoolean(String columnLabel) throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public byte getByte(String columnLabel) throws SQLException {
return 0; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public short getShort(String columnLabel) throws SQLException {
return 0; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public int getInt(String columnLabel) throws SQLException {
return 0; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public long getLong(String columnLabel) throws SQLException {
return 0; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public float getFloat(String columnLabel) throws SQLException {
return 0; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public double getDouble(String columnLabel) throws SQLException {
return 0; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public BigDecimal getBigDecimal(String columnLabel, int scale) throws SQLException {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public byte[] getBytes(String columnLabel) throws SQLException {
return new byte[0]; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public Date getDate(String columnLabel) throws SQLException {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public Time getTime(String columnLabel) throws SQLException {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public Timestamp getTimestamp(String columnLabel) throws SQLException {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public InputStream getAsciiStream(String columnLabel) throws SQLException {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public InputStream getUnicodeStream(String columnLabel) throws SQLException {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public InputStream getBinaryStream(String columnLabel) throws SQLException {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public SQLWarning getWarnings() throws SQLException {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public void clearWarnings() throws SQLException {
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
public String getCursorName() throws SQLException {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public ResultSetMetaData getMetaData() throws SQLException {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public Object getObject(int columnIndex) throws SQLException {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public Object getObject(String columnLabel) throws SQLException {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public int findColumn(String columnLabel) throws SQLException {
return 0; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public Reader getCharacterStream(int columnIndex) throws SQLException {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public Reader getCharacterStream(String columnLabel) throws SQLException {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public BigDecimal getBigDecimal(int columnIndex) throws SQLException {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public BigDecimal getBigDecimal(String columnLabel) throws SQLException {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public boolean isBeforeFirst() throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public boolean isAfterLast() throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public boolean isFirst() throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public boolean isLast() throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public void afterLast() throws SQLException {
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
public boolean first() throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public boolean last() throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public int getRow() throws SQLException {
return 0; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public boolean absolute(int row) throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public boolean relative(int rows) throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public boolean previous() throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public void setFetchDirection(int direction) throws SQLException {
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
public int getFetchDirection() throws SQLException {
return 0; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public void setFetchSize(int rows) throws SQLException {
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
public int getFetchSize() throws SQLException {
return 0; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public int getType() throws SQLException {
return 0; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public int getConcurrency() throws SQLException {
return 0; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public boolean rowUpdated() throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public boolean rowInserted() throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public boolean rowDeleted() throws SQLException {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public void updateNull(int columnIndex) throws SQLException {
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
public void updateBoolean(int columnIndex, boolean x) throws SQLException {
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
public void updateByte(int columnIndex, byte x) throws SQLException {
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
public void updateShort(int columnIndex, short x) throws SQLException {
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
public void updateInt(int columnIndex, int x) throws SQLException {
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
public void updateLong(int columnIndex, long x) throws SQLException {
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
public void updateFloat(int columnIndex, float x) throws SQLException {
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
public void updateDouble(int columnIndex, double x) throws SQLException {
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
public void updateBigDecimal(int columnIndex, BigDecimal x) throws SQLException {
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
public void updateString(int columnIndex, String x) throws SQLException {
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
public void updateBytes(int columnIndex, byte[] x) throws SQLException {
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
public void updateDate(int columnIndex, Date x) throws SQLException {
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
public void updateTime(int columnIndex, Time x) throws SQLException {
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
public void updateTimestamp(int columnIndex, Timestamp x) throws SQLException {
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
public void updateAsciiStream(int columnIndex, InputStream x, int length) throws SQLException {
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
public void updateBinaryStream(int columnIndex, InputStream x, int length) throws SQLException {
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
public void updateCharacterStream(int columnIndex, Reader x, int length) throws SQLException {
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
public void updateObject(int columnIndex, Object x, int scaleOrLength) throws SQLException {
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
public void updateObject(int columnIndex, Object x) throws SQLException {
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
public void updateNull(String columnLabel) throws SQLException {
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
public void updateBoolean(String columnLabel, boolean x) throws SQLException {
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
public void updateByte(String columnLabel, byte x) throws SQLException {
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
public void updateShort(String columnLabel, short x) throws SQLException {
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
public void updateInt(String columnLabel, int x) throws SQLException {
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
public void updateLong(String columnLabel, long x) throws SQLException {
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
public void updateFloat(String columnLabel, float x) throws SQLException {
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
public void updateDouble(String columnLabel, double x) throws SQLException {
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
public void updateBigDecimal(String columnLabel, BigDecimal x) throws SQLException {
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
public void updateString(String columnLabel, String x) throws SQLException {
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
public void updateBytes(String columnLabel, byte[] x) throws SQLException {
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
public void updateDate(String columnLabel, Date x) throws SQLException {
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
public void updateTime(String columnLabel, Time x) throws SQLException {
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
public void updateTimestamp(String columnLabel, Timestamp x) throws SQLException {
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
public void updateAsciiStream(String columnLabel, InputStream x, int length) throws SQLException {
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
public void updateBinaryStream(String columnLabel, InputStream x, int length) throws SQLException {
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
public void updateCharacterStream(String columnLabel, Reader reader, int length) throws SQLException {
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
public void updateObject(String columnLabel, Object x, int scaleOrLength) throws SQLException {
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
public void updateObject(String columnLabel, Object x) throws SQLException {
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
public void insertRow() throws SQLException {
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
public void updateRow() throws SQLException {
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
public void deleteRow() throws SQLException {
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
public void refreshRow() throws SQLException {
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
public void cancelRowUpdates() throws SQLException {
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
public void moveToInsertRow() throws SQLException {
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
public void moveToCurrentRow() throws SQLException {
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
public Statement getStatement() throws SQLException {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public Object getObject(int columnIndex, Map<String, Class<?>> map) throws SQLException {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public Ref getRef(int columnIndex) throws SQLException {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public Blob getBlob(int columnIndex) throws SQLException {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public Clob getClob(int columnIndex) throws SQLException {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
// ---- Unimplemented java.sql.ResultSet getters ----
// NOTE(review): these stubs return null instead of throwing
// SQLFeatureNotSupportedException, so callers cannot distinguish
// "feature unsupported" from a genuine SQL NULL -- confirm intended.

@Override
public Array getArray(int columnIndex) throws SQLException {
    return null; // Stub: not implemented.
}

@Override
public Object getObject(String columnLabel, Map<String, Class<?>> map) throws SQLException {
    return null; // Stub: custom type mapping is not supported.
}

@Override
public Ref getRef(String columnLabel) throws SQLException {
    return null; // Stub: not implemented.
}

@Override
public Blob getBlob(String columnLabel) throws SQLException {
    return null; // Stub: not implemented.
}

@Override
public Clob getClob(String columnLabel) throws SQLException {
    return null; // Stub: not implemented.
}

@Override
public Array getArray(String columnLabel) throws SQLException {
    return null; // Stub: not implemented.
}

@Override
public Date getDate(int columnIndex, Calendar cal) throws SQLException {
    return null; // Stub: Calendar-based date retrieval is not implemented.
}

@Override
public Date getDate(String columnLabel, Calendar cal) throws SQLException {
    return null; // Stub: Calendar-based date retrieval is not implemented.
}

@Override
public Time getTime(int columnIndex, Calendar cal) throws SQLException {
    return null; // Stub: Calendar-based time retrieval is not implemented.
}

@Override
public Time getTime(String columnLabel, Calendar cal) throws SQLException {
    return null; // Stub: Calendar-based time retrieval is not implemented.
}

@Override
public Timestamp getTimestamp(int columnIndex, Calendar cal) throws SQLException {
    return null; // Stub: Calendar-based timestamp retrieval is not implemented.
}

@Override
public Timestamp getTimestamp(String columnLabel, Calendar cal) throws SQLException {
    return null; // Stub: Calendar-based timestamp retrieval is not implemented.
}

@Override
public URL getURL(int columnIndex) throws SQLException {
    return null; // Stub: not implemented.
}

@Override
public URL getURL(String columnLabel) throws SQLException {
    return null; // Stub: not implemented.
}
// ---- Unimplemented java.sql.ResultSet updaters and state accessors ----
// The update* methods below are silent no-ops: no exception is thrown and
// no data is modified.

@Override
public void updateRef(int columnIndex, Ref x) throws SQLException {
    // No-op stub: updates are not supported.
}

@Override
public void updateRef(String columnLabel, Ref x) throws SQLException {
    // No-op stub: updates are not supported.
}

@Override
public void updateBlob(int columnIndex, Blob x) throws SQLException {
    // No-op stub: updates are not supported.
}

@Override
public void updateBlob(String columnLabel, Blob x) throws SQLException {
    // No-op stub: updates are not supported.
}

@Override
public void updateClob(int columnIndex, Clob x) throws SQLException {
    // No-op stub: updates are not supported.
}

@Override
public void updateClob(String columnLabel, Clob x) throws SQLException {
    // No-op stub: updates are not supported.
}

@Override
public void updateArray(int columnIndex, Array x) throws SQLException {
    // No-op stub: updates are not supported.
}

@Override
public void updateArray(String columnLabel, Array x) throws SQLException {
    // No-op stub: updates are not supported.
}

@Override
public RowId getRowId(int columnIndex) throws SQLException {
    return null; // Stub: not implemented.
}

@Override
public RowId getRowId(String columnLabel) throws SQLException {
    return null; // Stub: not implemented.
}

@Override
public void updateRowId(int columnIndex, RowId x) throws SQLException {
    // No-op stub: updates are not supported.
}

@Override
public void updateRowId(String columnLabel, RowId x) throws SQLException {
    // No-op stub: updates are not supported.
}

@Override
public int getHoldability() throws SQLException {
    // Stub: 0 is not a valid holdability value (the valid constants are
    // ResultSet.HOLD_CURSORS_OVER_COMMIT / CLOSE_CURSORS_AT_COMMIT) --
    // callers must not rely on this return value.
    return 0; //To change body of implemented methods use File | Settings | File Templates.
}

@Override
public boolean isClosed() throws SQLException {
    // Stub: always reports the result set as open, even after close().
    // NOTE(review): confirm no caller uses this for lifecycle decisions.
    return false; //To change body of implemented methods use File | Settings | File Templates.
}

@Override
public void updateNString(int columnIndex, String nString) throws SQLException {
    // No-op stub: updates are not supported.
}

@Override
public void updateNString(String columnLabel, String nString) throws SQLException {
    // No-op stub: updates are not supported.
}

@Override
public void updateNClob(int columnIndex, NClob nClob) throws SQLException {
    // No-op stub: updates are not supported.
}

@Override
public void updateNClob(String columnLabel, NClob nClob) throws SQLException {
    // No-op stub: updates are not supported.
}

@Override
public NClob getNClob(int columnIndex) throws SQLException {
    return null; // Stub: not implemented.
}

@Override
public NClob getNClob(String columnLabel) throws SQLException {
    return null; // Stub: not implemented.
}

@Override
public SQLXML getSQLXML(int columnIndex) throws SQLException {
    return null; // Stub: not implemented.
}

@Override
public SQLXML getSQLXML(String columnLabel) throws SQLException {
    return null; // Stub: not implemented.
}

@Override
public void updateSQLXML(int columnIndex, SQLXML xmlObject) throws SQLException {
    // No-op stub: updates are not supported.
}

@Override
public void updateSQLXML(String columnLabel, SQLXML xmlObject) throws SQLException {
    // No-op stub: updates are not supported.
}

@Override
public String getNString(int columnIndex) throws SQLException {
    return null; // Stub: not implemented.
}

@Override
public String getNString(String columnLabel) throws SQLException {
    return null; // Stub: not implemented.
}

@Override
public Reader getNCharacterStream(int columnIndex) throws SQLException {
    return null; // Stub: not implemented.
}

@Override
public Reader getNCharacterStream(String columnLabel) throws SQLException {
    return null; // Stub: not implemented.
}
// ---- Unimplemented java.sql.ResultSet stream updaters ----
// All stream-based update methods (with and without an explicit length) are
// silent no-ops; the supplied streams/readers are neither read nor closed.

@Override
public void updateNCharacterStream(int columnIndex, Reader x, long length) throws SQLException {
    // No-op stub: stream updates are not supported.
}

@Override
public void updateNCharacterStream(String columnLabel, Reader reader, long length) throws SQLException {
    // No-op stub: stream updates are not supported.
}

@Override
public void updateAsciiStream(int columnIndex, InputStream x, long length) throws SQLException {
    // No-op stub: stream updates are not supported.
}

@Override
public void updateBinaryStream(int columnIndex, InputStream x, long length) throws SQLException {
    // No-op stub: stream updates are not supported.
}

@Override
public void updateCharacterStream(int columnIndex, Reader x, long length) throws SQLException {
    // No-op stub: stream updates are not supported.
}

@Override
public void updateAsciiStream(String columnLabel, InputStream x, long length) throws SQLException {
    // No-op stub: stream updates are not supported.
}

@Override
public void updateBinaryStream(String columnLabel, InputStream x, long length) throws SQLException {
    // No-op stub: stream updates are not supported.
}

@Override
public void updateCharacterStream(String columnLabel, Reader reader, long length) throws SQLException {
    // No-op stub: stream updates are not supported.
}

@Override
public void updateBlob(int columnIndex, InputStream inputStream, long length) throws SQLException {
    // No-op stub: stream updates are not supported.
}

@Override
public void updateBlob(String columnLabel, InputStream inputStream, long length) throws SQLException {
    // No-op stub: stream updates are not supported.
}

@Override
public void updateClob(int columnIndex, Reader reader, long length) throws SQLException {
    // No-op stub: stream updates are not supported.
}

@Override
public void updateClob(String columnLabel, Reader reader, long length) throws SQLException {
    // No-op stub: stream updates are not supported.
}

@Override
public void updateNClob(int columnIndex, Reader reader, long length) throws SQLException {
    // No-op stub: stream updates are not supported.
}

@Override
public void updateNClob(String columnLabel, Reader reader, long length) throws SQLException {
    // No-op stub: stream updates are not supported.
}

@Override
public void updateNCharacterStream(int columnIndex, Reader x) throws SQLException {
    // No-op stub: stream updates are not supported.
}

@Override
public void updateNCharacterStream(String columnLabel, Reader reader) throws SQLException {
    // No-op stub: stream updates are not supported.
}

@Override
public void updateAsciiStream(int columnIndex, InputStream x) throws SQLException {
    // No-op stub: stream updates are not supported.
}

@Override
public void updateBinaryStream(int columnIndex, InputStream x) throws SQLException {
    // No-op stub: stream updates are not supported.
}

@Override
public void updateCharacterStream(int columnIndex, Reader x) throws SQLException {
    // No-op stub: stream updates are not supported.
}

@Override
public void updateAsciiStream(String columnLabel, InputStream x) throws SQLException {
    // No-op stub: stream updates are not supported.
}

@Override
public void updateBinaryStream(String columnLabel, InputStream x) throws SQLException {
    // No-op stub: stream updates are not supported.
}

@Override
public void updateCharacterStream(String columnLabel, Reader reader) throws SQLException {
    // No-op stub: stream updates are not supported.
}

@Override
public void updateBlob(int columnIndex, InputStream inputStream) throws SQLException {
    // No-op stub: stream updates are not supported.
}

@Override
public void updateBlob(String columnLabel, InputStream inputStream) throws SQLException {
    // No-op stub: stream updates are not supported.
}

@Override
public void updateClob(int columnIndex, Reader reader) throws SQLException {
    // No-op stub: stream updates are not supported.
}

@Override
public void updateClob(String columnLabel, Reader reader) throws SQLException {
    // No-op stub: stream updates are not supported.
}

@Override
public void updateNClob(int columnIndex, Reader reader) throws SQLException {
    // No-op stub: stream updates are not supported.
}

@Override
public void updateNClob(String columnLabel, Reader reader) throws SQLException {
    // No-op stub: stream updates are not supported.
}

@Override
public <T> T getObject(int columnIndex, Class<T> type) throws SQLException {
    return null; // Stub: typed retrieval is not implemented.
}

@Override
public <T> T getObject(String columnLabel, Class<T> type) throws SQLException {
    return null; // Stub: typed retrieval is not implemented.
}
}
|
apache/directory-server | 36,085 | jdbm-partition/src/main/java/org/apache/directory/server/core/partition/impl/btree/jdbm/JdbmPartition.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.directory.server.core.partition.impl.btree.jdbm;
import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import java.net.URI;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.UUID;
import org.apache.directory.api.ldap.model.constants.SchemaConstants;
import org.apache.directory.api.ldap.model.csn.CsnFactory;
import org.apache.directory.api.ldap.model.cursor.Cursor;
import org.apache.directory.api.ldap.model.cursor.CursorException;
import org.apache.directory.api.ldap.model.cursor.Tuple;
import org.apache.directory.api.ldap.model.entry.Attribute;
import org.apache.directory.api.ldap.model.entry.DefaultEntry;
import org.apache.directory.api.ldap.model.entry.Entry;
import org.apache.directory.api.ldap.model.entry.Value;
import org.apache.directory.api.ldap.model.exception.LdapException;
import org.apache.directory.api.ldap.model.exception.LdapOtherException;
import org.apache.directory.api.ldap.model.exception.LdapSchemaViolationException;
import org.apache.directory.api.ldap.model.message.ResultCodeEnum;
import org.apache.directory.api.ldap.model.name.Dn;
import org.apache.directory.api.ldap.model.schema.AttributeType;
import org.apache.directory.api.ldap.model.schema.SchemaManager;
import org.apache.directory.api.util.exception.MultiException;
import org.apache.directory.server.constants.ApacheSchemaConstants;
import org.apache.directory.server.core.api.DnFactory;
import org.apache.directory.server.core.api.entry.ClonedServerEntry;
import org.apache.directory.server.core.api.interceptor.context.AddOperationContext;
import org.apache.directory.server.core.api.interceptor.context.DeleteOperationContext;
import org.apache.directory.server.core.api.interceptor.context.LookupOperationContext;
import org.apache.directory.server.core.api.interceptor.context.ModifyOperationContext;
import org.apache.directory.server.core.api.interceptor.context.MoveAndRenameOperationContext;
import org.apache.directory.server.core.api.interceptor.context.MoveOperationContext;
import org.apache.directory.server.core.api.interceptor.context.OperationContext;
import org.apache.directory.server.core.api.interceptor.context.RenameOperationContext;
import org.apache.directory.server.core.api.partition.Partition;
import org.apache.directory.server.core.api.partition.PartitionReadTxn;
import org.apache.directory.server.core.api.partition.PartitionTxn;
import org.apache.directory.server.core.api.partition.PartitionWriteTxn;
import org.apache.directory.server.core.partition.impl.btree.AbstractBTreePartition;
import org.apache.directory.server.i18n.I18n;
import org.apache.directory.server.xdbm.Index;
import org.apache.directory.server.xdbm.ParentIdAndRdn;
import org.apache.directory.server.xdbm.search.impl.CursorBuilder;
import org.apache.directory.server.xdbm.search.impl.DefaultOptimizer;
import org.apache.directory.server.xdbm.search.impl.DefaultSearchEngine;
import org.apache.directory.server.xdbm.search.impl.EvaluatorBuilder;
import org.apache.directory.server.xdbm.search.impl.NoOpOptimizer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;
import jdbm.RecordManager;
import jdbm.helper.MRU;
import jdbm.recman.BaseRecordManager;
import jdbm.recman.CacheRecordManager;
import jdbm.recman.TransactionManager;
/**
* A {@link Partition} that stores entries in
* <a href="http://jdbm.sourceforge.net/">JDBM</a> database.
*
* @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
*/
public class JdbmPartition extends AbstractBTreePartition
{
/** static logger */
private static final Logger LOG = LoggerFactory.getLogger( JdbmPartition.class );
private static final String JDBM_DB_FILE_EXTN = ".db";
/**
 * Accepts JDBM index database files ({@code *.db}) while excluding the
 * master table files ({@code master.db} / {@code master.lg}).
 */
private static final FilenameFilter DB_FILTER =
    // really important to filter master.db and master.lg files
    ( dir, name ) -> name.endsWith( JDBM_DB_FILE_EXTN ) && !name.startsWith( "master." );
/** the JDBM record manager used by this database */
private RecordManager recMan;
/** the entry cache */
private Cache< String, Entry > entryCache;
/**
 * Creates a store based on JDBM B+Trees.
 *
 * @param schemaManager The SchemaManager instance
 * @param dnFactory The DN factory instance
 */
public JdbmPartition( SchemaManager schemaManager, DnFactory dnFactory )
{
    super( schemaManager, dnFactory );

    // Initialize the cache size: a negative value means "not configured",
    // so we fall back to the partition's default entry cache size
    if ( cacheSize < 0 )
    {
        cacheSize = DEFAULT_CACHE_SIZE;
        LOG.debug( "Using the default entry cache size of {} for {} partition", cacheSize, id );
    }
    else
    {
        LOG.debug( "Using the custom configured cache size of {} for {} partition", cacheSize, id );
    }
}
/**
 * Rebuild all the system and user indexes by replaying every entry stored
 * in the master table. Used by the repair tool, hence the console output.
 *
 * @param partitionTxn the transaction the index additions are made in (may be null during repair)
 * @return the number of entries read from the master table
 * @throws LdapException if an entry is invalid (missing objectClass/entryCsn) or an index update fails
 * @throws IOException if the master table cursor cannot be opened or closed
 */
private int rebuildIndexes( PartitionTxn partitionTxn ) throws LdapException, IOException
{
    Cursor<Tuple<String, Entry>> cursor = getMasterTable().cursor();
    int masterTableCount = 0;

    System.out.println( "Re-building indices..." );

    // The context entry needs special RDN handling and must only be processed once
    boolean ctxEntryLoaded = false;

    try
    {
        while ( cursor.next() )
        {
            masterTableCount++;

            Tuple<String, Entry> tuple = cursor.get();
            String id = tuple.getKey();

            Entry entry = tuple.getValue();

            // Start with the RdnIndex
            String parentId = entry.get( ApacheSchemaConstants.ENTRY_PARENT_ID_OID ).getString();

            System.out.println( "Read entry " + entry.getDn() + " with ID " + id + " and parent ID " + parentId );

            Dn dn = entry.getDn();
            ParentIdAndRdn parentIdAndRdn = null;

            // context entry may have more than one RDN
            if ( !ctxEntryLoaded && getSuffixDn().getName().startsWith( dn.getName() ) )
            {
                // If the read entry is the context entry, inject a tuple that have one or more RDNs
                parentIdAndRdn = new ParentIdAndRdn( parentId, getSuffixDn().getRdns() );
                ctxEntryLoaded = true;
            }
            else
            {
                parentIdAndRdn = new ParentIdAndRdn( parentId, dn.getRdn() );
            }

            // Inject the parentIdAndRdn in the rdnIndex
            rdnIdx.add( partitionTxn, parentIdAndRdn, id );

            // Update the ObjectClass index
            Attribute objectClass = entry.get( objectClassAT );

            if ( objectClass == null )
            {
                String msg = I18n.err( I18n.ERR_49009_ENTRY_WITHOUT_OBJECT_CLASS, dn, entry );
                ResultCodeEnum rc = ResultCodeEnum.OBJECT_CLASS_VIOLATION;
                throw new LdapSchemaViolationException( rc, msg );
            }

            for ( Value value : objectClass )
            {
                String valueStr = value.getString();

                // 'top' is present in every entry, indexing it would be useless
                if ( valueStr.equals( SchemaConstants.TOP_OC ) )
                {
                    continue;
                }

                objectClassIdx.add( partitionTxn, valueStr, id );
            }

            // The Alias indexes
            if ( objectClass.contains( SchemaConstants.ALIAS_OC ) )
            {
                Attribute aliasAttr = entry.get( aliasedObjectNameAT );
                addAliasIndices( partitionTxn, id, dn, new Dn( schemaManager, aliasAttr.getString() ) );
            }

            // Update the EntryCsn index
            Attribute entryCsn = entry.get( entryCsnAT );

            if ( entryCsn == null )
            {
                String msg = I18n.err( I18n.ERR_49010_ENTRY_WITHOUT_ENTRY_CSN, dn, entry );
                throw new LdapSchemaViolationException( ResultCodeEnum.OBJECT_CLASS_VIOLATION, msg );
            }

            entryCsnIdx.add( partitionTxn, entryCsn.getString(), id );

            // Update the AdministrativeRole index, if needed
            if ( entry.containsAttribute( administrativeRoleAT ) )
            {
                // We may have more than one role
                Attribute adminRoles = entry.get( administrativeRoleAT );

                for ( Value value : adminRoles )
                {
                    adminRoleIdx.add( partitionTxn, value.getString(), id );
                }

                // Adds only those attributes that are indexed
                presenceIdx.add( partitionTxn, administrativeRoleAT.getOid(), id );
            }

            // Now work on the user defined userIndices
            for ( Attribute attribute : entry )
            {
                AttributeType attributeType = attribute.getAttributeType();
                String attributeOid = attributeType.getOid();

                if ( hasUserIndexOn( attributeType ) )
                {
                    Index<Object, String> idx = ( Index<Object, String> ) getUserIndex( attributeType );

                    // here lookup by attributeId is OK since we got attributeId from
                    // the entry via the enumeration - it's in there as is for sure
                    for ( Value value : attribute )
                    {
                        idx.add( partitionTxn, value.getString(), id );
                    }

                    // Adds only those attributes that are indexed
                    presenceIdx.add( partitionTxn, attributeOid, id );
                }
            }
        }
    }
    catch ( Exception e )
    {
        // Report the number of entries actually processed (the previous code
        // printed a 'repaired' counter that was never incremented, always 0)
        System.out.println( "Exiting after fetching entries " + masterTableCount );
        throw new LdapOtherException( e.getMessage(), e );
    }
    finally
    {
        // Always release the master table cursor, even on failure
        cursor.close();
    }

    return masterTableCount;
}
/**
 * Update the children and descendant counters in the RDN index by walking
 * the master table and bumping the counters of each entry's parent.
 *
 * @param partitionTxn the transaction the counter updates are made in (may be null during repair)
 * @throws LdapException if the counters cannot be updated
 * @throws IOException if the master table cursor cannot be opened or closed
 */
private void updateRdnIndexCounters( PartitionTxn partitionTxn ) throws LdapException, IOException
{
    Cursor<Tuple<String, Entry>> cursor = getMasterTable().cursor();

    System.out.println( "Updating the RDN index counters..." );

    try
    {
        while ( cursor.next() )
        {
            Tuple<String, Entry> tuple = cursor.get();
            Entry entry = tuple.getValue();

            // Update the parent's nbChildren and nbDescendants values
            String parentId = entry.get( ApacheSchemaConstants.ENTRY_PARENT_ID_OID ).getString();

            // Use equals(), not '!=': the parent ID read from the entry is a
            // distinct String instance, so a reference comparison against the
            // ROOT_ID constant would (almost) never match, and the root's
            // counters would be updated when they should be skipped
            if ( !Partition.ROOT_ID.equals( parentId ) )
            {
                updateRdnIdx( partitionTxn, parentId, ADD_CHILD, 0 );
            }
        }
    }
    catch ( Exception e )
    {
        System.out.println( "Exiting, wasn't able to update the RDN index counters" );
        throw new LdapOtherException( e.getMessage(), e );
    }
    finally
    {
        // Always release the master table cursor, even on failure
        cursor.close();
    }
}
/**
 * {@inheritDoc}
 */
@Override
protected void doRepair() throws LdapException
{
    // Open a plain (uncached) record manager on the partition files.
    // NOTE(review): the 'recMan' field is not replaced here -- presumably
    // repair runs before doInit(); confirm against the caller.
    BaseRecordManager base;

    try
    {
        base = new BaseRecordManager( getPartitionPath().getPath() );
        TransactionManager transactionManager = base.getTransactionManager();
        transactionManager.setMaximumTransactionsInLog( 2000 );
    }
    catch ( IOException ioe )
    {
        throw new LdapOtherException( ioe.getMessage(), ioe );
    }

    // Find the underlying directories
    File partitionDir = new File( getPartitionPath() );

    // get the names of the db files
    List<String> indexDbFileNameList = Arrays.asList( partitionDir.list( DB_FILTER ) );

    // then add all index objects to a list
    List<String> allIndices = new ArrayList<>();

    try
    {
        // Iterate on the declared indexes, deleting the old ones
        for ( Index<?, String> index : getIndexedAttributes() )
        {
            // Index won't be initialized at this time, so lookup AT registry to get the OID
            AttributeType indexAT = schemaManager.lookupAttributeTypeRegistry( index.getAttributeId() );
            String oid = indexAT.getOid();
            allIndices.add( oid );

            // The on-disk file name for this index
            String name = oid + JDBM_DB_FILE_EXTN;

            // If an index DB file already exists for this OID, delete it and
            // re-create the index empty, so it can be rebuilt from scratch
            if ( indexDbFileNameList.contains( name ) )
            {
                ( ( JdbmIndex<?> ) index ).close( null );
                File indexFile = new File( partitionDir, name );
                indexFile.delete();

                // Recreate the index
                ( ( JdbmIndex<?> ) index ).init( base, schemaManager, indexAT );
            }
        }

        // Ok, now, rebuild the indexes (null txn: repair runs outside a partition transaction)
        int masterTableCount = rebuildIndexes( null );

        // Now that the RdnIndex has been rebuilt, we have to update the nbChildren and nbDescendants values
        // We loop again on the MasterTable
        updateRdnIndexCounters( null );

        // Flush the indexes on disk
        sync();

        System.out.println( "Total entries present in the partition " + masterTableCount );
        System.out.println( "Repair complete" );
    }
    catch ( IOException ioe )
    {
        throw new LdapOtherException( ioe.getMessage(), ioe );
    }
}
@Override
protected void doInit() throws LdapException
{
    // Guard against double initialization
    if ( !initialized )
    {
        BaseRecordManager base;

        // setup optimizer and registries for parent
        if ( !optimizerEnabled )
        {
            setOptimizer( new NoOpOptimizer() );
        }
        else
        {
            setOptimizer( new DefaultOptimizer( this ) );
        }

        // Wire the search engine: the evaluator and cursor builders feed the engine
        EvaluatorBuilder evaluatorBuilder = new EvaluatorBuilder( this, schemaManager );
        CursorBuilder cursorBuilder = new CursorBuilder( this, evaluatorBuilder );
        setSearchEngine( new DefaultSearchEngine( this, cursorBuilder, evaluatorBuilder, getOptimizer() ) );

        // Create the underlying directories (only if needed)
        File partitionDir = new File( getPartitionPath() );

        if ( !partitionDir.exists() && !partitionDir.mkdirs() )
        {
            throw new LdapOtherException( I18n.err( I18n.ERR_00004_COULD_NOT_CREATE_DIRECTORY, partitionDir ) );
        }

        // First, check if the file storing the data exists
        String path = partitionDir.getPath() + File.separator + id;

        try
        {
            base = new BaseRecordManager( path );
            TransactionManager transactionManager = base.getTransactionManager();
            transactionManager.setMaximumTransactionsInLog( 2000 );

            // prevent the OOM when more than 50k users are loaded at a stretch
            // adding this system property to make it configurable till JDBM gets replaced by Mavibot
            String cacheSizeVal = System.getProperty( "jdbm.recman.cache.size", "100" );
            int recCacheSize = Integer.parseInt( cacheSizeVal );
            LOG.info( "Setting CacheRecondManager's cache size to {}", recCacheSize );
            recMan = new CacheRecordManager( base, new MRU( recCacheSize ) );
        }
        catch ( IOException ioe )
        {
            throw new LdapOtherException( ioe.getMessage(), ioe );
        }

        // Iterate on the declared indexes, remembering which ones have no
        // backing B+Tree yet: those must be built after super.doInit()
        List<String> allIndices = new ArrayList<>();
        List<Index<?, String>> indexToBuild = new ArrayList<>();

        for ( Index<?, String> index : getIndexedAttributes() )
        {
            String oid = schemaManager.lookupAttributeTypeRegistry( index.getAttributeId() ).getOid();
            allIndices.add( oid );

            // if the name doesn't exist in the database
            // this is a new index and we need to build it
            try
            {
                // Check the forward index only (we suppose we never will add a reverse index later on)
                String forwardIndex = oid + "_forward";

                if ( recMan.getNamedObject( forwardIndex ) == 0 )
                {
                    // The index does not exist in the database, we need to build it
                    indexToBuild.add( index );
                }
            }
            catch ( IOException ioe )
            {
                throw new LdapOtherException( ioe.getMessage(), ioe );
            }
        }

        /*
        // get all index db files first
        File[] allIndexDbFiles = partitionDir.listFiles( DB_FILTER );

        // get the names of the db files also
        List<String> indexDbFileNameList = Arrays.asList( partitionDir.list( DB_FILTER ) );

        // then add all index objects to a list
        List<String> allIndices = new ArrayList<>();
        List<Index<?, String>> indexToBuild = new ArrayList<>();

        // Iterate on the declared indexes
        for ( Index<?, String> index : getIndexedAttributes() )
        {
            // Index won't be initialized at this time, so lookup AT registry to get the OID
            String oid = schemaManager.lookupAttributeTypeRegistry( index.getAttributeId() ).getOid();
            allIndices.add( oid );

            // take the part after removing .db from the
            String name = oid + JDBM_DB_FILE_EXTN;

            // if the name doesn't exist in the list of index DB files
            // this is a new index and we need to build it
            if ( !indexDbFileNameList.contains( name ) )
            {
                indexToBuild.add( index );
            }
        }
        */

        // Initialize the indexes
        super.doInit();

        // Initialize the cache size (negative means "not configured")
        if ( cacheSize < 0 )
        {
            cacheSize = DEFAULT_CACHE_SIZE;
            LOG.debug( "Using the default entry cache size of {} for {} partition", cacheSize, id );
        }
        else
        {
            LOG.debug( "Using the custom configured cache size of {} for {} partition", cacheSize, id );
        }

        // Create the master table (the table containing all the entries)
        try
        {
            master = new JdbmMasterTable( recMan, schemaManager );
        }
        catch ( IOException ioe )
        {
            throw new LdapOtherException( ioe.getMessage(), ioe );
        }

        // Build any newly declared user indexes from the master table content
        if ( !indexToBuild.isEmpty() )
        {
            buildUserIndex( beginReadTransaction(), indexToBuild );
        }

        entryCache = Caffeine.newBuilder().maximumSize( cacheSize ).build();

        // Initialization of the context entry
        if ( ( suffixDn != null ) && ( contextEntry != null ) )
        {
            Dn contextEntryDn = contextEntry.getDn();

            // Checking if the context entry DN is schema aware
            if ( !contextEntryDn.isSchemaAware() )
            {
                contextEntryDn = new Dn( schemaManager, contextEntryDn );
            }

            // We're only adding the entry if the two DNs are equal
            if ( suffixDn.equals( contextEntryDn ) )
            {
                // Looking for the current context entry
                Entry suffixEntry;
                LookupOperationContext lookupContext = new LookupOperationContext( null, suffixDn );
                lookupContext.setPartition( this );

                try ( PartitionTxn partitionTxn = beginReadTransaction() )
                {
                    lookupContext.setTransaction( partitionTxn );
                    suffixEntry = lookup( lookupContext );
                }
                catch ( IOException ioe )
                {
                    throw new LdapOtherException( ioe.getMessage(), ioe );
                }

                // We're only adding the context entry if it doesn't already exist
                if ( suffixEntry == null )
                {
                    // Checking of the context entry is schema aware
                    if ( !contextEntry.isSchemaAware() )
                    {
                        // Making the context entry schema aware
                        contextEntry = new DefaultEntry( schemaManager, contextEntry );
                    }

                    // Adding the 'entryCsn' attribute
                    if ( contextEntry.get( SchemaConstants.ENTRY_CSN_AT ) == null )
                    {
                        contextEntry.add( SchemaConstants.ENTRY_CSN_AT, new CsnFactory( 0 ).newInstance()
                            .toString() );
                    }

                    // Adding the 'entryUuid' attribute
                    if ( contextEntry.get( SchemaConstants.ENTRY_UUID_AT ) == null )
                    {
                        String uuid = UUID.randomUUID().toString();
                        contextEntry.add( SchemaConstants.ENTRY_UUID_AT, uuid );
                    }

                    // And add this entry to the underlying partition, aborting
                    // the write transaction on any failure before rethrowing
                    PartitionTxn partitionTxn = null;
                    AddOperationContext addContext = new AddOperationContext( null, contextEntry );

                    try
                    {
                        partitionTxn = beginWriteTransaction();
                        addContext.setTransaction( partitionTxn );

                        add( addContext );
                        partitionTxn.commit();
                    }
                    catch ( LdapException le )
                    {
                        if ( partitionTxn != null )
                        {
                            try
                            {
                                partitionTxn.abort();
                            }
                            catch ( IOException ioe )
                            {
                                throw new LdapOtherException( ioe.getMessage(), ioe );
                            }
                        }

                        throw le;
                    }
                    catch ( IOException ioe )
                    {
                        try
                        {
                            partitionTxn.abort();
                        }
                        catch ( IOException ioe2 )
                        {
                            throw new LdapOtherException( ioe2.getMessage(), ioe2 );
                        }

                        throw new LdapOtherException( ioe.getMessage(), ioe );
                    }
                }
            }
        }

        // We are done !
        initialized = true;
    }
}
/**
 * {@inheritDoc}
 */
public String getDefaultId()
{
    // Delegates to the Partition interface constant
    return Partition.DEFAULT_ID;
}
/**
 * {@inheritDoc}
 */
public String getRootId()
{
    // Delegates to the Partition interface constant
    return Partition.ROOT_ID;
}
/**
 * This method is called when the synch thread is waking up, to write
 * the modified data.
 *
 * @throws LdapException on failures to sync database files to disk
 */
@Override
public synchronized void sync() throws LdapException
{
    // Nothing to flush before the partition has been initialized
    if ( !initialized )
    {
        return;
    }

    try
    {
        // Commit any pending record manager changes first
        recMan.commit();

        // Unwrap the cache layer (if any) to reach the BaseRecordManager,
        // then force the transaction journal to disk
        RecordManager underlying = recMan;

        if ( underlying instanceof CacheRecordManager )
        {
            underlying = ( ( CacheRecordManager ) underlying ).getRecordManager();
        }

        ( ( BaseRecordManager ) underlying ).getTransactionManager().synchronizeLog();
    }
    catch ( IOException ioe )
    {
        throw new LdapOtherException( ioe.getMessage(), ioe );
    }
}
/**
 * Builds user defined indexes on a attributes by browsing all the entries present in master db
 *
 * Note: if the given list of indices contains any system index that will be skipped.
 *
 * WARN: MUST be called after calling super.doInit()
 *
 * @param partitionTxn the transaction the index additions are made in
 * @param indices then selected indexes that need to be built
 * @throws LdapException in case of any problems while building the index
 */
private void buildUserIndex( PartitionTxn partitionTxn, List<Index<?, String>> indices ) throws LdapException
{
    // try-with-resources: the previous code only closed the cursor on the
    // success path, leaking it whenever next()/add() threw
    try ( Cursor<Tuple<String, Entry>> cursor = master.cursor() )
    {
        cursor.beforeFirst();

        while ( cursor.next() )
        {
            for ( Index<?, String> index : indices )
            {
                AttributeType atType = index.getAttribute();
                String attributeOid = atType.getOid();

                if ( systemIndices.get( attributeOid ) != null )
                {
                    // skipping building of the system index
                    continue;
                }

                LOG.info( "building the index for attribute type {}", atType );

                Tuple<String, Entry> tuple = cursor.get();
                String id = tuple.getKey();
                Entry entry = tuple.getValue();

                Attribute entryAttr = entry.get( atType );

                if ( entryAttr != null )
                {
                    for ( Value value : entryAttr )
                    {
                        // Same cast style as rebuildIndexes(): the values are
                        // added through the Object-keyed view of the index
                        ( ( Index<Object, String> ) index ).add( partitionTxn, value.getString(), id );
                    }

                    // Adds only those attributes that are indexed
                    presenceIdx.add( partitionTxn, attributeOid, id );
                }
            }
        }
    }
    catch ( CursorException | IOException e )
    {
        throw new LdapOtherException( e.getMessage(), e );
    }
}
/**
 * Removes any unused/removed attribute index files present under the partition's
 * working directory.
 *
 * @param allIndices the OIDs of the indexes that are still declared
 * @param dbFiles the candidate ".db" files found in the partition directory
 */
private void deleteUnusedIndexFiles( List<String> allIndices, File[] dbFiles )
{
    for ( File dbFile : dbFiles )
    {
        String fileName = dbFile.getName();

        // Strip the trailing ".db" extension to recover the index OID
        String indexOid = fileName.substring( 0, fileName.lastIndexOf( JDBM_DB_FILE_EXTN ) );

        // Never delete a system index file
        if ( systemIndices.get( indexOid ) != null )
        {
            continue;
        }

        // Keep files that back a still-declared index
        if ( allIndices.contains( indexOid ) )
        {
            continue;
        }

        if ( dbFile.delete() )
        {
            LOG.info( "Deleted unused index file {}", dbFile.getAbsolutePath() );
        }
        else
        {
            LOG.warn( "Failed to delete unused index file {}", dbFile.getAbsolutePath() );
        }
    }
}
/**
 * {@inheritDoc}
 */
@Override
protected Index<?, String> convertAndInit( Index<?, String> index ) throws LdapException
{
    JdbmIndex<?> jdbmIndex;

    // The most specific types must be tested first: JdbmRdnIndex and
    // JdbmDnIndex are JdbmIndex subclasses, so the order of these
    // instanceof checks matters
    if ( index instanceof JdbmRdnIndex )
    {
        jdbmIndex = ( JdbmRdnIndex ) index;
    }
    else if ( index instanceof JdbmDnIndex )
    {
        jdbmIndex = ( JdbmDnIndex ) index;
    }
    else if ( index instanceof JdbmIndex<?> )
    {
        jdbmIndex = ( JdbmIndex<?> ) index;
    }
    else
    {
        LOG.debug( "Supplied index {} is not a JdbmIndex. "
            + "Will create new JdbmIndex using copied configuration parameters.", index );
        // Diamond operator instead of a raw JdbmIndex construction
        jdbmIndex = new JdbmIndex<>( index.getAttributeId(), true );
        jdbmIndex.setCacheSize( index.getCacheSize() );
        jdbmIndex.setNumDupLimit( JdbmIndex.DEFAULT_DUPLICATE_LIMIT );
    }

    try
    {
        jdbmIndex.init( recMan, schemaManager, schemaManager.lookupAttributeTypeRegistry( index.getAttributeId() ) );
    }
    catch ( IOException ioe )
    {
        throw new LdapOtherException( ioe.getMessage(), ioe );
    }

    return jdbmIndex;
}
    /**
     * {@inheritDoc}
     * <p>
     * Destroys the partition, collecting every failure into a single
     * {@link MultiException} so that cleanup continues past individual errors.
     * The entry cache, when present, is always invalidated regardless of how
     * the record manager close went.
     */
    @Override
    protected synchronized void doDestroy( PartitionTxn partitionTxn ) throws LdapException
    {
        MultiException errors = new MultiException( I18n.err( I18n.ERR_49003_ERRORS_ENCOUNTERED_ON_DESTROY ) );

        // Nothing to destroy if the partition was never initialized
        if ( !initialized )
        {
            return;
        }

        try
        {
            super.doDestroy( partitionTxn );
        }
        catch ( Exception e )
        {
            // Keep going : record the failure and still close the record manager
            errors.addThrowable( e );
        }

        // This is specific to the JDBM store : close the record manager
        try
        {
            recMan.close();
            LOG.debug( "Closed record manager for {} partition.", suffixDn );
        }
        catch ( IOException t )
        {
            LOG.error( I18n.err( I18n.ERR_34000_FAILED_TO_CLOSE_RECORD_MANAGER ), t );
            errors.addThrowable( t );
        }
        finally
        {
            // Always drop cached entries, whether or not the close succeeded
            if ( entryCache != null )
            {
                entryCache.invalidateAll();
            }
        }

        // Surface every collected failure to the caller in one exception
        if ( errors.size() > 0 )
        {
            throw new LdapOtherException( errors.getMessage(), errors );
        }
    }
/**
* {@inheritDoc}
*/
@Override
protected final Index createSystemIndex( String oid, URI path, boolean withReverse ) throws LdapException
{
LOG.debug( "Supplied index {} is not a JdbmIndex. "
+ "Will create new JdbmIndex using copied configuration parameters." );
JdbmIndex<?> jdbmIndex;
if ( oid.equals( ApacheSchemaConstants.APACHE_RDN_AT_OID ) )
{
jdbmIndex = new JdbmRdnIndex();
jdbmIndex.setAttributeId( ApacheSchemaConstants.APACHE_RDN_AT_OID );
jdbmIndex.setNumDupLimit( JdbmIndex.DEFAULT_DUPLICATE_LIMIT );
}
else if ( oid.equals( ApacheSchemaConstants.APACHE_ALIAS_AT_OID ) )
{
jdbmIndex = new JdbmDnIndex( ApacheSchemaConstants.APACHE_ALIAS_AT_OID );
jdbmIndex.setAttributeId( ApacheSchemaConstants.APACHE_ALIAS_AT_OID );
jdbmIndex.setNumDupLimit( JdbmIndex.DEFAULT_DUPLICATE_LIMIT );
}
else
{
jdbmIndex = new JdbmIndex( oid, withReverse );
jdbmIndex.setNumDupLimit( JdbmIndex.DEFAULT_DUPLICATE_LIMIT );
}
jdbmIndex.setWkDirPath( path );
return jdbmIndex;
}
    /**
     * {@inheritDoc}
     * <p>
     * Keeps the entry cache consistent after a write operation : a modify
     * replaces the cached entry, a move/rename flushes the whole cache, and a
     * delete evicts the single entry. Does nothing when caching is disabled.
     */
    @Override
    public void updateCache( OperationContext opCtx )
    {
        if ( entryCache == null )
        {
            return;
        }

        try
        {
            if ( opCtx instanceof ModifyOperationContext )
            {
                // replace the entry
                ModifyOperationContext modCtx = ( ModifyOperationContext ) opCtx;
                Entry entry = modCtx.getAlteredEntry();

                // NOTE(review): assumes the altered entry always carries an
                // entryUUID attribute - a missing one would raise an NPE that
                // the LdapException catch below does not cover. Confirm.
                String id = entry.get( SchemaConstants.ENTRY_UUID_AT ).getString();

                // Cache the original entry, not the per-operation clone
                if ( entry instanceof ClonedServerEntry )
                {
                    entry = ( ( ClonedServerEntry ) entry ).getOriginalEntry();
                }

                entryCache.put( id, entry );
            }
            else if ( ( opCtx instanceof MoveOperationContext )
                || ( opCtx instanceof MoveAndRenameOperationContext )
                || ( opCtx instanceof RenameOperationContext ) )
            {
                // clear the whole cache : it is not worth updating all the children
                entryCache.invalidateAll();
            }
            else if ( opCtx instanceof DeleteOperationContext )
            {
                // delete the entry, keyed by its entryUUID
                DeleteOperationContext delCtx = ( DeleteOperationContext ) opCtx;
                entryCache.invalidate( delCtx.getEntry().get( SchemaConstants.ENTRY_UUID_AT ).getString() );
            }
        }
        catch ( LdapException e )
        {
            // A stale cache is preferable to failing the write operation
            LOG.warn( "Failed to update entry cache", e );
        }
    }
@Override
public Entry lookupCache( String id )
{
return ( entryCache != null ) ? entryCache.getIfPresent( id ) : null;
}
@Override
public void addToCache( String id, Entry entry )
{
if ( entryCache == null )
{
return;
}
Entry addedEntry = entry;
if ( entry instanceof ClonedServerEntry )
{
addedEntry = ( ( ClonedServerEntry ) entry ).getOriginalEntry();
}
entryCache.put( id, addedEntry );
}
    /**
     * {@inheritDoc}
     * <p>
     * Returns a fresh {@link PartitionReadTxn} on every call; no JDBM state is
     * consulted when starting a read transaction.
     */
    @Override
    public PartitionReadTxn beginReadTransaction()
    {
        return new PartitionReadTxn();
    }
    /**
     * {@inheritDoc}
     * <p>
     * Returns a {@link JdbmPartitionWriteTxn} bound to this partition's record
     * manager, honoring the partition's syncOnWrite configuration.
     */
    @Override
    public PartitionWriteTxn beginWriteTransaction()
    {
        return new JdbmPartitionWriteTxn( recMan, isSyncOnWrite() );
    }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/v2/agent.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dialogflow.v2;
/**
*
*
* <pre>
* The request message for
* [Agents.RestoreAgent][google.cloud.dialogflow.v2.Agents.RestoreAgent].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.v2.RestoreAgentRequest}
*/
public final class RestoreAgentRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dialogflow.v2.RestoreAgentRequest)
RestoreAgentRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use RestoreAgentRequest.newBuilder() to construct.
private RestoreAgentRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private RestoreAgentRequest() {
parent_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new RestoreAgentRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.v2.AgentProto
.internal_static_google_cloud_dialogflow_v2_RestoreAgentRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.v2.AgentProto
.internal_static_google_cloud_dialogflow_v2_RestoreAgentRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.v2.RestoreAgentRequest.class,
com.google.cloud.dialogflow.v2.RestoreAgentRequest.Builder.class);
}
private int agentCase_ = 0;
@SuppressWarnings("serial")
private java.lang.Object agent_;
public enum AgentCase
implements
com.google.protobuf.Internal.EnumLite,
com.google.protobuf.AbstractMessage.InternalOneOfEnum {
AGENT_URI(2),
AGENT_CONTENT(3),
AGENT_NOT_SET(0);
private final int value;
private AgentCase(int value) {
this.value = value;
}
/**
* @param value The number of the enum to look for.
* @return The enum associated with the given number.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static AgentCase valueOf(int value) {
return forNumber(value);
}
public static AgentCase forNumber(int value) {
switch (value) {
case 2:
return AGENT_URI;
case 3:
return AGENT_CONTENT;
case 0:
return AGENT_NOT_SET;
default:
return null;
}
}
public int getNumber() {
return this.value;
}
};
public AgentCase getAgentCase() {
return AgentCase.forNumber(agentCase_);
}
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The project that the agent to restore is associated with.
* Format: `projects/<Project ID>`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The project that the agent to restore is associated with.
* Format: `projects/<Project ID>`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int AGENT_URI_FIELD_NUMBER = 2;
/**
*
*
* <pre>
* The URI to a Google Cloud Storage file containing the agent to restore.
* Note: The URI must start with "gs://".
*
* Dialogflow performs a read operation for the Cloud Storage object
* on the caller's behalf, so your request authentication must
* have read permissions for the object. For more information, see
* [Dialogflow access
* control](https://cloud.google.com/dialogflow/cx/docs/concept/access-control#storage).
* </pre>
*
* <code>string agent_uri = 2;</code>
*
* @return Whether the agentUri field is set.
*/
public boolean hasAgentUri() {
return agentCase_ == 2;
}
/**
*
*
* <pre>
* The URI to a Google Cloud Storage file containing the agent to restore.
* Note: The URI must start with "gs://".
*
* Dialogflow performs a read operation for the Cloud Storage object
* on the caller's behalf, so your request authentication must
* have read permissions for the object. For more information, see
* [Dialogflow access
* control](https://cloud.google.com/dialogflow/cx/docs/concept/access-control#storage).
* </pre>
*
* <code>string agent_uri = 2;</code>
*
* @return The agentUri.
*/
public java.lang.String getAgentUri() {
java.lang.Object ref = "";
if (agentCase_ == 2) {
ref = agent_;
}
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (agentCase_ == 2) {
agent_ = s;
}
return s;
}
}
/**
*
*
* <pre>
* The URI to a Google Cloud Storage file containing the agent to restore.
* Note: The URI must start with "gs://".
*
* Dialogflow performs a read operation for the Cloud Storage object
* on the caller's behalf, so your request authentication must
* have read permissions for the object. For more information, see
* [Dialogflow access
* control](https://cloud.google.com/dialogflow/cx/docs/concept/access-control#storage).
* </pre>
*
* <code>string agent_uri = 2;</code>
*
* @return The bytes for agentUri.
*/
public com.google.protobuf.ByteString getAgentUriBytes() {
java.lang.Object ref = "";
if (agentCase_ == 2) {
ref = agent_;
}
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
if (agentCase_ == 2) {
agent_ = b;
}
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int AGENT_CONTENT_FIELD_NUMBER = 3;
/**
*
*
* <pre>
* Zip compressed raw byte content for agent.
* </pre>
*
* <code>bytes agent_content = 3;</code>
*
* @return Whether the agentContent field is set.
*/
@java.lang.Override
public boolean hasAgentContent() {
return agentCase_ == 3;
}
/**
*
*
* <pre>
* Zip compressed raw byte content for agent.
* </pre>
*
* <code>bytes agent_content = 3;</code>
*
* @return The agentContent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getAgentContent() {
if (agentCase_ == 3) {
return (com.google.protobuf.ByteString) agent_;
}
return com.google.protobuf.ByteString.EMPTY;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
if (agentCase_ == 2) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, agent_);
}
if (agentCase_ == 3) {
output.writeBytes(3, (com.google.protobuf.ByteString) agent_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (agentCase_ == 2) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, agent_);
}
if (agentCase_ == 3) {
size +=
com.google.protobuf.CodedOutputStream.computeBytesSize(
3, (com.google.protobuf.ByteString) agent_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.dialogflow.v2.RestoreAgentRequest)) {
return super.equals(obj);
}
com.google.cloud.dialogflow.v2.RestoreAgentRequest other =
(com.google.cloud.dialogflow.v2.RestoreAgentRequest) obj;
if (!getParent().equals(other.getParent())) return false;
if (!getAgentCase().equals(other.getAgentCase())) return false;
switch (agentCase_) {
case 2:
if (!getAgentUri().equals(other.getAgentUri())) return false;
break;
case 3:
if (!getAgentContent().equals(other.getAgentContent())) return false;
break;
case 0:
default:
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
switch (agentCase_) {
case 2:
hash = (37 * hash) + AGENT_URI_FIELD_NUMBER;
hash = (53 * hash) + getAgentUri().hashCode();
break;
case 3:
hash = (37 * hash) + AGENT_CONTENT_FIELD_NUMBER;
hash = (53 * hash) + getAgentContent().hashCode();
break;
case 0:
default:
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.dialogflow.v2.RestoreAgentRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.v2.RestoreAgentRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2.RestoreAgentRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.v2.RestoreAgentRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2.RestoreAgentRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.v2.RestoreAgentRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2.RestoreAgentRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.v2.RestoreAgentRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2.RestoreAgentRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.v2.RestoreAgentRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2.RestoreAgentRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.v2.RestoreAgentRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.dialogflow.v2.RestoreAgentRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* The request message for
* [Agents.RestoreAgent][google.cloud.dialogflow.v2.Agents.RestoreAgent].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.v2.RestoreAgentRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.v2.RestoreAgentRequest)
com.google.cloud.dialogflow.v2.RestoreAgentRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.v2.AgentProto
.internal_static_google_cloud_dialogflow_v2_RestoreAgentRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.v2.AgentProto
.internal_static_google_cloud_dialogflow_v2_RestoreAgentRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.v2.RestoreAgentRequest.class,
com.google.cloud.dialogflow.v2.RestoreAgentRequest.Builder.class);
}
// Construct using com.google.cloud.dialogflow.v2.RestoreAgentRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
agentCase_ = 0;
agent_ = null;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.dialogflow.v2.AgentProto
.internal_static_google_cloud_dialogflow_v2_RestoreAgentRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.dialogflow.v2.RestoreAgentRequest getDefaultInstanceForType() {
return com.google.cloud.dialogflow.v2.RestoreAgentRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.dialogflow.v2.RestoreAgentRequest build() {
com.google.cloud.dialogflow.v2.RestoreAgentRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.dialogflow.v2.RestoreAgentRequest buildPartial() {
com.google.cloud.dialogflow.v2.RestoreAgentRequest result =
new com.google.cloud.dialogflow.v2.RestoreAgentRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
buildPartialOneofs(result);
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.dialogflow.v2.RestoreAgentRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
}
private void buildPartialOneofs(com.google.cloud.dialogflow.v2.RestoreAgentRequest result) {
result.agentCase_ = agentCase_;
result.agent_ = this.agent_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.dialogflow.v2.RestoreAgentRequest) {
return mergeFrom((com.google.cloud.dialogflow.v2.RestoreAgentRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.dialogflow.v2.RestoreAgentRequest other) {
if (other == com.google.cloud.dialogflow.v2.RestoreAgentRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
switch (other.getAgentCase()) {
case AGENT_URI:
{
agentCase_ = 2;
agent_ = other.agent_;
onChanged();
break;
}
case AGENT_CONTENT:
{
setAgentContent(other.getAgentContent());
break;
}
case AGENT_NOT_SET:
{
break;
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
java.lang.String s = input.readStringRequireUtf8();
agentCase_ = 2;
agent_ = s;
break;
} // case 18
case 26:
{
agent_ = input.readBytes();
agentCase_ = 3;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int agentCase_ = 0;
private java.lang.Object agent_;
public AgentCase getAgentCase() {
return AgentCase.forNumber(agentCase_);
}
public Builder clearAgent() {
agentCase_ = 0;
agent_ = null;
onChanged();
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The project that the agent to restore is associated with.
* Format: `projects/<Project ID>`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The project that the agent to restore is associated with.
* Format: `projects/<Project ID>`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The project that the agent to restore is associated with.
* Format: `projects/<Project ID>`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The project that the agent to restore is associated with.
* Format: `projects/<Project ID>`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The project that the agent to restore is associated with.
* Format: `projects/<Project ID>`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* The URI to a Google Cloud Storage file containing the agent to restore.
* Note: The URI must start with "gs://".
*
* Dialogflow performs a read operation for the Cloud Storage object
* on the caller's behalf, so your request authentication must
* have read permissions for the object. For more information, see
* [Dialogflow access
* control](https://cloud.google.com/dialogflow/cx/docs/concept/access-control#storage).
* </pre>
*
* <code>string agent_uri = 2;</code>
*
* @return Whether the agentUri field is set.
*/
@java.lang.Override
public boolean hasAgentUri() {
return agentCase_ == 2;
}
/**
*
*
* <pre>
* The URI to a Google Cloud Storage file containing the agent to restore.
* Note: The URI must start with "gs://".
*
* Dialogflow performs a read operation for the Cloud Storage object
* on the caller's behalf, so your request authentication must
* have read permissions for the object. For more information, see
* [Dialogflow access
* control](https://cloud.google.com/dialogflow/cx/docs/concept/access-control#storage).
* </pre>
*
* <code>string agent_uri = 2;</code>
*
* @return The agentUri.
*/
@java.lang.Override
public java.lang.String getAgentUri() {
java.lang.Object ref = "";
if (agentCase_ == 2) {
ref = agent_;
}
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (agentCase_ == 2) {
agent_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The URI to a Google Cloud Storage file containing the agent to restore.
* Note: The URI must start with "gs://".
*
* Dialogflow performs a read operation for the Cloud Storage object
* on the caller's behalf, so your request authentication must
* have read permissions for the object. For more information, see
* [Dialogflow access
* control](https://cloud.google.com/dialogflow/cx/docs/concept/access-control#storage).
* </pre>
*
* <code>string agent_uri = 2;</code>
*
* @return The bytes for agentUri.
*/
@java.lang.Override
public com.google.protobuf.ByteString getAgentUriBytes() {
java.lang.Object ref = "";
if (agentCase_ == 2) {
ref = agent_;
}
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
if (agentCase_ == 2) {
agent_ = b;
}
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The URI to a Google Cloud Storage file containing the agent to restore.
* Note: The URI must start with "gs://".
*
* Dialogflow performs a read operation for the Cloud Storage object
* on the caller's behalf, so your request authentication must
* have read permissions for the object. For more information, see
* [Dialogflow access
* control](https://cloud.google.com/dialogflow/cx/docs/concept/access-control#storage).
* </pre>
*
* <code>string agent_uri = 2;</code>
*
* @param value The agentUri to set.
* @return This builder for chaining.
*/
public Builder setAgentUri(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
agentCase_ = 2;
agent_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* The URI to a Google Cloud Storage file containing the agent to restore.
* Note: The URI must start with "gs://".
*
* Dialogflow performs a read operation for the Cloud Storage object
* on the caller's behalf, so your request authentication must
* have read permissions for the object. For more information, see
* [Dialogflow access
* control](https://cloud.google.com/dialogflow/cx/docs/concept/access-control#storage).
* </pre>
*
* <code>string agent_uri = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearAgentUri() {
if (agentCase_ == 2) {
agentCase_ = 0;
agent_ = null;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* The URI to a Google Cloud Storage file containing the agent to restore.
* Note: The URI must start with "gs://".
*
* Dialogflow performs a read operation for the Cloud Storage object
* on the caller's behalf, so your request authentication must
* have read permissions for the object. For more information, see
* [Dialogflow access
* control](https://cloud.google.com/dialogflow/cx/docs/concept/access-control#storage).
* </pre>
*
* <code>string agent_uri = 2;</code>
*
* @param value The bytes for agentUri to set.
* @return This builder for chaining.
*/
public Builder setAgentUriBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
agentCase_ = 2;
agent_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* Zip compressed raw byte content for agent.
* </pre>
*
* <code>bytes agent_content = 3;</code>
*
* @return Whether the agentContent field is set.
*/
public boolean hasAgentContent() {
return agentCase_ == 3;
}
/**
*
*
* <pre>
* Zip compressed raw byte content for agent.
* </pre>
*
* <code>bytes agent_content = 3;</code>
*
* @return The agentContent.
*/
public com.google.protobuf.ByteString getAgentContent() {
  // Return the stored bytes only while the agent_content arm is active;
  // otherwise fall back to the canonical empty ByteString.
  return agentCase_ == 3
      ? (com.google.protobuf.ByteString) agent_
      : com.google.protobuf.ByteString.EMPTY;
}
/**
*
*
* <pre>
* Zip compressed raw byte content for agent.
* </pre>
*
* <code>bytes agent_content = 3;</code>
*
* @param value The agentContent to set.
* @return This builder for chaining.
*/
public Builder setAgentContent(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  // Activate the agent_content arm (field number 3) and store the zipped payload.
  agent_ = value;
  agentCase_ = 3;
  onChanged();
  return this;
}
/**
*
*
* <pre>
* Zip compressed raw byte content for agent.
* </pre>
*
* <code>bytes agent_content = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearAgentContent() {
  boolean contentIsSet = agentCase_ == 3;
  if (contentIsSet) {
    // Reset the oneof to its "not set" state; other arms are left untouched.
    agentCase_ = 0;
    agent_ = null;
    onChanged();
  }
  return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.v2.RestoreAgentRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.dialogflow.v2.RestoreAgentRequest)
private static final com.google.cloud.dialogflow.v2.RestoreAgentRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.dialogflow.v2.RestoreAgentRequest();
}
public static com.google.cloud.dialogflow.v2.RestoreAgentRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Singleton parser backing all parseFrom(...) entry points.  Delegates to the
// Builder's mergeFrom and normalizes low-level failures into
// InvalidProtocolBufferException, attaching the partially parsed message so
// callers can inspect whatever was successfully read before the failure.
private static final com.google.protobuf.Parser<RestoreAgentRequest> PARSER =
new com.google.protobuf.AbstractParser<RestoreAgentRequest>() {
@java.lang.Override
public RestoreAgentRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
// Re-throw with the partial message attached rather than losing parsed state.
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
// Missing required fields: surface as an invalid-protocol failure.
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
// Wrap transport-level errors in the exception type protobuf callers expect.
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<RestoreAgentRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<RestoreAgentRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.dialogflow.v2.RestoreAgentRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,035 | java-visionai/proto-google-cloud-visionai-v1/src/main/java/com/google/cloud/visionai/v1/UpdateSearchConfigRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/visionai/v1/warehouse.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.visionai.v1;
/**
*
*
* <pre>
* Request message for UpdateSearchConfig.
* </pre>
*
* Protobuf type {@code google.cloud.visionai.v1.UpdateSearchConfigRequest}
*/
public final class UpdateSearchConfigRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.visionai.v1.UpdateSearchConfigRequest)
UpdateSearchConfigRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateSearchConfigRequest.newBuilder() to construct.
private UpdateSearchConfigRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateSearchConfigRequest() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UpdateSearchConfigRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.visionai.v1.WarehouseProto
.internal_static_google_cloud_visionai_v1_UpdateSearchConfigRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.visionai.v1.WarehouseProto
.internal_static_google_cloud_visionai_v1_UpdateSearchConfigRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.visionai.v1.UpdateSearchConfigRequest.class,
com.google.cloud.visionai.v1.UpdateSearchConfigRequest.Builder.class);
}
private int bitField0_;
public static final int SEARCH_CONFIG_FIELD_NUMBER = 1;
private com.google.cloud.visionai.v1.SearchConfig searchConfig_;
/**
*
*
* <pre>
* Required. The search configuration to update.
*
* The search configuration's `name` field is used to identify the resource to
* be updated. Format:
* `projects/{project_number}/locations/{location}/corpora/{corpus}/searchConfigs/{search_config}`
* </pre>
*
* <code>
* .google.cloud.visionai.v1.SearchConfig search_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the searchConfig field is set.
*/
@java.lang.Override
public boolean hasSearchConfig() {
  // Bit 0 of bitField0_ tracks presence of the search_config message field.
  return (bitField0_ & 0x00000001) == 0x00000001;
}
/**
*
*
* <pre>
* Required. The search configuration to update.
*
* The search configuration's `name` field is used to identify the resource to
* be updated. Format:
* `projects/{project_number}/locations/{location}/corpora/{corpus}/searchConfigs/{search_config}`
* </pre>
*
* <code>
* .google.cloud.visionai.v1.SearchConfig search_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The searchConfig.
*/
@java.lang.Override
public com.google.cloud.visionai.v1.SearchConfig getSearchConfig() {
  // Fall back to the shared default instance when the field was never set.
  if (searchConfig_ != null) {
    return searchConfig_;
  }
  return com.google.cloud.visionai.v1.SearchConfig.getDefaultInstance();
}
/**
*
*
* <pre>
* Required. The search configuration to update.
*
* The search configuration's `name` field is used to identify the resource to
* be updated. Format:
* `projects/{project_number}/locations/{location}/corpora/{corpus}/searchConfigs/{search_config}`
* </pre>
*
* <code>
* .google.cloud.visionai.v1.SearchConfig search_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.visionai.v1.SearchConfigOrBuilder getSearchConfigOrBuilder() {
return searchConfig_ == null
? com.google.cloud.visionai.v1.SearchConfig.getDefaultInstance()
: searchConfig_;
}
public static final int UPDATE_MASK_FIELD_NUMBER = 2;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* The list of fields to be updated. If left unset, all field paths will be
* updated/overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* The list of fields to be updated. If left unset, all field paths will be
* updated/overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* The list of fields to be updated. If left unset, all field paths will be
* updated/overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
// Memoized tri-state: -1 = not yet computed, 0 = not initialized, 1 = initialized.
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// This message declares no required fields, so it is always initialized.
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
// Serialize only fields whose presence bit is set, in field-number order.
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getSearchConfig());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getUpdateMask());
}
// Round-trip fields this binary does not know about (forward compatibility).
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
// memoizedSize caches the computed size; -1 marks it as not yet computed.
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getSearchConfig());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.visionai.v1.UpdateSearchConfigRequest)) {
return super.equals(obj);
}
com.google.cloud.visionai.v1.UpdateSearchConfigRequest other =
(com.google.cloud.visionai.v1.UpdateSearchConfigRequest) obj;
// Field presence must match before values are compared.
if (hasSearchConfig() != other.hasSearchConfig()) return false;
if (hasSearchConfig()) {
if (!getSearchConfig().equals(other.getSearchConfig())) return false;
}
if (hasUpdateMask() != other.hasUpdateMask()) return false;
if (hasUpdateMask()) {
if (!getUpdateMask().equals(other.getUpdateMask())) return false;
}
// Unknown fields participate in equality as well.
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
// 0 doubles as the "not yet computed" sentinel for the memoized hash.
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
// Only set fields contribute, mixed in with their field number so that
// different fields holding equal values still hash differently.
if (hasSearchConfig()) {
hash = (37 * hash) + SEARCH_CONFIG_FIELD_NUMBER;
hash = (53 * hash) + getSearchConfig().hashCode();
}
if (hasUpdateMask()) {
hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
hash = (53 * hash) + getUpdateMask().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.visionai.v1.UpdateSearchConfigRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.visionai.v1.UpdateSearchConfigRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.visionai.v1.UpdateSearchConfigRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.visionai.v1.UpdateSearchConfigRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.visionai.v1.UpdateSearchConfigRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.visionai.v1.UpdateSearchConfigRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.visionai.v1.UpdateSearchConfigRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.visionai.v1.UpdateSearchConfigRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.visionai.v1.UpdateSearchConfigRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.visionai.v1.UpdateSearchConfigRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.visionai.v1.UpdateSearchConfigRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.visionai.v1.UpdateSearchConfigRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.visionai.v1.UpdateSearchConfigRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for UpdateSearchConfig.
* </pre>
*
* Protobuf type {@code google.cloud.visionai.v1.UpdateSearchConfigRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.visionai.v1.UpdateSearchConfigRequest)
com.google.cloud.visionai.v1.UpdateSearchConfigRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.visionai.v1.WarehouseProto
.internal_static_google_cloud_visionai_v1_UpdateSearchConfigRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.visionai.v1.WarehouseProto
.internal_static_google_cloud_visionai_v1_UpdateSearchConfigRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.visionai.v1.UpdateSearchConfigRequest.class,
com.google.cloud.visionai.v1.UpdateSearchConfigRequest.Builder.class);
}
// Construct using com.google.cloud.visionai.v1.UpdateSearchConfigRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getSearchConfigFieldBuilder();
getUpdateMaskFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
// Drop both the plain field values and any nested builders; dispose()
// detaches a child builder so it stops notifying this parent builder.
searchConfig_ = null;
if (searchConfigBuilder_ != null) {
searchConfigBuilder_.dispose();
searchConfigBuilder_ = null;
}
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.visionai.v1.WarehouseProto
.internal_static_google_cloud_visionai_v1_UpdateSearchConfigRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.visionai.v1.UpdateSearchConfigRequest getDefaultInstanceForType() {
return com.google.cloud.visionai.v1.UpdateSearchConfigRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.visionai.v1.UpdateSearchConfigRequest build() {
com.google.cloud.visionai.v1.UpdateSearchConfigRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.visionai.v1.UpdateSearchConfigRequest buildPartial() {
com.google.cloud.visionai.v1.UpdateSearchConfigRequest result =
new com.google.cloud.visionai.v1.UpdateSearchConfigRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Copies every set field from this builder into {@code result}, preferring a
// nested builder's built value over the raw field when one exists, and
// transfers the corresponding presence bits.
private void buildPartial0(com.google.cloud.visionai.v1.UpdateSearchConfigRequest result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.searchConfig_ =
searchConfigBuilder_ == null ? searchConfig_ : searchConfigBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
to_bitField0_ |= 0x00000002;
}
// OR rather than assign: the message may already carry other bits.
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.visionai.v1.UpdateSearchConfigRequest) {
return mergeFrom((com.google.cloud.visionai.v1.UpdateSearchConfigRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.visionai.v1.UpdateSearchConfigRequest other) {
if (other == com.google.cloud.visionai.v1.UpdateSearchConfigRequest.getDefaultInstance())
return this;
if (other.hasSearchConfig()) {
mergeSearchConfig(other.getSearchConfig());
}
if (other.hasUpdateMask()) {
mergeUpdateMask(other.getUpdateMask());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
// Tag-dispatch loop: read one wire-format tag per iteration until
// end-of-stream (tag 0) or an end-group tag terminates the message.
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
// Field 1 (search_config), wire type 2: merge into the nested builder.
input.readMessage(getSearchConfigFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
// Field 2 (update_mask), wire type 2.
input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
// Notify listeners even on failure so partially merged state is observed.
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.cloud.visionai.v1.SearchConfig searchConfig_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.visionai.v1.SearchConfig,
com.google.cloud.visionai.v1.SearchConfig.Builder,
com.google.cloud.visionai.v1.SearchConfigOrBuilder>
searchConfigBuilder_;
/**
*
*
* <pre>
* Required. The search configuration to update.
*
* The search configuration's `name` field is used to identify the resource to
* be updated. Format:
* `projects/{project_number}/locations/{location}/corpora/{corpus}/searchConfigs/{search_config}`
* </pre>
*
* <code>
* .google.cloud.visionai.v1.SearchConfig search_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the searchConfig field is set.
*/
public boolean hasSearchConfig() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The search configuration to update.
*
* The search configuration's `name` field is used to identify the resource to
* be updated. Format:
* `projects/{project_number}/locations/{location}/corpora/{corpus}/searchConfigs/{search_config}`
* </pre>
*
* <code>
* .google.cloud.visionai.v1.SearchConfig search_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The searchConfig.
*/
public com.google.cloud.visionai.v1.SearchConfig getSearchConfig() {
if (searchConfigBuilder_ == null) {
return searchConfig_ == null
? com.google.cloud.visionai.v1.SearchConfig.getDefaultInstance()
: searchConfig_;
} else {
return searchConfigBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The search configuration to update.
*
* The search configuration's `name` field is used to identify the resource to
* be updated. Format:
* `projects/{project_number}/locations/{location}/corpora/{corpus}/searchConfigs/{search_config}`
* </pre>
*
* <code>
* .google.cloud.visionai.v1.SearchConfig search_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setSearchConfig(com.google.cloud.visionai.v1.SearchConfig value) {
if (searchConfigBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
searchConfig_ = value;
} else {
searchConfigBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The search configuration to update.
*
* The search configuration's `name` field is used to identify the resource to
* be updated. Format:
* `projects/{project_number}/locations/{location}/corpora/{corpus}/searchConfigs/{search_config}`
* </pre>
*
* <code>
* .google.cloud.visionai.v1.SearchConfig search_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setSearchConfig(
com.google.cloud.visionai.v1.SearchConfig.Builder builderForValue) {
if (searchConfigBuilder_ == null) {
searchConfig_ = builderForValue.build();
} else {
searchConfigBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The search configuration to update.
*
* The search configuration's `name` field is used to identify the resource to
* be updated. Format:
* `projects/{project_number}/locations/{location}/corpora/{corpus}/searchConfigs/{search_config}`
* </pre>
*
* <code>
* .google.cloud.visionai.v1.SearchConfig search_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeSearchConfig(com.google.cloud.visionai.v1.SearchConfig value) {
if (searchConfigBuilder_ == null) {
// Merge into the existing value only when one is actually present;
// otherwise adopt the incoming message wholesale.  The reference
// comparison against the shared default instance is deliberate.
if (((bitField0_ & 0x00000001) != 0)
&& searchConfig_ != null
&& searchConfig_ != com.google.cloud.visionai.v1.SearchConfig.getDefaultInstance()) {
getSearchConfigBuilder().mergeFrom(value);
} else {
searchConfig_ = value;
}
} else {
searchConfigBuilder_.mergeFrom(value);
}
// Set the presence bit only when the raw field still holds a non-null value.
if (searchConfig_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The search configuration to update.
*
* The search configuration's `name` field is used to identify the resource to
* be updated. Format:
* `projects/{project_number}/locations/{location}/corpora/{corpus}/searchConfigs/{search_config}`
* </pre>
*
* <code>
* .google.cloud.visionai.v1.SearchConfig search_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearSearchConfig() {
bitField0_ = (bitField0_ & ~0x00000001);
searchConfig_ = null;
if (searchConfigBuilder_ != null) {
searchConfigBuilder_.dispose();
searchConfigBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The search configuration to update.
*
* The search configuration's `name` field is used to identify the resource to
* be updated. Format:
* `projects/{project_number}/locations/{location}/corpora/{corpus}/searchConfigs/{search_config}`
* </pre>
*
* <code>
* .google.cloud.visionai.v1.SearchConfig search_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.visionai.v1.SearchConfig.Builder getSearchConfigBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getSearchConfigFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The search configuration to update.
*
* The search configuration's `name` field is used to identify the resource to
* be updated. Format:
* `projects/{project_number}/locations/{location}/corpora/{corpus}/searchConfigs/{search_config}`
* </pre>
*
* <code>
* .google.cloud.visionai.v1.SearchConfig search_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.visionai.v1.SearchConfigOrBuilder getSearchConfigOrBuilder() {
if (searchConfigBuilder_ != null) {
return searchConfigBuilder_.getMessageOrBuilder();
} else {
return searchConfig_ == null
? com.google.cloud.visionai.v1.SearchConfig.getDefaultInstance()
: searchConfig_;
}
}
/**
*
*
* <pre>
* Required. The search configuration to update.
*
* The search configuration's `name` field is used to identify the resource to
* be updated. Format:
* `projects/{project_number}/locations/{location}/corpora/{corpus}/searchConfigs/{search_config}`
* </pre>
*
* <code>
* .google.cloud.visionai.v1.SearchConfig search_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.visionai.v1.SearchConfig,
com.google.cloud.visionai.v1.SearchConfig.Builder,
com.google.cloud.visionai.v1.SearchConfigOrBuilder>
getSearchConfigFieldBuilder() {
if (searchConfigBuilder_ == null) {
searchConfigBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.visionai.v1.SearchConfig,
com.google.cloud.visionai.v1.SearchConfig.Builder,
com.google.cloud.visionai.v1.SearchConfigOrBuilder>(
getSearchConfig(), getParentForChildren(), isClean());
searchConfig_ = null;
}
return searchConfigBuilder_;
}
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
updateMaskBuilder_;
/**
*
*
* <pre>
* The list of fields to be updated. If left unset, all field paths will be
* updated/overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* The list of fields to be updated. If left unset, all field paths will be
* updated/overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return The updateMask.
*/
public com.google.protobuf.FieldMask getUpdateMask() {
if (updateMaskBuilder_ == null) {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
} else {
return updateMaskBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* The list of fields to be updated. If left unset, all field paths will be
* updated/overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateMask_ = value;
} else {
updateMaskBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The list of fields to be updated. If left unset, all field paths will be
* updated/overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
if (updateMaskBuilder_ == null) {
updateMask_ = builderForValue.build();
} else {
updateMaskBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The list of fields to be updated. If left unset, all field paths will be
* updated/overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
// Merge field-by-field when a mask is already set; otherwise take the
// incoming mask as-is.  The reference comparison against the shared
// default instance is deliberate.
if (((bitField0_ & 0x00000002) != 0)
&& updateMask_ != null
&& updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
getUpdateMaskBuilder().mergeFrom(value);
} else {
updateMask_ = value;
}
} else {
updateMaskBuilder_.mergeFrom(value);
}
// Set the presence bit only when the raw field still holds a non-null value.
if (updateMask_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* The list of fields to be updated. If left unset, all field paths will be
* updated/overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder clearUpdateMask() {
bitField0_ = (bitField0_ & ~0x00000002);
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* The list of fields to be updated. If left unset, all field paths will be
* updated/overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getUpdateMaskFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* The list of fields to be updated. If left unset, all field paths will be
* updated/overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
if (updateMaskBuilder_ != null) {
return updateMaskBuilder_.getMessageOrBuilder();
} else {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
}
}
/**
*
*
* <pre>
* The list of fields to be updated. If left unset, all field paths will be
* updated/overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
getUpdateMaskFieldBuilder() {
if (updateMaskBuilder_ == null) {
updateMaskBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>(
getUpdateMask(), getParentForChildren(), isClean());
updateMask_ = null;
}
return updateMaskBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.visionai.v1.UpdateSearchConfigRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.visionai.v1.UpdateSearchConfigRequest)
// Singleton default (all-fields-unset) instance, created eagerly at class-load time.
private static final com.google.cloud.visionai.v1.UpdateSearchConfigRequest DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.visionai.v1.UpdateSearchConfigRequest();
}
/** Returns the shared immutable default instance of this message type. */
public static com.google.cloud.visionai.v1.UpdateSearchConfigRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Wire-format parser. On any failure it attaches the partially-built message to the
// thrown InvalidProtocolBufferException so callers can inspect what was decoded so far.
private static final com.google.protobuf.Parser<UpdateSearchConfigRequest> PARSER =
    new com.google.protobuf.AbstractParser<UpdateSearchConfigRequest>() {
      @java.lang.Override
      public UpdateSearchConfigRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Re-throw with the partial message attached.
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          // Required fields missing: convert to the checked protobuf exception.
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Underlying stream error: wrap so callers see a single exception type.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
/** Returns the static wire-format parser for this message type. */
public static com.google.protobuf.Parser<UpdateSearchConfigRequest> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<UpdateSearchConfigRequest> getParserForType() {
  // Instance-level accessor required by the Message interface; same singleton parser.
  return PARSER;
}
@java.lang.Override
public com.google.cloud.visionai.v1.UpdateSearchConfigRequest getDefaultInstanceForType() {
  // Instance-level accessor required by the Message interface; same singleton default.
  return DEFAULT_INSTANCE;
}
}
|
apache/ignite-3 | 36,108 | modules/page-memory/src/main/java/org/apache/ignite/internal/pagememory/persistence/checkpoint/Checkpointer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.pagememory.persistence.checkpoint;
import static java.lang.Math.max;
import static java.lang.System.nanoTime;
import static java.util.concurrent.TimeUnit.MICROSECONDS;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.MINUTES;
import static java.util.concurrent.TimeUnit.NANOSECONDS;
import static org.apache.ignite.internal.failure.FailureType.CRITICAL_ERROR;
import static org.apache.ignite.internal.failure.FailureType.SYSTEM_WORKER_TERMINATION;
import static org.apache.ignite.internal.pagememory.persistence.checkpoint.CheckpointReadWriteLock.CHECKPOINT_RUNNER_THREAD_PREFIX;
import static org.apache.ignite.internal.pagememory.persistence.checkpoint.CheckpointState.LOCK_TAKEN;
import static org.apache.ignite.internal.pagememory.persistence.checkpoint.CheckpointState.PAGES_SNAPSHOT_TAKEN;
import static org.apache.ignite.internal.util.FastTimestamps.coarseCurrentTimeMillis;
import static org.apache.ignite.internal.util.IgniteUtils.safeAbs;
import static org.apache.ignite.internal.util.IgniteUtils.shutdownAndAwaitTermination;
import java.io.IOException;
import java.nio.file.Path;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.atomic.LongAdder;
import java.util.concurrent.locks.Lock;
import java.util.function.BooleanSupplier;
import org.apache.ignite.internal.components.LogSyncer;
import org.apache.ignite.internal.components.LongJvmPauseDetector;
import org.apache.ignite.internal.failure.FailureContext;
import org.apache.ignite.internal.failure.FailureManager;
import org.apache.ignite.internal.lang.IgniteBiTuple;
import org.apache.ignite.internal.lang.IgniteInternalCheckedException;
import org.apache.ignite.internal.lang.IgniteInternalException;
import org.apache.ignite.internal.lang.NodeStoppingException;
import org.apache.ignite.internal.logger.IgniteLogger;
import org.apache.ignite.internal.logger.Loggers;
import org.apache.ignite.internal.pagememory.DataRegion;
import org.apache.ignite.internal.pagememory.configuration.CheckpointConfiguration;
import org.apache.ignite.internal.pagememory.persistence.GroupPartitionId;
import org.apache.ignite.internal.pagememory.persistence.PartitionDestructionLockManager;
import org.apache.ignite.internal.pagememory.persistence.PartitionMeta;
import org.apache.ignite.internal.pagememory.persistence.PartitionMetaManager;
import org.apache.ignite.internal.pagememory.persistence.PersistentPageMemory;
import org.apache.ignite.internal.pagememory.persistence.WriteSpeedFormatter;
import org.apache.ignite.internal.pagememory.persistence.compaction.Compactor;
import org.apache.ignite.internal.pagememory.persistence.store.DeltaFilePageStoreIo;
import org.apache.ignite.internal.pagememory.persistence.store.FilePageStore;
import org.apache.ignite.internal.pagememory.persistence.store.FilePageStoreManager;
import org.apache.ignite.internal.thread.IgniteThread;
import org.apache.ignite.internal.util.IgniteConcurrentMultiPairQueue;
import org.apache.ignite.internal.util.worker.IgniteWorker;
import org.apache.ignite.internal.util.worker.WorkProgressDispatcher;
import org.jetbrains.annotations.Nullable;
/**
* Checkpointer object is used for notification on checkpoint begin, predicate is {@code nextCheckpointTimestamps - now > 0}.
*
* <p>Method {@link #scheduleCheckpoint} uses {@link Object#notifyAll()}, {@link #waitCheckpointEvent} uses {@link Object#wait(long)}.
*
* <p>Checkpointer is one threaded which means that only one checkpoint at the one moment possible.
*
 * <p>Responsibility:
* <ul>
* <li>Provide the API for schedule/trigger the checkpoint.</li>
* <li>Schedule new checkpoint after current one according to checkpoint interval.</li>
* <li>Failure handling.</li>
* <li>Managing of page write threads.</li>
* <li>Logging and metrics of checkpoint.</li>
* </ul>
*
* <p>Checkpointer steps:
* <ul>
* <li>Awaiting checkpoint event.</li>
* <li>Collect all dirty pages from page memory under checkpoint write lock.</li>
* <li>Start to write dirty pages to page store.</li>
* <li>Finish the checkpoint.
* </ul>
*/
// TODO: IGNITE-26593 Fix the counting and output of the written dirty pages metric
public class Checkpointer extends IgniteWorker {
    /** Log template for a checkpoint that started and has dirty pages to write. */
    private static final String CHECKPOINT_STARTED_LOG_TEMPLATE = "Checkpoint started ["
            + "checkpointId={}, "
            + "beforeWriteLockTime={}ms, "
            + "writeLockWait={}us, "
            + "listenersExecuteTime={}us, "
            + "writeLockHoldTime={}us, "
            + "splitAndSortPagesDuration={}ms, "
            + "{}"
            + "pages={}, "
            + "reason='{}']";

    /** Log template for a checkpoint that is skipped because no pages were modified. */
    private static final String CHECKPOINT_SKIPPED_LOG_TEMPLATE = "Skipping checkpoint (no pages were modified) ["
            + "beforeWriteLockTime={}ms, "
            + "writeLockWait={}us, "
            + "listenersExecuteTime={}us, "
            + "writeLockHoldTime={}us, reason='{}']";

    /** Log template for a successfully finished checkpoint. */
    private static final String CHECKPOINT_FINISHED_LOG_TEMPLATE = "Checkpoint finished ["
            + "checkpointId={}, "
            + "pages={}, "
            + "pagesWriteTime={}ms, "
            + "fsyncTime={}ms, "
            + "replicatorLogSyncTime={}ms, "
            + "waitCompletePageReplacementTime={}ms, "
            + "totalTime={}ms, "
            + "avgWriteSpeed={}MB/s]";

    /** Logger. */
    private static final IgniteLogger LOG = Loggers.forClass(Checkpointer.class);

    /** Pause detector, {@code null} if long-JVM-pause detection is disabled. */
    private final @Nullable LongJvmPauseDetector pauseDetector;

    /** Page size in bytes; used to compute total written bytes for the write-speed log entry. */
    private final int pageSize;

    /** Checkpoint config. */
    private final CheckpointConfiguration checkpointConfig;

    /** Strategy of where and how to get the pages. */
    private final CheckpointWorkflow checkpointWorkflow;

    /** Factory for the creation of page-write workers. */
    private final CheckpointPagesWriterFactory checkpointPagesWriterFactory;

    /** Checkpoint runner thread pool. If {@code null} tasks are to be run in single thread. */
    private final @Nullable ThreadPoolExecutor checkpointWritePagesPool;

    /** Partition meta manager. */
    private final PartitionMetaManager partitionMetaManager;

    /** Next scheduled checkpoint progress. */
    private volatile CheckpointProgressImpl scheduledCheckpointProgress;

    /** Current checkpoint progress. This field is updated only by checkpoint thread. */
    private volatile @Nullable CheckpointProgressImpl currentCheckpointProgress;

    /**
     * Checkpoint progress instance with a more limited range of visibility. It is initialized when checkpoint write lock is acquired, and
     * nullified when checkpoint finishes (unlike {@link #currentCheckpointProgress} that is updated before we started notifying checkpoint
     * listeners and is never nullified).
     */
    private volatile @Nullable CheckpointProgressImpl currentCheckpointProgressForThrottling;

    /** Checkpoint progress after releasing write lock. */
    private volatile @Nullable CheckpointProgressImpl afterReleaseWriteLockCheckpointProgress;

    /** Shutdown now flag: when set, all checkpoint activity stops immediately. */
    private volatile boolean shutdownNow;

    /** Last checkpoint timestamp, read/update only in checkpoint thread. */
    private long lastCheckpointTimestamp;

    /** File page store manager. */
    private final FilePageStoreManager filePageStoreManager;

    /** Delta file compactor. */
    private final Compactor compactor;

    /** Failure processor. */
    private final FailureManager failureManager;

    /** Write-ahead/replicator log synchronizer, flushed before pages are written. */
    private final LogSyncer logSyncer;

    /** Guards fsync of partition files against concurrent partition destruction. */
    private final PartitionDestructionLockManager partitionDestructionLockManager;

    /** Checkpoint metrics sink, updated at the end of every completed checkpoint. */
    private final CheckpointMetrics checkpointMetrics;

    /**
     * Constructor.
     *
     * @param igniteInstanceName Name of the Ignite instance.
     * @param detector Long JVM pause detector.
     * @param failureManager Failure processor that is used to handle critical errors.
     * @param checkpointWorkFlow Implementation of checkpoint.
     * @param factory Page writer factory.
     * @param filePageStoreManager File page store manager.
     * @param partitionMetaManager Partition meta manager.
     * @param compactor Delta file compactor.
     * @param pageSize Page size.
     * @param checkpointConfig Checkpoint configuration.
     * @param logSyncer Write-ahead log synchronizer.
     * @param partitionDestructionLockManager Partition Destruction Lock Manager.
     * @param checkpointMetricSource Source of checkpoint metrics.
     */
    Checkpointer(
            String igniteInstanceName,
            @Nullable LongJvmPauseDetector detector,
            FailureManager failureManager,
            CheckpointWorkflow checkpointWorkFlow,
            CheckpointPagesWriterFactory factory,
            FilePageStoreManager filePageStoreManager,
            PartitionMetaManager partitionMetaManager,
            Compactor compactor,
            int pageSize,
            CheckpointConfiguration checkpointConfig,
            LogSyncer logSyncer,
            PartitionDestructionLockManager partitionDestructionLockManager,
            CheckpointMetricSource checkpointMetricSource
    ) {
        super(LOG, igniteInstanceName, "checkpoint-thread");

        this.pauseDetector = detector;
        this.pageSize = pageSize;
        this.checkpointConfig = checkpointConfig;
        this.checkpointWorkflow = checkpointWorkFlow;
        this.checkpointPagesWriterFactory = factory;
        this.filePageStoreManager = filePageStoreManager;
        this.compactor = compactor;
        this.failureManager = failureManager;
        this.logSyncer = logSyncer;
        this.partitionMetaManager = partitionMetaManager;
        this.partitionDestructionLockManager = partitionDestructionLockManager;

        scheduledCheckpointProgress = new CheckpointProgressImpl(MILLISECONDS.toNanos(nextCheckpointInterval()));

        // A dedicated pool is only worth creating when more than one write thread is configured;
        // otherwise pages are written inline on the checkpoint thread.
        int checkpointWritePageThreads = checkpointConfig.checkpointThreads();

        if (checkpointWritePageThreads > 1) {
            checkpointWritePagesPool = new ThreadPoolExecutor(
                    checkpointWritePageThreads,
                    checkpointWritePageThreads,
                    0L,
                    MILLISECONDS,
                    new LinkedBlockingQueue<>(),
                    IgniteCheckpointThreadFactory.create(igniteInstanceName, CHECKPOINT_RUNNER_THREAD_PREFIX + "-io", false, log)
            );
        } else {
            checkpointWritePagesPool = null;
        }

        checkpointMetrics = new CheckpointMetrics(checkpointMetricSource);
    }

    /**
     * Main worker loop: waits for the next checkpoint event and executes checkpoints until cancelled,
     * then performs one final checkpoint (unless {@link #shutdownNow} is set) to persist remaining dirty pages.
     */
    @Override
    protected void body() {
        try {
            while (!isCancelled()) {
                waitCheckpointEvent();

                if (isCancelled() || shutdownNow) {
                    log.info("Skipping last checkpoint because node is stopping");

                    return;
                }

                doCheckpoint();
            }

            // Final run after the cancellation.
            if (!shutdownNow) {
                doCheckpoint();
            }

            // Leaving the loop without being cancelled means the worker died unexpectedly.
            if (!isCancelled.get()) {
                throw new IllegalStateException("Thread is terminated unexpectedly: " + name());
            }

            scheduledCheckpointProgress.fail(new NodeStoppingException("Node is stopping."));
        } catch (Throwable t) {
            // Fail the scheduled progress so waiters are not blocked forever.
            scheduledCheckpointProgress.fail(t);

            // We need to handle OutOfMemoryError and the rest in different ways
            if (t instanceof OutOfMemoryError) {
                failureManager.process(new FailureContext(CRITICAL_ERROR, t));
            } else {
                failureManager.process(new FailureContext(SYSTEM_WORKER_TERMINATION, t));
            }

            throw new IgniteInternalException(t);
        }
    }

    /**
     * Changes the information for a scheduled checkpoint if it was scheduled further than {@code delayFromNow}, or do nothing otherwise.
     *
     * @param delayFromNow Delay from now in milliseconds.
     * @param reason Wakeup reason.
     * @return Nearest scheduled checkpoint which is not started yet (dirty pages weren't collected yet).
     */
    public CheckpointProgress scheduleCheckpoint(long delayFromNow, String reason) {
        CheckpointProgressImpl current = currentCheckpointProgress;

        // If checkpoint haven't taken write lock yet it shouldn't trigger a new checkpoint but should return current one.
        if (current != null && !current.greaterOrEqualTo(LOCK_TAKEN)) {
            return current;
        }

        current = scheduledCheckpointProgress;

        long nextNanos = nanoTime() + MILLISECONDS.toNanos(delayFromNow);

        // Cheap unsynchronized fast path: the scheduled checkpoint is already at least as soon as requested.
        if (current.nextCheckpointNanos() - nextNanos <= 0) {
            return current;
        }

        synchronized (this) {
            // Re-read under the monitor: the scheduled progress may have been replaced concurrently.
            current = scheduledCheckpointProgress;

            if (current.nextCheckpointNanos() - nextNanos > 0) {
                current.reason(reason);

                current.nextCheckpointNanos(MILLISECONDS.toNanos(delayFromNow));
            }

            // Wake up waitCheckpointEvent() so it re-evaluates the (possibly earlier) deadline.
            notifyAll();
        }

        return current;
    }

    /**
     * Marks partition as dirty, forcing partition's meta-page to be written on disk during next checkpoint.
     */
    void markPartitionAsDirty(DataRegion<?> dataRegion, int groupId, int partitionId, int partitionGeneration) {
        checkpointWorkflow.markPartitionAsDirty(dataRegion, groupId, partitionId, partitionGeneration);
    }

    /**
     * Executes a checkpoint.
     *
     * @throws IgniteInternalCheckedException If failed.
     */
    void doCheckpoint() throws IgniteInternalCheckedException {
        Checkpoint chp = null;

        try {
            var tracker = new CheckpointMetricsTracker();

            tracker.onCheckpointStart();

            CheckpointProgressImpl currentCheckpointProgress = startCheckpointProgress();

            try {
                chp = checkpointWorkflow.markCheckpointBegin(
                        lastCheckpointTimestamp,
                        currentCheckpointProgress,
                        tracker,
                        this::updateHeartbeat,
                        this::updateLastProgressAfterReleaseWriteLock
                );
            } catch (Exception e) {
                if (currentCheckpointProgress != null) {
                    currentCheckpointProgress.fail(e);
                }

                // In case of checkpoint initialization error node should be invalidated and stopped.
                failureManager.process(new FailureContext(CRITICAL_ERROR, e));

                // Re-throw as unchecked exception to force stopping checkpoint thread.
                throw new IgniteInternalCheckedException(e);
            }

            updateHeartbeat();

            if (chp.hasDelta()) {
                // NOTE(review): the isInfoEnabled() guard is duplicated below — harmless but redundant.
                if (log.isInfoEnabled()) {
                    long possibleJvmPauseDuration = possibleLongJvmPauseDuration(tracker);

                    if (log.isInfoEnabled()) {
                        log.info(
                                CHECKPOINT_STARTED_LOG_TEMPLATE,
                                chp.progress.id(),
                                tracker.beforeWriteLockDuration(MILLISECONDS),
                                tracker.writeLockWaitDuration(MICROSECONDS),
                                tracker.onMarkCheckpointBeginDuration(MICROSECONDS),
                                tracker.writeLockHoldDuration(MICROSECONDS),
                                tracker.splitAndSortCheckpointPagesDuration(MILLISECONDS),
                                possibleJvmPauseDuration > 0 ? "possibleJvmPauseDuration=" + possibleJvmPauseDuration + "ms, " : "",
                                chp.dirtyPagesSize,
                                chp.progress.reason()
                        );
                    }
                }

                // Sync the replicator log before writing pages, so persisted pages never run ahead of the log.
                replicatorLogSync(tracker);

                // writePages() returns false if the node started stopping mid-checkpoint; abort without finishing.
                if (!writePages(tracker, chp.dirtyPages, chp.progress, this, this::isShutdownNow)) {
                    return;
                }
            } else {
                if (log.isInfoEnabled()) {
                    log.info(
                            CHECKPOINT_SKIPPED_LOG_TEMPLATE,
                            tracker.beforeWriteLockDuration(MILLISECONDS),
                            tracker.writeLockWaitDuration(MICROSECONDS),
                            tracker.onMarkCheckpointBeginDuration(MICROSECONDS),
                            tracker.writeLockHoldDuration(MICROSECONDS),
                            chp.progress.reason()
                    );
                }
            }

            currentCheckpointProgress.setPagesWriteTimeMillis(
                    tracker.pagesWriteDuration(MILLISECONDS) + tracker.splitAndSortCheckpointPagesDuration(MILLISECONDS)
            );
            currentCheckpointProgress.setFsyncTimeMillis(tracker.fsyncDuration(MILLISECONDS));

            // Must mark successful checkpoint only if there are no exceptions or interrupts.
            checkpointWorkflow.markCheckpointEnd(chp);

            tracker.onCheckpointEnd();

            if (chp.hasDelta()) {
                if (log.isInfoEnabled()) {
                    long totalWriteBytes = (long) pageSize * chp.dirtyPagesSize;
                    long totalDurationInNanos = tracker.checkpointDuration(NANOSECONDS);

                    log.info(
                            CHECKPOINT_FINISHED_LOG_TEMPLATE,
                            chp.progress.id(),
                            chp.dirtyPagesSize,
                            tracker.pagesWriteDuration(MILLISECONDS),
                            tracker.fsyncDuration(MILLISECONDS),
                            tracker.replicatorLogSyncDuration(MILLISECONDS),
                            tracker.waitPageReplacementDuration(MILLISECONDS),
                            tracker.checkpointDuration(MILLISECONDS),
                            WriteSpeedFormatter.formatWriteSpeed(totalWriteBytes, totalDurationInNanos)
                    );
                }
            }

            checkpointMetrics.update(tracker, chp.dirtyPagesSize);
        } catch (IgniteInternalCheckedException e) {
            if (chp != null) {
                chp.progress.fail(e);
            }

            failureManager.process(new FailureContext(CRITICAL_ERROR, e));

            throw e;
        } finally {
            // Throttling must stop observing this checkpoint once it is over (successfully or not).
            currentCheckpointProgressForThrottling = null;
        }
    }

    /**
     * Writes dirty pages to the appropriate stores.
     *
     * @param tracker Checkpoint metrics tracker.
     * @param checkpointDirtyPages Checkpoint dirty pages to write.
     * @param currentCheckpointProgress Current checkpoint progress.
     * @param workProgressDispatcher Work progress dispatcher.
     * @param shutdownNow Checker of stop operation.
     * @return {@code true} if all pages were written and fsync'ed, {@code false} if the node started stopping.
     * @throws IgniteInternalCheckedException If failed.
     */
    private boolean writePages(
            CheckpointMetricsTracker tracker,
            CheckpointDirtyPages checkpointDirtyPages,
            CheckpointProgressImpl currentCheckpointProgress,
            WorkProgressDispatcher workProgressDispatcher,
            BooleanSupplier shutdownNow
    ) throws IgniteInternalCheckedException {
        ThreadPoolExecutor pageWritePool = checkpointWritePagesPool;

        int checkpointWritePageThreads = pageWritePool == null ? 1 : pageWritePool.getMaximumPoolSize();

        // Updated partitions.
        ConcurrentMap<GroupPartitionId, LongAdder> updatedPartitions = new ConcurrentHashMap<>();

        CompletableFuture<?>[] futures = new CompletableFuture[checkpointWritePageThreads];

        tracker.onPagesWriteStart();

        List<PersistentPageMemory> pageMemoryList = checkpointDirtyPages.dirtyPageMemoryInstances();

        IgniteConcurrentMultiPairQueue<PersistentPageMemory, GroupPartitionId> dirtyPartitionQueue
                = checkpointDirtyPages.toDirtyPartitionQueue();

        // One writer per configured thread; all writers pull from the same shared partition queue.
        for (int i = 0; i < checkpointWritePageThreads; i++) {
            CheckpointPagesWriter write = checkpointPagesWriterFactory.build(
                    tracker,
                    dirtyPartitionQueue,
                    pageMemoryList,
                    updatedPartitions,
                    futures[i] = new CompletableFuture<>(),
                    workProgressDispatcher::updateHeartbeat,
                    currentCheckpointProgress,
                    shutdownNow
            );

            if (pageWritePool == null) {
                write.run();
            } else {
                pageWritePool.execute(write);
            }
        }

        workProgressDispatcher.updateHeartbeat();

        // Wait and check for errors.
        CompletableFuture.allOf(futures).join();

        tracker.onPagesWriteEnd();

        // Must re-check shutdown flag here because threads may have skipped some pages because of it.
        // If so, we should not finish checkpoint.
        if (shutdownNow.getAsBoolean()) {
            currentCheckpointProgress.fail(new NodeStoppingException("Node is stopping."));

            return false;
        }

        tracker.onWaitPageReplacementStart();

        // Waiting for the completion of all page replacements if present.
        // Will complete normally or with the first error on one of the page replacements.
        // join() is used intentionally as above.
        currentCheckpointProgress.getUnblockFsyncOnPageReplacementFuture().join();

        tracker.onWaitPageReplacementEnd();

        // Must re-check shutdown flag here because threads could take a long time to complete the page replacement.
        // If so, we should not finish checkpoint.
        if (shutdownNow.getAsBoolean()) {
            currentCheckpointProgress.fail(new NodeStoppingException("Node is stopping."));

            return false;
        }

        tracker.onFsyncStart();

        syncUpdatedPageStores(updatedPartitions, currentCheckpointProgress);

        tracker.onFsyncEnd();

        compactor.triggerCompaction();

        if (shutdownNow.getAsBoolean()) {
            currentCheckpointProgress.fail(new NodeStoppingException("Node is stopping."));

            return false;
        }

        return true;
    }

    /**
     * Fsyncs page stores of all partitions that were touched during the write phase, either inline (single-threaded mode) or spread over
     * the checkpoint write pool.
     *
     * @param updatedPartitions Partitions updated during the write phase, with per-partition written-page counters.
     * @param currentCheckpointProgress Current checkpoint progress.
     * @throws IgniteInternalCheckedException If failed.
     */
    private void syncUpdatedPageStores(
            ConcurrentMap<GroupPartitionId, LongAdder> updatedPartitions,
            CheckpointProgressImpl currentCheckpointProgress
    ) throws IgniteInternalCheckedException {
        ThreadPoolExecutor pageWritePool = checkpointWritePagesPool;

        if (pageWritePool == null) {
            for (Map.Entry<GroupPartitionId, LongAdder> entry : updatedPartitions.entrySet()) {
                if (shutdownNow) {
                    return;
                }

                fsyncPartitionFiles(currentCheckpointProgress, entry.getKey(), entry.getValue());
            }
        } else {
            int checkpointThreads = pageWritePool.getMaximumPoolSize();

            CompletableFuture<?>[] futures = new CompletableFuture[checkpointThreads];

            for (int i = 0; i < checkpointThreads; i++) {
                futures[i] = new CompletableFuture<>();
            }

            // Work-stealing via a shared queue: each worker drains entries until the queue is empty.
            BlockingQueue<Entry<GroupPartitionId, LongAdder>> queue = new LinkedBlockingQueue<>(updatedPartitions.entrySet());

            for (int i = 0; i < checkpointThreads; i++) {
                int threadIdx = i;

                pageWritePool.execute(() -> {
                    Map.Entry<GroupPartitionId, LongAdder> entry = queue.poll();

                    try {
                        while (entry != null) {
                            if (shutdownNow) {
                                break;
                            }

                            fsyncPartitionFiles(currentCheckpointProgress, entry.getKey(), entry.getValue());

                            entry = queue.poll();
                        }

                        futures[threadIdx].complete(null);
                    } catch (Throwable t) {
                        futures[threadIdx].completeExceptionally(t);
                    }
                });
            }

            blockingSectionBegin();

            try {
                CompletableFuture.allOf(futures).join();
            } finally {
                blockingSectionEnd();
            }
        }
    }

    /**
     * Fsyncs a single partition's delta file and page store, then renames the delta file into place. Runs under the partition destruction
     * read lock so the partition cannot be destroyed concurrently; silently returns if the partition is already (being) destroyed.
     *
     * @param currentCheckpointProgress Current checkpoint progress.
     * @param partitionId Partition to sync.
     * @param pagesWritten Number of pages written to this partition during the write phase.
     * @throws IgniteInternalCheckedException If failed.
     */
    private void fsyncPartitionFiles(
            CheckpointProgressImpl currentCheckpointProgress,
            GroupPartitionId partitionId,
            LongAdder pagesWritten
    ) throws IgniteInternalCheckedException {
        FilePageStore filePageStore = filePageStoreManager.getStore(partitionId);

        if (filePageStore == null || filePageStore.isMarkedToDestroy()) {
            return;
        }

        Lock partitionDestructionLock = partitionDestructionLockManager.destructionLock(partitionId).readLock();

        partitionDestructionLock.lock();

        try {
            PartitionMeta meta = partitionMetaManager.getMeta(partitionId);

            // If this happens, then the partition is destroyed.
            if (meta == null) {
                return;
            }

            fsyncDeltaFilePageStoreOnCheckpointThread(filePageStore);
            fsyncFilePageStoreOnCheckpointThread(filePageStore);

            renameDeltaFileOnCheckpointThread(filePageStore, partitionId);

            filePageStore.checkpointedPageCount(meta.metaSnapshot(currentCheckpointProgress.id()).pageCount());

            currentCheckpointProgress.syncedPagesCounter().addAndGet(pagesWritten.intValue());
        } finally {
            partitionDestructionLock.unlock();
        }
    }

    /** Fsyncs the partition's main page store, wrapped in a worker blocking section. */
    private void fsyncFilePageStoreOnCheckpointThread(FilePageStore filePageStore) throws IgniteInternalCheckedException {
        blockingSectionBegin();

        try {
            filePageStore.sync();
        } finally {
            blockingSectionEnd();
        }
    }

    /**
     * Waiting until the next checkpoint time.
     */
    void waitCheckpointEvent() {
        try {
            synchronized (this) {
                long remaining = NANOSECONDS.toMillis(scheduledCheckpointProgress.nextCheckpointNanos() - nanoTime());

                // Loop guards against spurious wake-ups and against the deadline being moved earlier by scheduleCheckpoint().
                while (remaining > 0 && !isCancelled()) {
                    blockingSectionBegin();

                    try {
                        wait(remaining);

                        remaining = NANOSECONDS.toMillis(scheduledCheckpointProgress.nextCheckpointNanos() - nanoTime());
                    } finally {
                        blockingSectionEnd();
                    }
                }
            }
        } catch (InterruptedException ignored) {
            Thread.currentThread().interrupt();

            isCancelled.set(true);
        }
    }

    /**
     * Returns duration of possible JVM pause, if it was detected, or {@code -1} otherwise.
     *
     * @param tracker Checkpoint metrics tracker.
     */
    private long possibleLongJvmPauseDuration(CheckpointMetricsTracker tracker) {
        if (pauseDetector != null) {
            long lockDuration = tracker.writeLockWaitDuration(MILLISECONDS) + tracker.writeLockHoldDuration(MILLISECONDS);

            if (lockDuration > pauseDetector.longJvmPauseThreshold()) {
                long now = coarseCurrentTimeMillis();

                // We must get last wake-up time before search possible pause in events map.
                long wakeUpTime = pauseDetector.getLastWakeUpTime();

                IgniteBiTuple<Long, Long> lastLongPause = pauseDetector.getLastLongPause();

                if (lastLongPause != null && tracker.checkpointStartTime() < lastLongPause.get1()) {
                    return lastLongPause.get2();
                }

                if (now - wakeUpTime > pauseDetector.longJvmPauseThreshold()) {
                    return now - wakeUpTime;
                }
            }
        }

        return -1L;
    }

    /**
     * Update the current checkpoint info from the scheduled one.
     */
    CheckpointProgressImpl startCheckpointProgress() {
        long checkpointStartTimestamp = coarseCurrentTimeMillis();

        // This can happen in an unlikely event of two checkpoints happening within a currentTimeMillis() granularity window.
        if (checkpointStartTimestamp == lastCheckpointTimestamp) {
            checkpointStartTimestamp++;
        }

        lastCheckpointTimestamp = checkpointStartTimestamp;

        synchronized (this) {
            CheckpointProgressImpl curr = scheduledCheckpointProgress;

            if (curr.reason() == null) {
                curr.reason("timeout");
            }

            // It is important that we assign a new progress object before checkpoint mark in page memory.
            scheduledCheckpointProgress = new CheckpointProgressImpl(MILLISECONDS.toNanos(nextCheckpointInterval()));

            currentCheckpointProgress = curr;

            // Expose the progress to throttling only once the dirty-pages snapshot has been taken.
            curr.futureFor(PAGES_SNAPSHOT_TAKEN).thenRun(() -> currentCheckpointProgressForThrottling = curr);

            return curr;
        }
    }

    /** Cancels the worker without interrupting the runner thread; wakes up {@link #waitCheckpointEvent()}. */
    @Override
    @SuppressWarnings("NakedNotify")
    public void cancel() {
        if (log.isDebugEnabled()) {
            log.debug("Cancelling grid runnable: " + this);
        }

        // Do not interrupt runner thread.
        isCancelled.set(true);

        synchronized (this) {
            notifyAll();
        }
    }

    /**
     * Stopping all checkpoint activity immediately even if the current checkpoint is in progress.
     */
    public void shutdownNow() {
        shutdownNow = true;

        if (!isCancelled.get()) {
            cancel();
        }
    }

    /**
     * Starts the checkpointer.
     */
    public void start() {
        if (runner() != null) {
            return;
        }

        assert runner() == null : "Checkpointer is running.";

        new IgniteThread(this).start();
    }

    /**
     * Stops the checkpointer.
     */
    public void stop() throws Exception {
        // Let's write the data.
        shutdownCheckpointer(true);
    }

    /**
     * Shutdown checkpointer.
     *
     * @param shutdown Shutdown flag.
     */
    public void shutdownCheckpointer(boolean shutdown) {
        if (shutdown) {
            shutdownNow();
        } else {
            cancel();
        }

        try {
            join();
        } catch (InterruptedException ignore) {
            log.info("Was interrupted while waiting for checkpointer shutdown, will not wait for checkpoint to finish");

            Thread.currentThread().interrupt();

            shutdownNow();

            // Keep retrying join(): the worker thread must be fully stopped before the pool is torn down.
            while (true) {
                try {
                    join();

                    scheduledCheckpointProgress.fail(new NodeStoppingException("Checkpointer is stopped during node stop."));

                    break;
                } catch (InterruptedException ignored) {
                    Thread.currentThread().interrupt();
                }
            }

            Thread.currentThread().interrupt();
        }

        if (checkpointWritePagesPool != null) {
            shutdownAndAwaitTermination(checkpointWritePagesPool, 2, MINUTES);
        }
    }

    /** Returns the current checkpoint progress, or {@code null} if no checkpoint has been started yet. */
    @Nullable CheckpointProgress currentCheckpointProgress() {
        return currentCheckpointProgress;
    }

    /** Returns the checkpoint progress visible to throttling, or {@code null} outside the snapshot-to-finish window. */
    public @Nullable CheckpointProgress currentCheckpointProgressForThrottling() {
        return currentCheckpointProgressForThrottling;
    }

    /**
     * Returns the progress of the last checkpoint, or the current checkpoint if in progress, {@code null} if no checkpoint has occurred.
     */
    public @Nullable CheckpointProgress lastCheckpointProgress() {
        // Because dirty pages may appear while holding write lock.
        return afterReleaseWriteLockCheckpointProgress;
    }

    /**
     * Returns progress of scheduled checkpoint.
     */
    CheckpointProgress scheduledProgress() {
        return scheduledCheckpointProgress;
    }

    /**
     * Returns {@code true} if checkpoint should be stopped immediately.
     */
    boolean isShutdownNow() {
        return shutdownNow;
    }

    /**
     * Gets a checkpoint interval with a randomized delay in mills.
     *
     * <p>It helps when the cluster makes a checkpoint in the same time in every node.
     */
    long nextCheckpointInterval() {
        long interval = checkpointConfig.intervalMillis();
        int deviation = checkpointConfig.intervalDeviationPercent();

        if (deviation == 0) {
            return interval;
        }

        // Random jitter in roughly [-deviation/2, +deviation/2] percent of the interval.
        long deviationMillis = interval * deviation;

        long startDelay = ThreadLocalRandom.current().nextLong(max(safeAbs(deviationMillis) / 100, 1))
                - max(safeAbs(deviationMillis) / 200, 1);

        return safeAbs(interval + startDelay);
    }

    /** Fsyncs the partition's new delta file, if one exists, wrapped in a worker blocking section. */
    private void fsyncDeltaFilePageStoreOnCheckpointThread(FilePageStore filePageStore) throws IgniteInternalCheckedException {
        blockingSectionBegin();

        try {
            CompletableFuture<DeltaFilePageStoreIo> deltaFilePageStoreFuture = filePageStore.getNewDeltaFile();

            if (deltaFilePageStoreFuture == null) {
                return;
            }

            deltaFilePageStoreFuture.join().sync();
        } finally {
            blockingSectionEnd();
        }
    }

    /**
     * Renames the partition's new delta file (if any) to its final indexed path and marks the delta file complete on the page store.
     * Wrapped in a worker blocking section.
     *
     * @param filePageStore Page store owning the delta file.
     * @param partitionId Partition the delta file belongs to.
     * @throws IgniteInternalCheckedException If the rename fails.
     */
    private void renameDeltaFileOnCheckpointThread(
            FilePageStore filePageStore,
            GroupPartitionId partitionId
    ) throws IgniteInternalCheckedException {
        blockingSectionBegin();

        try {
            CompletableFuture<DeltaFilePageStoreIo> deltaFilePageStoreFuture = filePageStore.getNewDeltaFile();

            if (deltaFilePageStoreFuture == null) {
                return;
            }

            DeltaFilePageStoreIo deltaFilePageStoreIo = deltaFilePageStoreFuture.join();

            Path newDeltaFilePath = filePageStoreManager.deltaFilePageStorePath(
                    partitionId.getGroupId(),
                    partitionId.getPartitionId(),
                    deltaFilePageStoreIo.fileIndex()
            );

            try {
                deltaFilePageStoreIo.renameFilePath(newDeltaFilePath);
            } catch (IOException e) {
                throw new IgniteInternalCheckedException("Error when renaming delta file: " + deltaFilePageStoreIo.filePath(), e);
            }

            filePageStore.completeNewDeltaFile();
        } finally {
            blockingSectionEnd();
        }
    }

    /**
     * Updates the {@link #lastCheckpointProgress() latest progress} after write lock is released.
     */
    void updateLastProgressAfterReleaseWriteLock() {
        afterReleaseWriteLockCheckpointProgress = currentCheckpointProgress;
    }

    /**
     * Syncs the write-ahead/replicator log, recording the duration on the tracker.
     *
     * @param tracker Checkpoint metrics tracker.
     * @throws IgniteInternalCheckedException If the log sync fails.
     */
    private void replicatorLogSync(CheckpointMetricsTracker tracker) throws IgniteInternalCheckedException {
        try {
            tracker.onReplicatorLogSyncStart();

            logSyncer.sync();

            tracker.onReplicatorLogSyncEnd();
        } catch (Exception e) {
            log.error("Failed to sync write-ahead log during checkpoint", e);

            throw new IgniteInternalCheckedException(e);
        }
    }
}
|
apache/openjpa | 35,971 | openjpa-persistence/src/main/java/org/apache/openjpa/persistence/OpenJPAEntityManager.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.openjpa.persistence;
import java.util.Collection;
import java.util.EnumSet;
import java.util.Set;
import jakarta.persistence.EntityManager;
import jakarta.persistence.EntityTransaction;
import jakarta.persistence.LockModeType;
import jakarta.persistence.Query;
import org.apache.openjpa.event.CallbackModes;
import org.apache.openjpa.kernel.AutoClear;
import org.apache.openjpa.kernel.AutoDetach;
import org.apache.openjpa.kernel.ConnectionRetainModes;
import org.apache.openjpa.kernel.DetachState;
import org.apache.openjpa.kernel.RestoreState;
import org.apache.openjpa.persistence.criteria.OpenJPACriteriaBuilder;
import org.apache.openjpa.persistence.query.QueryDefinition;
/**
 * Interface implemented by OpenJPA entity managers.
 *
 * This interface extends {@link EntityTransaction}, but this extension is
 * deprecated.
 *
 * @since 0.4.0
 * @author Abe White
 * @published
 */
public interface OpenJPAEntityManager
    extends EntityManager, EntityTransaction /* deprecated */ {
    /**
     * Return the factory that produced this entity manager.
     */
    @Override OpenJPAEntityManagerFactory getEntityManagerFactory();
    /**
     * Return the (mutable) fetch plan for loading objects from this
     * entity manager.
     */
    FetchPlan getFetchPlan();
    /**
     * Pushes a new fetch plan that inherits from the current fetch plan onto
     * a stack, and makes the new plan the active one.
     *
     * @since 1.1.0
     * @return the new fetch plan
     */
    FetchPlan pushFetchPlan();
    /**
     * Pops the fetch plan from the top of the stack, making the next one down
     * the active one. This returns void to avoid confusion, since fetch plans
     * tend to be used in method-chaining patterns often.
     *
     * @since 1.1.0
     */
    void popFetchPlan();
    /**
     * Return the connection retain mode for this entity manager.
     */
    ConnectionRetainMode getConnectionRetainMode();
    /**
     * Whether this entity manager is using managed transactions.
     *
     * @since 1.1.0
     */
    boolean isTransactionManaged();
    /**
     * @deprecated use {@link #isTransactionManaged} instead to interrogate
     * whether or not this EntityManager's transaction is managed. To determine
     * if a given entity instance is managed, use {@link #contains}.
     */
    @Deprecated boolean isManaged();
    /**
     * Whether to check for a global transaction upon every managed,
     * non-transactional operation. Defaults to false.
     */
    boolean getSyncWithManagedTransactions();
    /**
     * Whether to check for a global transaction upon every managed,
     * non-transactional operation. Defaults to false.
     */
    void setSyncWithManagedTransactions(boolean resync);
    /**
     * Return the current thread's class loader at the time this entity
     * manager was obtained from the factory.
     */
    ClassLoader getClassLoader();
    /**
     * Return the connection user name.
     */
    String getConnectionUserName();
    /**
     * Return the connection password.
     */
    String getConnectionPassword();
    /**
     * Whether the entity manager or its managed instances are used in a
     * multithreaded environment.
     */
    boolean getMultithreaded();
    /**
     * Whether the entity manager or its managed instances are used in a
     * multithreaded environment.
     */
    void setMultithreaded(boolean multi);
    /**
     * Whether to take into account changes in the transaction when executing
     * a query or iterating an extent.
     */
    boolean getIgnoreChanges();
    /**
     * Whether to take into account changes in the transaction when executing
     * a query or iterating an extent.
     */
    void setIgnoreChanges(boolean ignore);
    /**
     * Whether to allow nontransactional access to persistent state.
     */
    boolean getNontransactionalRead();
    /**
     * Whether to allow nontransactional access to persistent state.
     */
    void setNontransactionalRead(boolean read);
    /**
     * Whether to allow nontransactional changes to persistent state.
     */
    boolean getNontransactionalWrite();
    /**
     * Whether to allow nontransactional changes to persistent state.
     */
    void setNontransactionalWrite(boolean write);
    /**
     * Whether to use optimistic transactional semantics.
     */
    boolean getOptimistic();
    /**
     * Whether to use optimistic transactional semantics.
     */
    void setOptimistic(boolean opt);
    /**
     * Whether to restore an object's original state on rollback.
     */
    RestoreStateType getRestoreState();
    /**
     * Whether to restore an object's original state on rollback.
     */
    void setRestoreState(RestoreStateType restoreType);
    /**
     * Whether objects retain their persistent state on transaction commit.
     */
    boolean getRetainState();
    /**
     * Whether objects retain their persistent state on transaction commit.
     */
    void setRetainState(boolean retain);
    /**
     * Detach mode constant to determine which fields are part of the
     * detached graph.
     */
    DetachStateType getDetachState();
    /**
     * Detach mode constant to determine which fields are part of the
     * detached graph.
     */
    void setDetachState(DetachStateType type);
    /**
     * Whether to clear state when entering a transaction.
     */
    AutoClearType getAutoClear();
    /**
     * Whether to clear state when entering a transaction.
     */
    void setAutoClear(AutoClearType clearType);
    /**
     * {@link AutoDetachType} values which indicate when persistent
     * managed objects should be automatically detached in-place.
     */
    EnumSet<AutoDetachType> getAutoDetach();
    /**
     * {@link AutoDetachType} values which indicate when persistent
     * managed objects should be automatically detached in-place.
     * The current value is replaced in its entirety.
     */
    void setAutoDetach(AutoDetachType value);
    /**
     * {@link AutoDetachType} values which indicate when persistent
     * managed objects should be automatically detached in-place.
     * The current value is replaced in its entirety.
     */
    void setAutoDetach(EnumSet<AutoDetachType> values);
    /**
     * Bit flags marked in {@link AutoDetachType} which indicate when persistent
     * managed objects should be automatically detached in-place.
     *
     * @since 1.1.0
     */
    void setAutoDetach(AutoDetachType value, boolean on);
    /**
     * Whether to also evict an object from the store cache when it is
     * evicted through this entity manager.
     */
    boolean getEvictFromStoreCache();
    /**
     * Whether to also evict an object from the store cache when it is
     * evicted through this entity manager.
     */
    void setEvictFromStoreCache(boolean evict);
    /**
     * Whether objects accessed during this transaction will be added to the
     * store cache. Defaults to true.
     *
     * @since 0.3.4
     */
    boolean getPopulateStoreCache();
    /**
     * Whether to populate the store cache with objects used by this
     * transaction. Defaults to true.
     *
     * @since 0.3.4
     */
    void setPopulateStoreCache(boolean cache);
    /**
     * Whether memory usage is reduced during this transaction at the expense
     * of tracking changes at the type level instead of the instance level,
     * resulting in more aggressive cache invalidation.
     *
     * @since 1.0.0
     */
    boolean isTrackChangesByType();
    /**
     * If a large number of objects will be created, modified, or deleted
     * during this transaction setting this option to true will reduce memory
     * usage if you perform periodic flushes by tracking changes at the type
     * level instead of the instance level, resulting in more aggressive cache
     * invalidation.
     *
     * @since 1.0.0
     */
    void setTrackChangesByType(boolean track);
    /**
     * Put the specified key-value pair into the map of user objects. Use
     * a value of null to remove the key.
     */
    Object putUserObject(Object key, Object val);
    /**
     * Get the value for the specified key from the map of user objects.
     */
    Object getUserObject(Object key);
    ///////////
    // Lookups
    ///////////
    /**
     * Return the objects with the given oids.
     *
     * @param oids the oids of the objects to return
     * @return the objects that were looked up, in the
     * same order as the oids parameter.
     * @see #find(Class,Object)
     */
    <T> T[] findAll(Class<T> cls, Object... oids);
    /**
     * Return the objects with the given oids.
     *
     * @param oids the oids of the objects to return
     * @return the objects that were looked up, in the
     * same order as the oids parameter.
     * @see #find(Class,Object)
     */
    <T> Collection<T> findAll(Class<T> cls, Collection oids);
    /**
     * Return the cached instance for the given oid/object, or null if not
     * cached.
     *
     * @param oid the object's id
     * @return the cached object, or null if not cached
     */
    <T> T findCached(Class<T> cls, Object oid);
    /**
     * Return the application identity class the given persistent class uses
     * for object ids, or null if not a type that uses application identity.
     */
    Class getObjectIdClass(Class pcClass);
    ////////////////
    // Transactions
    ////////////////
    /**
     * Return the transaction for this entity manager, with OpenJPA-extended
     * capabilities.
     */
    @Override OpenJPAEntityTransaction getTransaction();
    /**
     * Set a transactional savepoint where operations after this savepoint
     * will be rolled back.
     */
    void setSavepoint(String name);
    /**
     * Rollback the current transaction to the last savepoint.
     * Savepoints set after this one will become invalid.
     */
    void rollbackToSavepoint();
    /**
     * Rollback the current transaction to the given savepoint name.
     * Savepoints set after this one will become invalid.
     */
    void rollbackToSavepoint(String name);
    /**
     * Release the last set savepoint and any resources associated with it.
     * The given savepoint and any set after it will become invalid.
     */
    void releaseSavepoint();
    /**
     * Release the savepoint and any resources associated with it.
     * The given savepoint and any set after it will become invalid.
     */
    void releaseSavepoint(String name);
    /**
     * Run pre-flush actions on transactional objects, including
     * persistence-by-reachability, inverse relationship management,
     * deletion of dependent instances, and instance callbacks.
     * Transaction listeners are not invoked.
     *
     * @since 0.3.3
     */
    void preFlush();
    /**
     * Validate the changes made in this transaction, reporting any optimistic
     * violations, constraint violations, etc. In a datastore transaction or
     * a flushed optimistic transaction, this method will act just like
     * {@link #flush()}. In an optimistic transaction that has not yet begun a
     * datastore-level transaction, however, it will only report exceptions
     * that would occur on flush, without retaining any datastore resources.
     */
    void validateChanges();
    /**
     * Whether a store transaction is active.
     */
    boolean isStoreActive();
    /**
     * Begins a store transaction if one isn't already started. The
     * entity manager must already be in a logical transaction.
     */
    void beginStore();
    ////////////////////
    // Object Lifecycle
    ////////////////////
    /**
     * Whether the given objects are managed.
     */
    boolean containsAll(Object... pcs);
    /**
     * Whether the given objects are managed.
     */
    boolean containsAll(Collection pcs);
    /**
     * Persist the given objects.
     */
    void persistAll(Object... pcs);
    /**
     * Persist the given objects.
     */
    void persistAll(Collection pcs);
    /**
     * Delete the given persistent objects.
     */
    void removeAll(Object... pcs);
    /**
     * Delete the given persistent objects.
     */
    void removeAll(Collection pcs);
    /**
     * Release the given object from management. This operation is not
     * recursive.
     */
    void release(Object pc);
    /**
     * Release the given object from management. This operation is not
     * recursive.
     */
    void releaseAll(Object... pcs);
    /**
     * Release the given objects from management. This operation is not
     * recursive.
     */
    void releaseAll(Collection pcs);
    /**
     * Immediately load the given object's persistent fields. One might
     * use this action to make sure that an instance's fields are loaded
     * before transitioning it to transient. Note that this action is not
     * recursive. Any related objects that are loaded will not necessarily
     * have their fields loaded.
     */
    void retrieve(Object pc);
    /**
     * Retrieve the persistent state of the given objects.
     *
     * @see #retrieve
     */
    void retrieveAll(Object... pcs);
    /**
     * Retrieve the persistent state of the given objects.
     *
     * @see #retrieve
     */
    void retrieveAll(Collection pcs);
    /**
     * Refresh the state of the given objects.
     */
    void refreshAll(Object... pcs);
    /**
     * Refresh the state of the given objects.
     */
    void refreshAll(Collection pcs);
    /**
     * Refresh all transactional objects.
     */
    void refreshAll();
    /**
     * <P> Evict the given object.</P>
     * <P> Eviction acts as a hint to the persistence provider, and indicates that the persistent object is no longer
     * needed by the application and may be garbage collected. It does not remove the object from the L1 cache and only
     * affects objects which are managed and unmodified.
     * </P>
     * @param pc A persistent class which will be evicted
     */
    void evict(Object pc);
    /**
     * <P>Evict the given objects.</P>
     * <P> Eviction acts as a hint to the persistence provider, and indicates that the persistent object is no longer
     * needed by the application and may be garbage collected. It does not remove the object from the L1 cache and only
     * affects objects which are managed and unmodified.
     * </P>
     * @param pcs The persistent classes which will be evicted
     */
    void evictAll(Object... pcs);
    /**
     * <P>Evict the given objects.</P>
     * <P> Eviction acts as a hint to the persistence provider, and indicates that the persistent object is no longer
     * needed by the application and may be garbage collected. It does not remove the object from the L1 cache and only
     * affects objects which are managed and unmodified.
     * </P>
     * @param pcs A collection of persistent classes which will be evicted.
     */
    void evictAll(Collection pcs);
    /**
     * <P>Evict all clean objects.</P>
     * <P> Eviction acts as a hint to the persistence provider, and indicates that the persistent object is no longer
     * needed by the application and may be garbage collected. It does not remove the object from the L1 cache and only
     * affects objects which are managed and unmodified.
     * </P>
     */
    void evictAll();
    /**
     * <P>Evict all persistent-clean and persistent-nontransactional
     * instances in the extent of the given class (including subclasses).</P>
     * <P> Eviction acts as a hint to the persistence provider, and indicates that the persistent object is no longer
     * needed by the application and may be garbage collected. It does not remove the object from the L1 cache and only
     * affects objects which are managed and unmodified.
     * </P>
     * @param cls All clean instances of this class will be evicted.
     */
    void evictAll(Class cls);
    /**
     * <P>Evict all persistent-clean and persistent-nontransactional
     * instances in the given {@link Extent}.</P>
     * <P> Eviction acts as a hint to the persistence provider, and indicates that the persistent object is no longer
     * needed by the application and may be garbage collected. It does not remove the object from the L1 cache and only
     * affects objects which are managed and unmodified.
     * </P>
     * @param extent Extent containing the persistent classes to evict.
     */
    void evictAll(Extent extent);
    /**
     * Detach the specified object from the entity manager, detaching based on
     * the AutoDetach value specified and returning a copy of the detached
     * entity.
     *
     * @param pc the instance to detach
     * @return the detached instance
     *
     * @since 2.0.0
     *
     * Note: This method provides the same contract as the detach method with
     * signature: public <T> T detach(T pc) available in the 1.x release of
     * OpenJPA. The JPA 2.0 specification defined a method with an incompatible
     * signature and different semantics. The specification defined method
     * trumped the existing method.
     */
    <T> T detachCopy(T pc);
    /**
     * Detach the specified objects from the entity manager.
     *
     * @param pcs the instances to detach
     * @return the detached instances
     */
    Collection detachAll(Collection pcs);
    /**
     * Detach the specified objects from the entity manager.
     *
     * @param pcs the instances to detach
     * @return the detached instances
     */
    Object[] detachAll(Object... pcs);
    /**
     * Merge the specified objects into the entity manager.
     *
     * @param pcs instances to import
     * @return the re-attached instances
     */
    Object[] mergeAll(Object... pcs);
    /**
     * Merge the specified detached objects into the entity manager.
     *
     * @param pcs Collection of instances to import
     * @return the re-attached instances
     */
    Collection mergeAll(Collection pcs);
    /**
     * Make the given object transactional.
     *
     * @param pc instance to make transactional
     * @param updateVersion if true, the instance's version will be
     * incremented at the next flush
     */
    void transactional(Object pc, boolean updateVersion);
    /**
     * Make the given objects transactional.
     *
     * @param objs instances to make transactional
     * @param updateVersion if true, the instance's version will be
     * incremented at the next flush
     */
    void transactionalAll(Collection objs, boolean updateVersion);
    /**
     * Make the given objects transactional.
     *
     * @param objs instances to make transactional
     * @param updateVersion if true, the instance's version will be
     * incremented at the next flush
     */
    void transactionalAll(Object[] objs, boolean updateVersion);
    /**
     * Make the given object nontransactional.
     */
    void nontransactional(Object pc);
    /**
     * Make the given objects nontransactional.
     */
    void nontransactionalAll(Collection objs);
    /**
     * Make the given objects nontransactional.
     */
    void nontransactionalAll(Object[] objs);
    ////////////////////////////
    // Extent, Query, Generator
    ////////////////////////////
    /**
     * Return the named generator defined in the metadata.
     */
    Generator getNamedGenerator(String name);
    /**
     * Returns a {@link Generator} for the datastore identity values of the
     * specified type, or null if the type is unmanaged or its identity
     * cannot be represented by a sequence.
     */
    Generator getIdGenerator(Class forClass);
    /**
     * Returns a {@link Generator} for the generated values of the specified
     * type, or null if the field is not generated.
     */
    Generator getFieldGenerator(Class forClass, String fieldName);
    /**
     * Return an extent of the given class, optionally including subclasses.
     */
    <T> Extent<T> createExtent(Class<T> cls, boolean subs);
    /**
     * Create a JPQL query, returning the OpenJPA-extended query type.
     */
    @Override OpenJPAQuery createQuery(String query);
    /**
     * Create a named query, returning the OpenJPA-extended query type.
     */
    @Override OpenJPAQuery createNamedQuery(String name);
    /**
     * Create a native (SQL) query, returning the OpenJPA-extended query type.
     */
    @Override OpenJPAQuery createNativeQuery(String sql);
    /**
     * Create a native (SQL) query whose results are instances of the given
     * class, returning the OpenJPA-extended query type.
     */
    @Override OpenJPAQuery createNativeQuery(String sql, Class resultClass);
    /**
     * Create a native (SQL) query using the given result set mapping,
     * returning the OpenJPA-extended query type.
     */
    @Override OpenJPAQuery createNativeQuery(String sql, String resultMapping);
    /**
     * Create a new query from the given one.
     */
    OpenJPAQuery createQuery(Query query);
    /**
     * Create a new query in the given language.
     */
    OpenJPAQuery createQuery(String language, String query);
    /**
     * Create an executable query from a dynamically defined query.
     *
     * @since 2.0.0
     */
    OpenJPAQuery createDynamicQuery(QueryDefinition dynamic);
    ///////////
    // Locking
    ///////////
    /**
     * Return the lock mode of the given instance, or null if not locked.
     */
    @Override LockModeType getLockMode(Object pc);
    /**
     * Ensure that the given instance is locked at the given lock level.
     *
     * @param pc the object to lock
     * @param mode the lock level to use
     * @param timeout the number of milliseconds to wait for the lock before
     * giving up, or -1 for no limit
     */
    void lock(Object pc, LockModeType mode, int timeout);
    /**
     * Ensure that the given instance is locked at the current lock level, as
     * set in the {@link FetchPlan} for the entity manager.
     */
    void lock(Object pc);
    /**
     * Ensure that the given instances are locked at the given lock level.
     *
     * @param pcs the objects to lock
     * @param mode the lock level to use
     * @param timeout the number of milliseconds to wait for the lock before
     * giving up, or -1 for no limit
     */
    void lockAll(Collection pcs, LockModeType mode, int timeout);
    /**
     * Ensure that the given instances are locked at the current lock level,
     * as set in the {@link FetchPlan} for the entity manager.
     */
    void lockAll(Collection pcs);
    /**
     * Ensure that the given instances are locked at the given lock level.
     *
     * @param pcs the objects to lock
     * @param mode the lock level to use
     * @param timeout the number of milliseconds to wait for the lock before
     * giving up, or -1 for no limit
     */
    void lockAll(Object[] pcs, LockModeType mode, int timeout);
    /**
     * Ensure that the given instances are locked at the current lock level,
     * as set in the {@link FetchPlan} for the entity manager.
     */
    void lockAll(Object... pcs);
    //////////////
    // Connection
    //////////////
    /**
     * Cancel all pending data store statements. If statements are cancelled
     * while a flush is in progress, the transaction rollback only flag will
     * be set.
     *
     * @return true if any statements were cancelled, false otherwise
     */
    boolean cancelAll();
    /**
     * Return the connection in use by the entity manager, or a new
     * connection if none.
     */
    Object getConnection();
    /////////
    // Cache
    /////////
    /**
     * Return a set of all managed instances.
     */
    Collection getManagedObjects();
    /**
     * Return a set of current transaction instances.
     */
    Collection getTransactionalObjects();
    /**
     * Return a set of instances which will become transactional upon
     * the next transaction.
     */
    Collection getPendingTransactionalObjects();
    /**
     * Return a set of current dirty instances.
     */
    Collection getDirtyObjects();
    /**
     * Whether dirty objects will be returned in the order they were dirtied.
     * Default is determined by the store manager.
     */
    boolean getOrderDirtyObjects();
    /**
     * Whether dirty objects will be returned in the order they were dirtied.
     * Default is determined by the store manager.
     */
    void setOrderDirtyObjects(boolean order);
    /**
     * Mark the given class as dirty within the current transaction.
     */
    void dirtyClass(Class cls);
    /**
     * Return the set of classes that have been made persistent in the current
     * transaction.
     */
    Collection<Class> getPersistedClasses();
    /**
     * Return the set of classes that have been deleted in the current
     * transaction.
     */
    Collection<Class> getRemovedClasses();
    /**
     * Return the set of classes for objects that have been modified
     * in the current transaction.
     */
    Collection<Class> getUpdatedClasses();
    /**
     * Create a new instance of type <code>cls</code>. If <code>cls</code> is
     * an interface or an abstract class whose abstract methods follow the
     * JavaBeans convention, this method will create a concrete implementation
     * according to the metadata that defines the class. If <code>cls</code>
     * is a non-final concrete type that has metadata but does not implement
     * {@link org.apache.openjpa.enhance.PersistenceCapable}, this method will
     * create a subclass of the type that does implement
     * {@link org.apache.openjpa.enhance.PersistenceCapable}, and will attempt
     * to redefine the methods in <code>cls</code> to enable persistent
     * attribute tracking. Otherwise, if <code>cls</code> is a managed type,
     * this will return an instance of the specified class.
     *
     * @throws IllegalArgumentException if <code>cls</code> is not a managed
     * type or interface.
     */
    <T> T createInstance(Class<T> cls);
    /**
     * Make the named field of the given object dirty.
     */
    void dirty(Object o, String field);
    /**
     * Return the oid of the given instance.
     */
    Object getObjectId(Object o);
    /**
     * Return whether the given object is dirty.
     */
    boolean isDirty(Object o);
    /**
     * Return whether the given object is transactional.
     */
    boolean isTransactional(Object o);
    /**
     * Return whether the given object is persistent.
     */
    boolean isPersistent(Object o);
    /**
     * Return whether the given object was made persistent in the current
     * transaction.
     */
    boolean isNewlyPersistent(Object o);
    /**
     * Return whether the given object is deleted.
     */
    boolean isRemoved(Object o);
    /**
     * Returns <code>true</code> if <code>pc</code> is a detached object
     * (one that can be reattached to a {@link EntityManager} via a call
     * to {@link EntityManager#merge}); otherwise returns
     * <code>false</code>.
     */
    boolean isDetached (Object o);
    /**
     * Returns the current version indicator for <code>o</code>.
     */
    Object getVersion (Object o);
    /**
     * @deprecated use the {@link ConnectionRetainMode} enum instead.
     */
    @Deprecated int CONN_RETAIN_DEMAND =
        ConnectionRetainModes.CONN_RETAIN_DEMAND;
    /**
     * @deprecated use the {@link ConnectionRetainMode} enum instead.
     */
    @Deprecated int CONN_RETAIN_TRANS =
        ConnectionRetainModes.CONN_RETAIN_TRANS;
    /**
     * @deprecated use the {@link ConnectionRetainMode} enum instead.
     */
    @Deprecated int CONN_RETAIN_ALWAYS =
        ConnectionRetainModes.CONN_RETAIN_ALWAYS;
    /**
     * @deprecated use the {@link DetachStateType} enum instead.
     */
    @Deprecated int DETACH_FETCH_GROUPS =
        DetachState.DETACH_FETCH_GROUPS;
    /**
     * @deprecated use the {@link DetachStateType} enum instead.
     */
    @Deprecated int DETACH_FGS = DetachState.DETACH_FGS;
    /**
     * @deprecated use the {@link DetachStateType} enum instead.
     */
    @Deprecated int DETACH_LOADED = DetachState.DETACH_LOADED;
    /**
     * @deprecated use the {@link DetachStateType} enum instead.
     */
    @Deprecated int DETACH_ALL = DetachState.DETACH_ALL;
    /**
     * @deprecated use the {@link RestoreStateType} enum instead.
     */
    @Deprecated int RESTORE_NONE = RestoreState.RESTORE_NONE;
    /**
     * @deprecated use the {@link RestoreStateType} enum instead.
     */
    @Deprecated int RESTORE_IMMUTABLE = RestoreState.RESTORE_IMMUTABLE;
    /**
     * @deprecated use the {@link RestoreStateType} enum instead.
     */
    @Deprecated int RESTORE_ALL = RestoreState.RESTORE_ALL;
    /**
     * @deprecated use the {@link AutoDetachType} enum instead.
     */
    @Deprecated int DETACH_CLOSE = AutoDetach.DETACH_CLOSE;
    /**
     * @deprecated use the {@link AutoDetachType} enum instead.
     */
    @Deprecated int DETACH_COMMIT = AutoDetach.DETACH_COMMIT;
    /**
     * @deprecated use the {@link AutoDetachType} enum instead.
     */
    @Deprecated int DETACH_NONTXREAD = AutoDetach.DETACH_NONTXREAD;
    /**
     * @deprecated use the {@link AutoDetachType} enum instead.
     */
    @Deprecated int DETACH_ROLLBACK = AutoDetach.DETACH_ROLLBACK;
    /**
     * @deprecated use the {@link AutoClearType} enum instead.
     */
    @Deprecated int CLEAR_DATASTORE = AutoClear.CLEAR_DATASTORE;
    /**
     * @deprecated use the {@link AutoClearType} enum instead.
     */
    @Deprecated int CLEAR_ALL = AutoClear.CLEAR_ALL;
    /**
     * @deprecated use the {@link CallbackMode} enum instead.
     */
    @Deprecated int CALLBACK_FAIL_FAST =
        CallbackModes.CALLBACK_FAIL_FAST;
    /**
     * @deprecated use the {@link CallbackMode} enum instead.
     */
    @Deprecated int CALLBACK_IGNORE = CallbackModes.CALLBACK_IGNORE;
    /**
     * @deprecated use the {@link CallbackMode} enum instead.
     */
    @Deprecated int CALLBACK_LOG = CallbackModes.CALLBACK_LOG;
    /**
     * @deprecated use the {@link CallbackMode} enum instead.
     */
    @Deprecated int CALLBACK_RETHROW = CallbackModes.CALLBACK_RETHROW;
    /**
     * @deprecated use the {@link CallbackMode} enum instead.
     */
    @Deprecated int CALLBACK_ROLLBACK = CallbackModes.CALLBACK_ROLLBACK;
    /**
     * @deprecated cast to {@link OpenJPAEntityManagerSPI} instead. This
     * method pierces the published-API boundary, as does the SPI cast.
     */
    @Deprecated org.apache.openjpa.conf.OpenJPAConfiguration getConfiguration();
    /**
     * @deprecated use {@link #setRestoreState(RestoreStateType)} instead.
     */
    @Deprecated void setRestoreState(int restore);
    /**
     * @deprecated use {@link #setDetachState(DetachStateType)} instead.
     */
    @Deprecated void setDetachState(int detach);
    /**
     * @deprecated use {@link #setAutoClear(AutoClearType)} instead.
     */
    @Deprecated void setAutoClear(int autoClear);
    /**
     * @deprecated use {@link #setAutoDetach(AutoDetachType)} or
     * {@link #setAutoDetach(java.util.EnumSet)} instead.
     */
    @Deprecated void setAutoDetach(int autoDetachFlags);
    /**
     * @deprecated use {@link #setAutoDetach(AutoDetachType, boolean)} instead.
     */
    @Deprecated void setAutoDetach(int flag, boolean on);
    /**
     * @deprecated use {@link #isTrackChangesByType()} instead.
     */
    @Deprecated boolean isLargeTransaction();
    /**
     * @deprecated use {@link #setTrackChangesByType(boolean)} instead.
     */
    @Deprecated void setLargeTransaction(boolean value);
    /**
     * @deprecated cast to {@link OpenJPAEntityManagerSPI} instead. This
     * method pierces the published-API boundary, as does the SPI cast.
     */
    @Deprecated void addTransactionListener(Object listener);
    /**
     * @deprecated cast to {@link OpenJPAEntityManagerSPI} instead. This
     * method pierces the published-API boundary, as does the SPI cast.
     */
    @Deprecated void removeTransactionListener(Object listener);
    /**
     * @deprecated cast to {@link OpenJPAEntityManagerSPI} instead. This
     * method pierces the published-API boundary, as does the SPI cast.
     */
    @Deprecated int getTransactionListenerCallbackMode();
    /**
     * @deprecated cast to {@link OpenJPAEntityManagerSPI} instead. This
     * method pierces the published-API boundary, as does the SPI cast.
     */
    @Deprecated void setTransactionListenerCallbackMode(int callbackMode);
    /**
     * @deprecated cast to {@link OpenJPAEntityManagerSPI} instead. This
     * method pierces the published-API boundary, as does the SPI cast.
     */
    @Deprecated void addLifecycleListener(Object listener, Class... classes);
    /**
     * @deprecated cast to {@link OpenJPAEntityManagerSPI} instead. This
     * method pierces the published-API boundary, as does the SPI cast.
     */
    @Deprecated void removeLifecycleListener(Object listener);
    /**
     * @deprecated cast to {@link OpenJPAEntityManagerSPI} instead. This
     * method pierces the published-API boundary, as does the SPI cast.
     */
    @Deprecated int getLifecycleListenerCallbackMode();
    /**
     * @deprecated cast to {@link OpenJPAEntityManagerSPI} instead. This
     * method pierces the published-API boundary, as does the SPI cast.
     */
    @Deprecated void setLifecycleListenerCallbackMode(int callbackMode);
    /**
     * @deprecated use {@link EntityTransaction#begin}
     * instead: <code>em.getTransaction().begin()</code>
     */
    @Deprecated
    @Override void begin();
    /**
     * @deprecated use {@link EntityTransaction#commit}
     * instead: <code>em.getTransaction().commit()</code>
     */
    @Deprecated
    @Override void commit();
    /**
     * @deprecated use {@link EntityTransaction#rollback}
     * instead: <code>em.getTransaction().rollback()</code>
     */
    @Deprecated
    @Override void rollback();
    /**
     * @deprecated use {@link EntityTransaction#isActive}
     * instead: <code>em.getTransaction().isActive()</code>
     */
    @Deprecated
    @Override boolean isActive();
    /**
     * @deprecated use {@link OpenJPAEntityTransaction#commitAndResume} instead:
     * <code>em.getTransaction().commitAndResume()</code>
     */
    @Deprecated void commitAndResume();
    /**
     * @deprecated use {@link OpenJPAEntityTransaction#rollbackAndResume}
     * instead: <code>em.getTransaction().rollbackAndResume()</code>
     */
    @Deprecated void rollbackAndResume();
    /**
     * @deprecated use {@link EntityTransaction#setRollbackOnly}
     * instead: <code>em.getTransaction().setRollbackOnly()</code>
     */
    @Deprecated
    @Override void setRollbackOnly();
    /**
     * @deprecated use {@link OpenJPAEntityTransaction#setRollbackOnly}
     * instead: <code>em.getTransaction().setRollbackOnly()</code>
     */
    @Deprecated void setRollbackOnly(Throwable cause);
    /**
     * @deprecated use {@link OpenJPAEntityTransaction#getRollbackCause}
     * instead: <code>em.getTransaction().getRollbackCause()</code>
     */
    @Deprecated Throwable getRollbackCause();
    /**
     * @deprecated use {@link EntityTransaction#getRollbackOnly}
     * instead: <code>em.getTransaction().getRollbackOnly()</code>
     */
    @Deprecated
    @Override boolean getRollbackOnly();
    /**
     * Gets the QueryBuilder with OpenJPA-extended capabilities.
     *
     * @since 2.0.0
     */
    @Override OpenJPACriteriaBuilder getCriteriaBuilder();
    /**
     * Get the properties supported by this runtime.
     *
     * @since 2.0.0
     */
    Set<String> getSupportedProperties();
}
|
googleapis/google-cloud-java | 35,999 | java-shopping-merchant-accounts/proto-google-shopping-merchant-accounts-v1beta/src/main/java/com/google/shopping/merchant/accounts/v1beta/UpdateAutofeedSettingsRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/shopping/merchant/accounts/v1beta/autofeedsettings.proto
// Protobuf Java Version: 3.25.8
package com.google.shopping.merchant.accounts.v1beta;
/**
*
*
* <pre>
* Request message for the `UpdateAutofeedSettings` method.
* </pre>
*
* Protobuf type {@code google.shopping.merchant.accounts.v1beta.UpdateAutofeedSettingsRequest}
*/
public final class UpdateAutofeedSettingsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.shopping.merchant.accounts.v1beta.UpdateAutofeedSettingsRequest)
UpdateAutofeedSettingsRequestOrBuilder {
private static final long serialVersionUID = 0L;
  // Use UpdateAutofeedSettingsRequest.newBuilder() to construct.
  private UpdateAutofeedSettingsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor; used to build the default (empty) message instance.
  private UpdateAutofeedSettingsRequest() {}
  // Protobuf runtime hook for creating fresh instances (e.g. during parsing).
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new UpdateAutofeedSettingsRequest();
  }
  // Returns the message descriptor generated from autofeedsettings.proto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.shopping.merchant.accounts.v1beta.AutofeedSettingsProto
        .internal_static_google_shopping_merchant_accounts_v1beta_UpdateAutofeedSettingsRequest_descriptor;
  }
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.shopping.merchant.accounts.v1beta.AutofeedSettingsProto
.internal_static_google_shopping_merchant_accounts_v1beta_UpdateAutofeedSettingsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.shopping.merchant.accounts.v1beta.UpdateAutofeedSettingsRequest.class,
com.google.shopping.merchant.accounts.v1beta.UpdateAutofeedSettingsRequest.Builder
.class);
}
private int bitField0_;
public static final int AUTOFEED_SETTINGS_FIELD_NUMBER = 1;
private com.google.shopping.merchant.accounts.v1beta.AutofeedSettings autofeedSettings_;
/**
*
*
* <pre>
* Required. The new version of the autofeed setting.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1beta.AutofeedSettings autofeed_settings = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the autofeedSettings field is set.
*/
@java.lang.Override
public boolean hasAutofeedSettings() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The new version of the autofeed setting.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1beta.AutofeedSettings autofeed_settings = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The autofeedSettings.
*/
@java.lang.Override
public com.google.shopping.merchant.accounts.v1beta.AutofeedSettings getAutofeedSettings() {
return autofeedSettings_ == null
? com.google.shopping.merchant.accounts.v1beta.AutofeedSettings.getDefaultInstance()
: autofeedSettings_;
}
/**
*
*
* <pre>
* Required. The new version of the autofeed setting.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1beta.AutofeedSettings autofeed_settings = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.shopping.merchant.accounts.v1beta.AutofeedSettingsOrBuilder
getAutofeedSettingsOrBuilder() {
return autofeedSettings_ == null
? com.google.shopping.merchant.accounts.v1beta.AutofeedSettings.getDefaultInstance()
: autofeedSettings_;
}
public static final int UPDATE_MASK_FIELD_NUMBER = 2;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* Required. List of fields being updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. List of fields being updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* Required. List of fields being updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getAutofeedSettings());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getUpdateMask());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getAutofeedSettings());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj
instanceof com.google.shopping.merchant.accounts.v1beta.UpdateAutofeedSettingsRequest)) {
return super.equals(obj);
}
com.google.shopping.merchant.accounts.v1beta.UpdateAutofeedSettingsRequest other =
(com.google.shopping.merchant.accounts.v1beta.UpdateAutofeedSettingsRequest) obj;
if (hasAutofeedSettings() != other.hasAutofeedSettings()) return false;
if (hasAutofeedSettings()) {
if (!getAutofeedSettings().equals(other.getAutofeedSettings())) return false;
}
if (hasUpdateMask() != other.hasUpdateMask()) return false;
if (hasUpdateMask()) {
if (!getUpdateMask().equals(other.getUpdateMask())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasAutofeedSettings()) {
hash = (37 * hash) + AUTOFEED_SETTINGS_FIELD_NUMBER;
hash = (53 * hash) + getAutofeedSettings().hashCode();
}
if (hasUpdateMask()) {
hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
hash = (53 * hash) + getUpdateMask().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.shopping.merchant.accounts.v1beta.UpdateAutofeedSettingsRequest
parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.shopping.merchant.accounts.v1beta.UpdateAutofeedSettingsRequest
parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.shopping.merchant.accounts.v1beta.UpdateAutofeedSettingsRequest
parseFrom(com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.shopping.merchant.accounts.v1beta.UpdateAutofeedSettingsRequest
parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.shopping.merchant.accounts.v1beta.UpdateAutofeedSettingsRequest
parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.shopping.merchant.accounts.v1beta.UpdateAutofeedSettingsRequest
parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.shopping.merchant.accounts.v1beta.UpdateAutofeedSettingsRequest
parseFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.shopping.merchant.accounts.v1beta.UpdateAutofeedSettingsRequest
parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.shopping.merchant.accounts.v1beta.UpdateAutofeedSettingsRequest
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.shopping.merchant.accounts.v1beta.UpdateAutofeedSettingsRequest
parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.shopping.merchant.accounts.v1beta.UpdateAutofeedSettingsRequest
parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.shopping.merchant.accounts.v1beta.UpdateAutofeedSettingsRequest
parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.shopping.merchant.accounts.v1beta.UpdateAutofeedSettingsRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for the `UpdateAutofeedSettings` method.
* </pre>
*
* Protobuf type {@code google.shopping.merchant.accounts.v1beta.UpdateAutofeedSettingsRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.shopping.merchant.accounts.v1beta.UpdateAutofeedSettingsRequest)
com.google.shopping.merchant.accounts.v1beta.UpdateAutofeedSettingsRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.shopping.merchant.accounts.v1beta.AutofeedSettingsProto
.internal_static_google_shopping_merchant_accounts_v1beta_UpdateAutofeedSettingsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.shopping.merchant.accounts.v1beta.AutofeedSettingsProto
.internal_static_google_shopping_merchant_accounts_v1beta_UpdateAutofeedSettingsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.shopping.merchant.accounts.v1beta.UpdateAutofeedSettingsRequest.class,
com.google.shopping.merchant.accounts.v1beta.UpdateAutofeedSettingsRequest.Builder
.class);
}
// Construct using
// com.google.shopping.merchant.accounts.v1beta.UpdateAutofeedSettingsRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getAutofeedSettingsFieldBuilder();
getUpdateMaskFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
autofeedSettings_ = null;
if (autofeedSettingsBuilder_ != null) {
autofeedSettingsBuilder_.dispose();
autofeedSettingsBuilder_ = null;
}
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.shopping.merchant.accounts.v1beta.AutofeedSettingsProto
.internal_static_google_shopping_merchant_accounts_v1beta_UpdateAutofeedSettingsRequest_descriptor;
}
@java.lang.Override
public com.google.shopping.merchant.accounts.v1beta.UpdateAutofeedSettingsRequest
getDefaultInstanceForType() {
return com.google.shopping.merchant.accounts.v1beta.UpdateAutofeedSettingsRequest
.getDefaultInstance();
}
@java.lang.Override
public com.google.shopping.merchant.accounts.v1beta.UpdateAutofeedSettingsRequest build() {
com.google.shopping.merchant.accounts.v1beta.UpdateAutofeedSettingsRequest result =
buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.shopping.merchant.accounts.v1beta.UpdateAutofeedSettingsRequest
buildPartial() {
com.google.shopping.merchant.accounts.v1beta.UpdateAutofeedSettingsRequest result =
new com.google.shopping.merchant.accounts.v1beta.UpdateAutofeedSettingsRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.shopping.merchant.accounts.v1beta.UpdateAutofeedSettingsRequest result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.autofeedSettings_ =
autofeedSettingsBuilder_ == null ? autofeedSettings_ : autofeedSettingsBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other
instanceof com.google.shopping.merchant.accounts.v1beta.UpdateAutofeedSettingsRequest) {
return mergeFrom(
(com.google.shopping.merchant.accounts.v1beta.UpdateAutofeedSettingsRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.shopping.merchant.accounts.v1beta.UpdateAutofeedSettingsRequest other) {
if (other
== com.google.shopping.merchant.accounts.v1beta.UpdateAutofeedSettingsRequest
.getDefaultInstance()) return this;
if (other.hasAutofeedSettings()) {
mergeAutofeedSettings(other.getAutofeedSettings());
}
if (other.hasUpdateMask()) {
mergeUpdateMask(other.getUpdateMask());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(
getAutofeedSettingsFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.shopping.merchant.accounts.v1beta.AutofeedSettings autofeedSettings_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.shopping.merchant.accounts.v1beta.AutofeedSettings,
com.google.shopping.merchant.accounts.v1beta.AutofeedSettings.Builder,
com.google.shopping.merchant.accounts.v1beta.AutofeedSettingsOrBuilder>
autofeedSettingsBuilder_;
/**
*
*
* <pre>
* Required. The new version of the autofeed setting.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1beta.AutofeedSettings autofeed_settings = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the autofeedSettings field is set.
*/
public boolean hasAutofeedSettings() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The new version of the autofeed setting.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1beta.AutofeedSettings autofeed_settings = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The autofeedSettings.
*/
public com.google.shopping.merchant.accounts.v1beta.AutofeedSettings getAutofeedSettings() {
if (autofeedSettingsBuilder_ == null) {
return autofeedSettings_ == null
? com.google.shopping.merchant.accounts.v1beta.AutofeedSettings.getDefaultInstance()
: autofeedSettings_;
} else {
return autofeedSettingsBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The new version of the autofeed setting.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1beta.AutofeedSettings autofeed_settings = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setAutofeedSettings(
com.google.shopping.merchant.accounts.v1beta.AutofeedSettings value) {
if (autofeedSettingsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
autofeedSettings_ = value;
} else {
autofeedSettingsBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The new version of the autofeed setting.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1beta.AutofeedSettings autofeed_settings = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setAutofeedSettings(
com.google.shopping.merchant.accounts.v1beta.AutofeedSettings.Builder builderForValue) {
if (autofeedSettingsBuilder_ == null) {
autofeedSettings_ = builderForValue.build();
} else {
autofeedSettingsBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The new version of the autofeed setting.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1beta.AutofeedSettings autofeed_settings = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeAutofeedSettings(
com.google.shopping.merchant.accounts.v1beta.AutofeedSettings value) {
if (autofeedSettingsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& autofeedSettings_ != null
&& autofeedSettings_
!= com.google.shopping.merchant.accounts.v1beta.AutofeedSettings
.getDefaultInstance()) {
getAutofeedSettingsBuilder().mergeFrom(value);
} else {
autofeedSettings_ = value;
}
} else {
autofeedSettingsBuilder_.mergeFrom(value);
}
if (autofeedSettings_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The new version of the autofeed setting.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1beta.AutofeedSettings autofeed_settings = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearAutofeedSettings() {
bitField0_ = (bitField0_ & ~0x00000001);
autofeedSettings_ = null;
if (autofeedSettingsBuilder_ != null) {
autofeedSettingsBuilder_.dispose();
autofeedSettingsBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The new version of the autofeed setting.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1beta.AutofeedSettings autofeed_settings = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.shopping.merchant.accounts.v1beta.AutofeedSettings.Builder
getAutofeedSettingsBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getAutofeedSettingsFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The new version of the autofeed setting.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1beta.AutofeedSettings autofeed_settings = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.shopping.merchant.accounts.v1beta.AutofeedSettingsOrBuilder
getAutofeedSettingsOrBuilder() {
if (autofeedSettingsBuilder_ != null) {
return autofeedSettingsBuilder_.getMessageOrBuilder();
} else {
return autofeedSettings_ == null
? com.google.shopping.merchant.accounts.v1beta.AutofeedSettings.getDefaultInstance()
: autofeedSettings_;
}
}
/**
*
*
* <pre>
* Required. The new version of the autofeed setting.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1beta.AutofeedSettings autofeed_settings = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.shopping.merchant.accounts.v1beta.AutofeedSettings,
com.google.shopping.merchant.accounts.v1beta.AutofeedSettings.Builder,
com.google.shopping.merchant.accounts.v1beta.AutofeedSettingsOrBuilder>
getAutofeedSettingsFieldBuilder() {
if (autofeedSettingsBuilder_ == null) {
autofeedSettingsBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.shopping.merchant.accounts.v1beta.AutofeedSettings,
com.google.shopping.merchant.accounts.v1beta.AutofeedSettings.Builder,
com.google.shopping.merchant.accounts.v1beta.AutofeedSettingsOrBuilder>(
getAutofeedSettings(), getParentForChildren(), isClean());
autofeedSettings_ = null;
}
return autofeedSettingsBuilder_;
}
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
updateMaskBuilder_;
/**
*
*
* <pre>
* Required. List of fields being updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. List of fields being updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
public com.google.protobuf.FieldMask getUpdateMask() {
if (updateMaskBuilder_ == null) {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
} else {
return updateMaskBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. List of fields being updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateMask_ = value;
} else {
updateMaskBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. List of fields being updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
if (updateMaskBuilder_ == null) {
updateMask_ = builderForValue.build();
} else {
updateMaskBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. List of fields being updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& updateMask_ != null
&& updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
getUpdateMaskBuilder().mergeFrom(value);
} else {
updateMask_ = value;
}
} else {
updateMaskBuilder_.mergeFrom(value);
}
if (updateMask_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. List of fields being updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearUpdateMask() {
bitField0_ = (bitField0_ & ~0x00000002);
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. List of fields being updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getUpdateMaskFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. List of fields being updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
if (updateMaskBuilder_ != null) {
return updateMaskBuilder_.getMessageOrBuilder();
} else {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
}
}
/**
*
*
* <pre>
* Required. List of fields being updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
getUpdateMaskFieldBuilder() {
if (updateMaskBuilder_ == null) {
updateMaskBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>(
getUpdateMask(), getParentForChildren(), isClean());
updateMask_ = null;
}
return updateMaskBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.shopping.merchant.accounts.v1beta.UpdateAutofeedSettingsRequest)
}
// @@protoc_insertion_point(class_scope:google.shopping.merchant.accounts.v1beta.UpdateAutofeedSettingsRequest)
private static final com.google.shopping.merchant.accounts.v1beta.UpdateAutofeedSettingsRequest
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE =
new com.google.shopping.merchant.accounts.v1beta.UpdateAutofeedSettingsRequest();
}
public static com.google.shopping.merchant.accounts.v1beta.UpdateAutofeedSettingsRequest
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<UpdateAutofeedSettingsRequest> PARSER =
new com.google.protobuf.AbstractParser<UpdateAutofeedSettingsRequest>() {
@java.lang.Override
public UpdateAutofeedSettingsRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<UpdateAutofeedSettingsRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<UpdateAutofeedSettingsRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.shopping.merchant.accounts.v1beta.UpdateAutofeedSettingsRequest
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,043 | java-video-intelligence/proto-google-cloud-video-intelligence-v1beta2/src/main/java/com/google/cloud/videointelligence/v1beta2/AnnotateVideoProgress.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/videointelligence/v1beta2/video_intelligence.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.videointelligence.v1beta2;
/**
*
*
* <pre>
* Video annotation progress. Included in the `metadata`
* field of the `Operation` returned by the `GetOperation`
* call of the `google::longrunning::Operations` service.
* </pre>
*
* Protobuf type {@code google.cloud.videointelligence.v1beta2.AnnotateVideoProgress}
*/
public final class AnnotateVideoProgress extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.videointelligence.v1beta2.AnnotateVideoProgress)
AnnotateVideoProgressOrBuilder {
private static final long serialVersionUID = 0L;
// Use AnnotateVideoProgress.newBuilder() to construct.
private AnnotateVideoProgress(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private AnnotateVideoProgress() {
annotationProgress_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new AnnotateVideoProgress();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.videointelligence.v1beta2.VideoIntelligenceServiceProto
.internal_static_google_cloud_videointelligence_v1beta2_AnnotateVideoProgress_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.videointelligence.v1beta2.VideoIntelligenceServiceProto
.internal_static_google_cloud_videointelligence_v1beta2_AnnotateVideoProgress_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.videointelligence.v1beta2.AnnotateVideoProgress.class,
com.google.cloud.videointelligence.v1beta2.AnnotateVideoProgress.Builder.class);
}
  public static final int ANNOTATION_PROGRESS_FIELD_NUMBER = 1;

  // Backing storage for `repeated annotation_progress = 1`; immutable once
  // the message is built.
  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.videointelligence.v1beta2.VideoAnnotationProgress>
      annotationProgress_;
  /**
   *
   *
   * <pre>
   * Progress metadata for all videos specified in `AnnotateVideoRequest`.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.videointelligence.v1beta2.VideoAnnotationProgress annotation_progress = 1;
   * </code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.videointelligence.v1beta2.VideoAnnotationProgress>
      getAnnotationProgressList() {
    return annotationProgress_;
  }
  /**
   *
   *
   * <pre>
   * Progress metadata for all videos specified in `AnnotateVideoRequest`.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.videointelligence.v1beta2.VideoAnnotationProgress annotation_progress = 1;
   * </code>
   */
  @java.lang.Override
  public java.util.List<
          ? extends com.google.cloud.videointelligence.v1beta2.VideoAnnotationProgressOrBuilder>
      getAnnotationProgressOrBuilderList() {
    return annotationProgress_;
  }
  /**
   *
   *
   * <pre>
   * Progress metadata for all videos specified in `AnnotateVideoRequest`.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.videointelligence.v1beta2.VideoAnnotationProgress annotation_progress = 1;
   * </code>
   */
  @java.lang.Override
  public int getAnnotationProgressCount() {
    return annotationProgress_.size();
  }
  /**
   *
   *
   * <pre>
   * Progress metadata for all videos specified in `AnnotateVideoRequest`.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.videointelligence.v1beta2.VideoAnnotationProgress annotation_progress = 1;
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.videointelligence.v1beta2.VideoAnnotationProgress getAnnotationProgress(
      int index) {
    return annotationProgress_.get(index);
  }
  /**
   *
   *
   * <pre>
   * Progress metadata for all videos specified in `AnnotateVideoRequest`.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.videointelligence.v1beta2.VideoAnnotationProgress annotation_progress = 1;
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.videointelligence.v1beta2.VideoAnnotationProgressOrBuilder
      getAnnotationProgressOrBuilder(int index) {
    return annotationProgress_.get(index);
  }
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < annotationProgress_.size(); i++) {
output.writeMessage(1, annotationProgress_.get(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < annotationProgress_.size(); i++) {
size +=
com.google.protobuf.CodedOutputStream.computeMessageSize(1, annotationProgress_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.videointelligence.v1beta2.AnnotateVideoProgress)) {
return super.equals(obj);
}
com.google.cloud.videointelligence.v1beta2.AnnotateVideoProgress other =
(com.google.cloud.videointelligence.v1beta2.AnnotateVideoProgress) obj;
if (!getAnnotationProgressList().equals(other.getAnnotationProgressList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
  @java.lang.Override
  public int hashCode() {
    // Hash is memoized; 0 doubles as the "not yet computed" sentinel.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    // Repeated fields only contribute when non-empty, keeping the hash
    // stable between "unset" and "empty".
    if (getAnnotationProgressCount() > 0) {
      hash = (37 * hash) + ANNOTATION_PROGRESS_FIELD_NUMBER;
      hash = (53 * hash) + getAnnotationProgressList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // --- Standard generated parse entry points; all delegate to PARSER. ---
  public static com.google.cloud.videointelligence.v1beta2.AnnotateVideoProgress parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.videointelligence.v1beta2.AnnotateVideoProgress parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.videointelligence.v1beta2.AnnotateVideoProgress parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.videointelligence.v1beta2.AnnotateVideoProgress parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.videointelligence.v1beta2.AnnotateVideoProgress parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.videointelligence.v1beta2.AnnotateVideoProgress parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  // Stream variants wrap parse failures as IOException via the helper.
  public static com.google.cloud.videointelligence.v1beta2.AnnotateVideoProgress parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.videointelligence.v1beta2.AnnotateVideoProgress parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a varint length prefix before the payload.
  public static com.google.cloud.videointelligence.v1beta2.AnnotateVideoProgress parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.videointelligence.v1beta2.AnnotateVideoProgress parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.videointelligence.v1beta2.AnnotateVideoProgress parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.videointelligence.v1beta2.AnnotateVideoProgress parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  // Fresh builder seeded from the (empty) default instance.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  // Builder pre-populated with the state of an existing message.
  public static Builder newBuilder(
      com.google.cloud.videointelligence.v1beta2.AnnotateVideoProgress prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields a plain new Builder; any other instance is
    // merged in so the builder starts from this message's state.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Video annotation progress. Included in the `metadata`
   * field of the `Operation` returned by the `GetOperation`
   * call of the `google::longrunning::Operations` service.
   * </pre>
   *
   * Protobuf type {@code google.cloud.videointelligence.v1beta2.AnnotateVideoProgress}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.videointelligence.v1beta2.AnnotateVideoProgress)
      com.google.cloud.videointelligence.v1beta2.AnnotateVideoProgressOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.videointelligence.v1beta2.VideoIntelligenceServiceProto
          .internal_static_google_cloud_videointelligence_v1beta2_AnnotateVideoProgress_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.videointelligence.v1beta2.VideoIntelligenceServiceProto
          .internal_static_google_cloud_videointelligence_v1beta2_AnnotateVideoProgress_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.videointelligence.v1beta2.AnnotateVideoProgress.class,
              com.google.cloud.videointelligence.v1beta2.AnnotateVideoProgress.Builder.class);
    }

    // Construct using com.google.cloud.videointelligence.v1beta2.AnnotateVideoProgress.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    // Resets all fields to their defaults, whichever of the raw list or the
    // nested-builder representation is currently active.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (annotationProgressBuilder_ == null) {
        annotationProgress_ = java.util.Collections.emptyList();
      } else {
        annotationProgress_ = null;
        annotationProgressBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.videointelligence.v1beta2.VideoIntelligenceServiceProto
          .internal_static_google_cloud_videointelligence_v1beta2_AnnotateVideoProgress_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.videointelligence.v1beta2.AnnotateVideoProgress
        getDefaultInstanceForType() {
      return com.google.cloud.videointelligence.v1beta2.AnnotateVideoProgress.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.videointelligence.v1beta2.AnnotateVideoProgress build() {
      com.google.cloud.videointelligence.v1beta2.AnnotateVideoProgress result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.videointelligence.v1beta2.AnnotateVideoProgress buildPartial() {
      com.google.cloud.videointelligence.v1beta2.AnnotateVideoProgress result =
          new com.google.cloud.videointelligence.v1beta2.AnnotateVideoProgress(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Transfers the repeated field into the result, freezing the raw list to
    // an unmodifiable view when no nested field builder is in use.
    private void buildPartialRepeatedFields(
        com.google.cloud.videointelligence.v1beta2.AnnotateVideoProgress result) {
      if (annotationProgressBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          annotationProgress_ = java.util.Collections.unmodifiableList(annotationProgress_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.annotationProgress_ = annotationProgress_;
      } else {
        result.annotationProgress_ = annotationProgressBuilder_.build();
      }
    }

    // No singular fields on this message; generated as an empty transfer step.
    private void buildPartial0(
        com.google.cloud.videointelligence.v1beta2.AnnotateVideoProgress result) {
      int from_bitField0_ = bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.videointelligence.v1beta2.AnnotateVideoProgress) {
        return mergeFrom((com.google.cloud.videointelligence.v1beta2.AnnotateVideoProgress) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Appends the other message's entries to this builder's repeated field,
    // sharing the other list directly when this builder is still empty.
    public Builder mergeFrom(
        com.google.cloud.videointelligence.v1beta2.AnnotateVideoProgress other) {
      if (other
          == com.google.cloud.videointelligence.v1beta2.AnnotateVideoProgress.getDefaultInstance())
        return this;
      if (annotationProgressBuilder_ == null) {
        if (!other.annotationProgress_.isEmpty()) {
          if (annotationProgress_.isEmpty()) {
            annotationProgress_ = other.annotationProgress_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureAnnotationProgressIsMutable();
            annotationProgress_.addAll(other.annotationProgress_);
          }
          onChanged();
        }
      } else {
        if (!other.annotationProgress_.isEmpty()) {
          if (annotationProgressBuilder_.isEmpty()) {
            annotationProgressBuilder_.dispose();
            annotationProgressBuilder_ = null;
            annotationProgress_ = other.annotationProgress_;
            bitField0_ = (bitField0_ & ~0x00000001);
            annotationProgressBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getAnnotationProgressFieldBuilder()
                    : null;
          } else {
            annotationProgressBuilder_.addAllMessages(other.annotationProgress_);
          }
        }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Streaming parse loop: tag 10 = field 1 (annotation_progress, message);
    // anything else is routed to the unknown-field handler.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.cloud.videointelligence.v1beta2.VideoAnnotationProgress m =
                    input.readMessage(
                        com.google.cloud.videointelligence.v1beta2.VideoAnnotationProgress.parser(),
                        extensionRegistry);
                if (annotationProgressBuilder_ == null) {
                  ensureAnnotationProgressIsMutable();
                  annotationProgress_.add(m);
                } else {
                  annotationProgressBuilder_.addMessage(m);
                }
                break;
              } // case 10
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Bit 0x00000001 tracks whether annotationProgress_ is a private mutable copy.
    private int bitField0_;

    private java.util.List<com.google.cloud.videointelligence.v1beta2.VideoAnnotationProgress>
        annotationProgress_ = java.util.Collections.emptyList();

    // Copy-on-write: replace a shared/immutable list with a private ArrayList
    // before the first mutation.
    private void ensureAnnotationProgressIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        annotationProgress_ =
            new java.util.ArrayList<
                com.google.cloud.videointelligence.v1beta2.VideoAnnotationProgress>(
                annotationProgress_);
        bitField0_ |= 0x00000001;
      }
    }

    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.videointelligence.v1beta2.VideoAnnotationProgress,
            com.google.cloud.videointelligence.v1beta2.VideoAnnotationProgress.Builder,
            com.google.cloud.videointelligence.v1beta2.VideoAnnotationProgressOrBuilder>
        annotationProgressBuilder_;
    /**
     *
     *
     * <pre>
     * Progress metadata for all videos specified in `AnnotateVideoRequest`.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.videointelligence.v1beta2.VideoAnnotationProgress annotation_progress = 1;
     * </code>
     */
    public java.util.List<com.google.cloud.videointelligence.v1beta2.VideoAnnotationProgress>
        getAnnotationProgressList() {
      if (annotationProgressBuilder_ == null) {
        return java.util.Collections.unmodifiableList(annotationProgress_);
      } else {
        return annotationProgressBuilder_.getMessageList();
      }
    }
    /**
     *
     *
     * <pre>
     * Progress metadata for all videos specified in `AnnotateVideoRequest`.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.videointelligence.v1beta2.VideoAnnotationProgress annotation_progress = 1;
     * </code>
     */
    public int getAnnotationProgressCount() {
      if (annotationProgressBuilder_ == null) {
        return annotationProgress_.size();
      } else {
        return annotationProgressBuilder_.getCount();
      }
    }
    /**
     *
     *
     * <pre>
     * Progress metadata for all videos specified in `AnnotateVideoRequest`.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.videointelligence.v1beta2.VideoAnnotationProgress annotation_progress = 1;
     * </code>
     */
    public com.google.cloud.videointelligence.v1beta2.VideoAnnotationProgress getAnnotationProgress(
        int index) {
      if (annotationProgressBuilder_ == null) {
        return annotationProgress_.get(index);
      } else {
        return annotationProgressBuilder_.getMessage(index);
      }
    }
    /**
     *
     *
     * <pre>
     * Progress metadata for all videos specified in `AnnotateVideoRequest`.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.videointelligence.v1beta2.VideoAnnotationProgress annotation_progress = 1;
     * </code>
     */
    public Builder setAnnotationProgress(
        int index, com.google.cloud.videointelligence.v1beta2.VideoAnnotationProgress value) {
      if (annotationProgressBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureAnnotationProgressIsMutable();
        annotationProgress_.set(index, value);
        onChanged();
      } else {
        annotationProgressBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Progress metadata for all videos specified in `AnnotateVideoRequest`.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.videointelligence.v1beta2.VideoAnnotationProgress annotation_progress = 1;
     * </code>
     */
    public Builder setAnnotationProgress(
        int index,
        com.google.cloud.videointelligence.v1beta2.VideoAnnotationProgress.Builder
            builderForValue) {
      if (annotationProgressBuilder_ == null) {
        ensureAnnotationProgressIsMutable();
        annotationProgress_.set(index, builderForValue.build());
        onChanged();
      } else {
        annotationProgressBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Progress metadata for all videos specified in `AnnotateVideoRequest`.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.videointelligence.v1beta2.VideoAnnotationProgress annotation_progress = 1;
     * </code>
     */
    public Builder addAnnotationProgress(
        com.google.cloud.videointelligence.v1beta2.VideoAnnotationProgress value) {
      if (annotationProgressBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureAnnotationProgressIsMutable();
        annotationProgress_.add(value);
        onChanged();
      } else {
        annotationProgressBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Progress metadata for all videos specified in `AnnotateVideoRequest`.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.videointelligence.v1beta2.VideoAnnotationProgress annotation_progress = 1;
     * </code>
     */
    public Builder addAnnotationProgress(
        int index, com.google.cloud.videointelligence.v1beta2.VideoAnnotationProgress value) {
      if (annotationProgressBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureAnnotationProgressIsMutable();
        annotationProgress_.add(index, value);
        onChanged();
      } else {
        annotationProgressBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Progress metadata for all videos specified in `AnnotateVideoRequest`.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.videointelligence.v1beta2.VideoAnnotationProgress annotation_progress = 1;
     * </code>
     */
    public Builder addAnnotationProgress(
        com.google.cloud.videointelligence.v1beta2.VideoAnnotationProgress.Builder
            builderForValue) {
      if (annotationProgressBuilder_ == null) {
        ensureAnnotationProgressIsMutable();
        annotationProgress_.add(builderForValue.build());
        onChanged();
      } else {
        annotationProgressBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Progress metadata for all videos specified in `AnnotateVideoRequest`.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.videointelligence.v1beta2.VideoAnnotationProgress annotation_progress = 1;
     * </code>
     */
    public Builder addAnnotationProgress(
        int index,
        com.google.cloud.videointelligence.v1beta2.VideoAnnotationProgress.Builder
            builderForValue) {
      if (annotationProgressBuilder_ == null) {
        ensureAnnotationProgressIsMutable();
        annotationProgress_.add(index, builderForValue.build());
        onChanged();
      } else {
        annotationProgressBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Progress metadata for all videos specified in `AnnotateVideoRequest`.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.videointelligence.v1beta2.VideoAnnotationProgress annotation_progress = 1;
     * </code>
     */
    public Builder addAllAnnotationProgress(
        java.lang.Iterable<
                ? extends com.google.cloud.videointelligence.v1beta2.VideoAnnotationProgress>
            values) {
      if (annotationProgressBuilder_ == null) {
        ensureAnnotationProgressIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, annotationProgress_);
        onChanged();
      } else {
        annotationProgressBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Progress metadata for all videos specified in `AnnotateVideoRequest`.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.videointelligence.v1beta2.VideoAnnotationProgress annotation_progress = 1;
     * </code>
     */
    public Builder clearAnnotationProgress() {
      if (annotationProgressBuilder_ == null) {
        annotationProgress_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        annotationProgressBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Progress metadata for all videos specified in `AnnotateVideoRequest`.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.videointelligence.v1beta2.VideoAnnotationProgress annotation_progress = 1;
     * </code>
     */
    public Builder removeAnnotationProgress(int index) {
      if (annotationProgressBuilder_ == null) {
        ensureAnnotationProgressIsMutable();
        annotationProgress_.remove(index);
        onChanged();
      } else {
        annotationProgressBuilder_.remove(index);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Progress metadata for all videos specified in `AnnotateVideoRequest`.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.videointelligence.v1beta2.VideoAnnotationProgress annotation_progress = 1;
     * </code>
     */
    public com.google.cloud.videointelligence.v1beta2.VideoAnnotationProgress.Builder
        getAnnotationProgressBuilder(int index) {
      return getAnnotationProgressFieldBuilder().getBuilder(index);
    }
    /**
     *
     *
     * <pre>
     * Progress metadata for all videos specified in `AnnotateVideoRequest`.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.videointelligence.v1beta2.VideoAnnotationProgress annotation_progress = 1;
     * </code>
     */
    public com.google.cloud.videointelligence.v1beta2.VideoAnnotationProgressOrBuilder
        getAnnotationProgressOrBuilder(int index) {
      if (annotationProgressBuilder_ == null) {
        return annotationProgress_.get(index);
      } else {
        return annotationProgressBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     *
     *
     * <pre>
     * Progress metadata for all videos specified in `AnnotateVideoRequest`.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.videointelligence.v1beta2.VideoAnnotationProgress annotation_progress = 1;
     * </code>
     */
    public java.util.List<
            ? extends com.google.cloud.videointelligence.v1beta2.VideoAnnotationProgressOrBuilder>
        getAnnotationProgressOrBuilderList() {
      if (annotationProgressBuilder_ != null) {
        return annotationProgressBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(annotationProgress_);
      }
    }
    /**
     *
     *
     * <pre>
     * Progress metadata for all videos specified in `AnnotateVideoRequest`.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.videointelligence.v1beta2.VideoAnnotationProgress annotation_progress = 1;
     * </code>
     */
    public com.google.cloud.videointelligence.v1beta2.VideoAnnotationProgress.Builder
        addAnnotationProgressBuilder() {
      return getAnnotationProgressFieldBuilder()
          .addBuilder(
              com.google.cloud.videointelligence.v1beta2.VideoAnnotationProgress
                  .getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * Progress metadata for all videos specified in `AnnotateVideoRequest`.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.videointelligence.v1beta2.VideoAnnotationProgress annotation_progress = 1;
     * </code>
     */
    public com.google.cloud.videointelligence.v1beta2.VideoAnnotationProgress.Builder
        addAnnotationProgressBuilder(int index) {
      return getAnnotationProgressFieldBuilder()
          .addBuilder(
              index,
              com.google.cloud.videointelligence.v1beta2.VideoAnnotationProgress
                  .getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * Progress metadata for all videos specified in `AnnotateVideoRequest`.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.videointelligence.v1beta2.VideoAnnotationProgress annotation_progress = 1;
     * </code>
     */
    public java.util.List<
            com.google.cloud.videointelligence.v1beta2.VideoAnnotationProgress.Builder>
        getAnnotationProgressBuilderList() {
      return getAnnotationProgressFieldBuilder().getBuilderList();
    }

    // Lazily switches the field to its nested-builder representation; from
    // then on annotationProgress_ is null and the builder owns the data.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.videointelligence.v1beta2.VideoAnnotationProgress,
            com.google.cloud.videointelligence.v1beta2.VideoAnnotationProgress.Builder,
            com.google.cloud.videointelligence.v1beta2.VideoAnnotationProgressOrBuilder>
        getAnnotationProgressFieldBuilder() {
      if (annotationProgressBuilder_ == null) {
        annotationProgressBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.videointelligence.v1beta2.VideoAnnotationProgress,
                com.google.cloud.videointelligence.v1beta2.VideoAnnotationProgress.Builder,
                com.google.cloud.videointelligence.v1beta2.VideoAnnotationProgressOrBuilder>(
                annotationProgress_,
                ((bitField0_ & 0x00000001) != 0),
                getParentForChildren(),
                isClean());
        annotationProgress_ = null;
      }
      return annotationProgressBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.videointelligence.v1beta2.AnnotateVideoProgress)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta2.AnnotateVideoProgress)
  // Shared immutable default instance, created once at class-load time.
  private static final com.google.cloud.videointelligence.v1beta2.AnnotateVideoProgress
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.videointelligence.v1beta2.AnnotateVideoProgress();
  }

  public static com.google.cloud.videointelligence.v1beta2.AnnotateVideoProgress
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser: builds via the Builder so partial messages can be
  // attached to any InvalidProtocolBufferException that is thrown.
  private static final com.google.protobuf.Parser<AnnotateVideoProgress> PARSER =
      new com.google.protobuf.AbstractParser<AnnotateVideoProgress>() {
        @java.lang.Override
        public AnnotateVideoProgress parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<AnnotateVideoProgress> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<AnnotateVideoProgress> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.videointelligence.v1beta2.AnnotateVideoProgress
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hama.bsp;
import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.util.ExitUtil;
import org.apache.hadoop.yarn.api.ApplicationConstants;
import org.apache.hadoop.yarn.api.ContainerManagementProtocol;
import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse;
import org.apache.hadoop.yarn.api.records.*;
import org.apache.hadoop.yarn.client.api.AMRMClient;
import org.apache.hadoop.yarn.client.api.async.AMRMClientAsync;
import org.apache.hadoop.yarn.client.api.async.NMClientAsync;
import org.apache.hadoop.yarn.client.api.async.impl.NMClientAsyncImpl;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.security.AMRMTokenIdentifier;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.hadoop.yarn.util.Records;
import org.apache.hama.Constants;
import org.apache.hama.HamaConfiguration;
import org.apache.hama.bsp.sync.SyncServer;
import org.apache.hama.bsp.sync.SyncServiceFactory;
import org.apache.hama.ipc.BSPPeerProtocol;
import org.apache.hama.ipc.RPC;
import org.apache.hama.ipc.Server;
import org.apache.hama.util.BSPNetUtils;
import org.apache.log4j.LogManager;
import java.io.DataInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.ByteBuffer;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicInteger;
public class ApplicationMaster implements BSPClient, BSPPeerProtocol {
  private static final Log LOG = LogFactory.getLog(ApplicationMaster.class);

  // Job configuration: localConf is the AM's own YARN config, jobConf is the
  // submitted BSP job's config loaded from jobFile.
  private Configuration localConf;
  private Configuration jobConf;
  private String jobFile;
  private String applicationName;

  // RPC info where the AM receives client-side requests
  private String hostname;
  private int clientPort;

  private FileSystem fs;
  // Current superstep, aggregated across tasks (see BSPClient).
  private volatile long superstep;
  private Counters globalCounter = new Counters();
  // Input splits assigned to the BSP tasks.
  private BSPJobClient.RawSplit[] splits;

  // Hama job id
  private BSPJobID jobId;
  // Partition id counter; starts at -1 so the first increment yields 0.
  private static AtomicInteger ai = new AtomicInteger(-1);

  // SyncServer for Zookeeper
  private SyncServer syncServer;
  // Zookeeper thread pool (single thread runs the sync server).
  private static final ExecutorService threadPool = Executors
      .newFixedThreadPool(1);

  // RPC servers: clientServer handles client requests, taskServer handles
  // BSP task (BSPPeerProtocol) requests on taskServerPort.
  private int taskServerPort;
  private Server clientServer;
  private Server taskServer;

  // Handle to communicate with the Resource Manager
  @SuppressWarnings("rawtypes")
  private AMRMClientAsync amRMClient;

  // In both secure and non-secure modes, this points to the job-submitter.
  @VisibleForTesting
  UserGroupInformation appSubmitterUgi;

  // Handle to communicate with the Node Manager
  private NMClientAsync nmClientAsync;

  // Listen to process the response from the Node Manager
  private NMCallbackHandler containerListener;

  // Application Attempt Id ( combination of attemptId and fail count )
  @VisibleForTesting
  protected ApplicationAttemptId appAttemptID;

  // TODO
  // For status update for clients - yet to be implemented
  // Hostname of the container
  private String appMasterHostname = "";
  // Port on which the app master listens for status updates from clients
  private int appMasterRpcPort = -1;
  // Tracking url to which app master publishes info for clients to monitor
  private String appMasterTrackingUrl = "";

  // App Master configuration
  // No. of containers to run shell command on
  @VisibleForTesting
  protected int numTotalContainers;
  // Memory to request for the container on which the shell command will run
  private int containerMemory;
  // VirtualCores to request for the container on which the shell command will
  // run
  private int containerVirtualCores = 1;

  // Priority of the request
  private int requestPriority = 0;

  // Counter for completed containers ( complete denotes successful or failed )
  private AtomicInteger numCompletedContainers = new AtomicInteger();
  // Allocated container count so that we know how many containers has the RM
  // allocated to us
  @VisibleForTesting
  protected AtomicInteger numAllocatedContainers = new AtomicInteger();
  // Count of failed containers
  private AtomicInteger numFailedContainers = new AtomicInteger();
  // Count of containers already requested from the RM
  // Needed as once requested, we should not request for containers again.
  // Only request for more if the original requirement changes.
  @VisibleForTesting
  protected AtomicInteger numRequestedContainers = new AtomicInteger();

  // Flipped when all containers have completed; ends the AM's main loop.
  private volatile boolean done;

  // Serialized security tokens passed to launched containers.
  private ByteBuffer allTokens;

  // Launch threads
  private List<Thread> launchThreads = new ArrayList<Thread>();

  // Container ids this AM has launched (concurrent set; containers complete
  // asynchronously).
  @VisibleForTesting
  protected final Set<ContainerId> launchedContainers = Collections
      .newSetFromMap(new ConcurrentHashMap<ContainerId, Boolean>());
  /**
   * Creates the ApplicationMaster with a fresh {@link YarnConfiguration};
   * everything else is populated later by {@link #init(String[])}.
   */
  public ApplicationMaster() {
    // Set up the configuration
    this.localConf = new YarnConfiguration();
  }
  /**
   * Entry point invoked by YARN inside the AM container. Exit codes:
   * 0 on success (or when init() declines to run), 1 on an unexpected
   * throwable, 2 when the application finished unsuccessfully.
   */
  public static void main(String[] args) throws IOException {
    boolean result = false;
    ApplicationMaster appMaster = new ApplicationMaster();
    try {
      LOG.info("Initializing ApplicationMaster");
      boolean doRun = appMaster.init(args);
      if (!doRun) {
        System.exit(0);
      }
      appMaster.run();
      result = appMaster.finish();
    } catch (Throwable t) {
      LOG.fatal("Error running ApplicationMaster", t);
      LogManager.shutdown();
      ExitUtil.terminate(1, t);
    } finally {
      // NOTE(review): close() is called even when init() failed before the
      // RPC servers were started — close() must tolerate null servers.
      LOG.info("Stop SyncServer and RPCServer.");
      appMaster.close();
    }
    if (result) {
      LOG.info("Application Master completed successfully. exiting");
      System.exit(0);
    } else {
      LOG.info("Application Master failed. exiting");
      System.exit(2);
    }
  }
  /**
   * Parses command-line options and sets up the AM: reads the job
   * configuration, starts the sync (Zookeeper) service and the RPC servers,
   * rewrites the job configuration with the readjusted addresses, and loads
   * the input splits if any.
   *
   * @param args exactly one argument: the path of the submitted job file
   * @return true to continue with {@link #run()}, false to exit cleanly
   * @throws Exception on any setup failure
   */
  public boolean init(String[] args) throws Exception {
    if (args.length != 1) {
      throw new IllegalArgumentException();
    }
    this.jobFile = args[0];
    this.jobConf = getSubmitConfiguration(jobFile);
    // NOTE(review): this adds localConf as a resource of itself, which is a
    // no-op at best — presumably jobConf was intended; verify before changing.
    localConf.addResource(localConf);
    fs = FileSystem.get(jobConf);
    this.applicationName = jobConf.get("bsp.job.name",
        "<no bsp job name defined>");
    if (applicationName.isEmpty()) {
      this.applicationName = "<no bsp job name defined>";
    }
    appAttemptID = getApplicationAttemptId();
    this.jobId = new BSPJobID(appAttemptID.toString(), 0);
    this.appMasterHostname = BSPNetUtils.getCanonicalHostname();
    this.appMasterTrackingUrl = "http://localhost:8088";
    this.numTotalContainers = this.jobConf.getInt("bsp.peers.num", 1);
    this.containerMemory = getMemoryRequirements(jobConf);
    this.hostname = BSPNetUtils.getCanonicalHostname();
    this.clientPort = BSPNetUtils.getFreePort(12000);
    // Set configuration for starting SyncServer which run Zookeeper
    this.jobConf.set(Constants.ZOOKEEPER_QUORUM, appMasterHostname);
    // start our synchronization service
    startSyncServer();
    // start RPC server
    startRPCServers();
    /*
     * Make sure that this executes after the start the RPC servers, because we
     * are readjusting the configuration.
     */
    rewriteSubmitConfiguration(jobFile, jobConf);
    String jobSplit = jobConf.get("bsp.job.split.file");
    splits = null;
    if (jobSplit != null) {
      DataInputStream splitFile = fs.open(new Path(jobSplit));
      try {
        splits = BSPJobClient.readSplitFile(splitFile);
      } finally {
        splitFile.close();
      }
    }
    return true;
  }
  /**
   * Main run function for the application master: collects and filters the
   * security tokens for the containers, starts the async RM and NM clients,
   * registers with the ResourceManager, and submits the initial container
   * requests (one per BSP peer, minus containers surviving from a previous
   * attempt).
   *
   * @throws org.apache.hadoop.yarn.exceptions.YarnException
   * @throws IOException
   */
  @SuppressWarnings({ "unchecked" })
  public void run() throws YarnException, IOException, InterruptedException {
    LOG.info("Starting ApplicationMaster");
    // Note: Credentials, Token, UserGroupInformation, DataOutputBuffer class
    // are marked as LimitedPrivate
    Credentials credentials = UserGroupInformation.getCurrentUser()
        .getCredentials();
    DataOutputBuffer dob = new DataOutputBuffer();
    credentials.writeTokenStorageToStream(dob);
    // Now remove the AM->RM token so that containers cannot access it.
    Iterator<Token<?>> iter = credentials.getAllTokens().iterator();
    LOG.info("Executing with tokens:");
    while (iter.hasNext()) {
      Token<?> token = iter.next();
      LOG.info(token);
      if (token.getKind().equals(AMRMTokenIdentifier.KIND_NAME)) {
        iter.remove();
      }
    }
    allTokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
    // Create appSubmitterUgi and add original tokens to it
    String appSubmitterUserName = System
        .getenv(ApplicationConstants.Environment.USER.name());
    appSubmitterUgi = UserGroupInformation
        .createRemoteUser(appSubmitterUserName);
    appSubmitterUgi.addCredentials(credentials);
    // Async RM client: heartbeats every 1000ms, events go to RMCallbackHandler.
    AMRMClientAsync.CallbackHandler allocListener = new RMCallbackHandler();
    amRMClient = AMRMClientAsync.createAMRMClientAsync(1000, allocListener);
    amRMClient.init(localConf);
    amRMClient.start();
    containerListener = createNMCallbackHandler();
    nmClientAsync = new NMClientAsyncImpl(containerListener);
    nmClientAsync.init(localConf);
    nmClientAsync.start();
    // Setup local RPC Server to accept status requests directly from clients
    // TODO need to setup a protocol for client to be able to communicate to
    // the RPC server
    // TODO use the rpc port info to register with the RM for the client to
    // send requests to this app master
    // Register self with ResourceManager
    // This will start heartbeating to the RM
    appMasterHostname = NetUtils.getHostname();
    RegisterApplicationMasterResponse response = amRMClient
        .registerApplicationMaster(appMasterHostname, appMasterRpcPort,
            appMasterTrackingUrl);
    // Dump out information about cluster capability as seen by the
    // resource manager
    int maxMem = response.getMaximumResourceCapability().getMemory();
    LOG.info("Max mem capability of resources in this cluster " + maxMem);
    int maxVCores = response.getMaximumResourceCapability().getVirtualCores();
    LOG.info("Max vcores capability of resources in this cluster " + maxVCores);
    // A resource ask cannot exceed the max.
    if (containerMemory > maxMem) {
      LOG.info("Container memory specified above max threshold of cluster."
          + " Using max value." + ", specified=" + containerMemory + ", max="
          + maxMem);
      containerMemory = maxMem;
    }
    if (containerVirtualCores > maxVCores) {
      LOG.info("Container virtual cores specified above max threshold of cluster."
          + " Using max value."
          + ", specified="
          + containerVirtualCores
          + ", max=" + maxVCores);
      containerVirtualCores = maxVCores;
    }
    // Containers kept alive from a previous AM attempt count towards the
    // total; only ask the RM for the remainder.
    List<Container> previousAMRunningContainers = response
        .getContainersFromPreviousAttempts();
    LOG.info(appAttemptID + " received " + previousAMRunningContainers.size()
        + " previous attempts' running containers on AM registration.");
    for (Container container : previousAMRunningContainers) {
      launchedContainers.add(container.getId());
    }
    numAllocatedContainers.addAndGet(previousAMRunningContainers.size());
    int numTotalContainersToRequest = numTotalContainers
        - previousAMRunningContainers.size();
    // Setup ask for containers from RM
    // Send request for containers to RM
    // Until we get our fully allocated quota, we keep on polling RM for
    // containers
    // Keep looping until all the containers are launched and shell script
    // executed on them ( regardless of success/failure).
    for (int i = 0; i < numTotalContainersToRequest; ++i) {
      AMRMClient.ContainerRequest containerAsk = setupContainerAskForRM();
      amRMClient.addContainerRequest(containerAsk);
    }
    numRequestedContainers.set(numTotalContainers);
  }
  /** Factory for the NM callback handler; overridable in tests. */
  @VisibleForTesting
  NMCallbackHandler createNMCallbackHandler() {
    return new NMCallbackHandler(this);
  }
  /**
   * Blocks until all containers have completed (or {@code done} is set by an
   * RM callback), then stops the NM client and unregisters from the RM.
   *
   * @return true when all containers completed and none failed
   */
  @VisibleForTesting
  protected boolean finish() {
    // wait for completion.
    while (!done && (numCompletedContainers.get() != numTotalContainers)) {
      try {
        Thread.sleep(200);
      } catch (InterruptedException ex) {
        // NOTE(review): interrupt is swallowed and the wait continues;
        // best practice would be to restore the interrupt flag here.
      }
    }
    // Join all launched threads
    // needed for when we time out
    // and we need to release containers
    for (Thread launchThread : launchThreads) {
      try {
        launchThread.join(10000);
      } catch (InterruptedException e) {
        LOG.info("Exception thrown in thread join: " + e.getMessage());
        e.printStackTrace();
      }
    }
    // When the application completes, it should stop all running containers
    LOG.info("Application completed. Stopping running containers");
    nmClientAsync.stop();
    // When the application completes, it should send a finish application
    // signal to the RM
    LOG.info("Application completed. Signalling finish to RM");
    FinalApplicationStatus appStatus;
    String appMessage = null;
    boolean success = true;
    if (numFailedContainers.get() == 0
        && numCompletedContainers.get() == numTotalContainers) {
      appStatus = FinalApplicationStatus.SUCCEEDED;
    } else {
      appStatus = FinalApplicationStatus.FAILED;
      appMessage = "Diagnostics." + ", total=" + numTotalContainers
          + ", completed=" + numCompletedContainers.get() + ", allocated="
          + numAllocatedContainers.get() + ", failed="
          + numFailedContainers.get();
      LOG.info(appMessage);
      success = false;
    }
    try {
      amRMClient.unregisterApplicationMaster(appStatus, appMessage, null);
    } catch (YarnException ex) {
      LOG.error("Failed to unregister application", ex);
    } catch (IOException e) {
      LOG.error("Failed to unregister application", e);
    }
    amRMClient.stop();
    return success;
  }
  /**
   * Handles asynchronous ResourceManager events: container completion
   * (tracking success/failure counts and re-requesting lost containers),
   * container allocation (spawning launch threads), shutdown requests,
   * and progress reporting.
   */
  private class RMCallbackHandler implements AMRMClientAsync.CallbackHandler {
    @SuppressWarnings("unchecked")
    @Override
    public void onContainersCompleted(List<ContainerStatus> completedContainers) {
      LOG.info("Got response from RM for container ask, completedCnt="
          + completedContainers.size());
      for (ContainerStatus containerStatus : completedContainers) {
        LOG.info(appAttemptID + " got container status for containerID="
            + containerStatus.getContainerId() + ", state="
            + containerStatus.getState() + ", exitStatus="
            + containerStatus.getExitStatus() + ", diagnostics="
            + containerStatus.getDiagnostics());
        // non complete containers should not be here
        assert (containerStatus.getState() == ContainerState.COMPLETE);
        // ignore containers we know nothing about - probably from a previous
        // attempt
        if (!launchedContainers.contains(containerStatus.getContainerId())) {
          LOG.info("Ignoring completed status of "
              + containerStatus.getContainerId()
              + "; unknown container(probably launched by previous attempt)");
          continue;
        }
        // increment counters for completed/failed containers
        int exitStatus = containerStatus.getExitStatus();
        if (0 != exitStatus) {
          // container failed
          if (ContainerExitStatus.ABORTED != exitStatus) {
            // shell script failed
            // counts as completed
            numCompletedContainers.incrementAndGet();
            numFailedContainers.incrementAndGet();
          } else {
            // container was killed by framework, possibly preempted
            // we should re-try as the container was lost for some reason
            numAllocatedContainers.decrementAndGet();
            numRequestedContainers.decrementAndGet();
            // we do not need to release the container as it would be done
            // by the RM
          }
        } else {
          // nothing to do
          // container completed successfully
          numCompletedContainers.incrementAndGet();
          LOG.info("Container completed successfully." + ", containerId="
              + containerStatus.getContainerId());
        }
      }
      // ask for more containers if any failed
      int askCount = numTotalContainers - numRequestedContainers.get();
      numRequestedContainers.addAndGet(askCount);
      if (askCount > 0) {
        for (int i = 0; i < askCount; ++i) {
          AMRMClient.ContainerRequest containerAsk = setupContainerAskForRM();
          amRMClient.addContainerRequest(containerAsk);
        }
      }
      if (numCompletedContainers.get() == numTotalContainers) {
        done = true;
      }
    }
    @Override
    public void onContainersAllocated(List<Container> allocatedContainers) {
      LOG.info("Got response from RM for container ask, allocatedCnt="
          + allocatedContainers.size());
      numAllocatedContainers.addAndGet(allocatedContainers.size());
      for (Container allocatedContainer : allocatedContainers) {
        LOG.info("Launching shell command on a new container."
            + ", containerId=" + allocatedContainer.getId()
            + ", containerNode=" + allocatedContainer.getNodeId().getHost()
            + ":" + allocatedContainer.getNodeId().getPort()
            + ", containerNodeURI=" + allocatedContainer.getNodeHttpAddress()
            + ", containerResourceMemory"
            + allocatedContainer.getResource().getMemory()
            + ", containerResourceVirtualCores"
            + allocatedContainer.getResource().getVirtualCores());
        Thread launchThread = createLaunchContainerThread(allocatedContainer);
        // launch and start the container on a separate thread to keep
        // the main thread unblocked
        // as all containers may not be allocated at one go.
        launchThreads.add(launchThread);
        launchedContainers.add(allocatedContainer.getId());
        launchThread.start();
      }
    }
    @Override
    public void onShutdownRequest() {
      // RM asked us to shut down: unblock finish().
      done = true;
    }
    @Override
    public void onNodesUpdated(List<NodeReport> list) {
      // Node updates are irrelevant for this AM.
    }
    @Override
    public float getProgress() {
      // set progress to deliver to RM on next heartbeat
      float progress = (float) numCompletedContainers.get()
          / numTotalContainers;
      return progress;
    }
    @Override
    public void onError(Throwable throwable) {
      // Fatal client error: stop heartbeating and let finish() wind down.
      done = true;
      amRMClient.stop();
    }
  }
  /**
   * Handles asynchronous NodeManager events for containers started by this
   * AM. Keeps a map of started containers so that their status can be polled,
   * and records start failures as failed/completed containers.
   */
  @VisibleForTesting
  static class NMCallbackHandler implements NMClientAsync.CallbackHandler {
    // Containers we attempted to start, keyed by id; entries are removed when
    // a container is stopped or fails.
    private ConcurrentMap<ContainerId, Container> containers = new ConcurrentHashMap<ContainerId, Container>();
    private final ApplicationMaster applicationMaster;
    public NMCallbackHandler(ApplicationMaster applicationMaster) {
      this.applicationMaster = applicationMaster;
    }
    /** Registers a container before its start request is issued. */
    public void addContainer(ContainerId containerId, Container container) {
      containers.putIfAbsent(containerId, container);
    }
    @Override
    public void onContainerStopped(ContainerId containerId) {
      if (LOG.isDebugEnabled()) {
        LOG.debug("Succeeded to stop Container " + containerId);
      }
      containers.remove(containerId);
    }
    @Override
    public void onContainerStatusReceived(ContainerId containerId,
        ContainerStatus containerStatus) {
      if (LOG.isDebugEnabled()) {
        LOG.debug("Container Status: id=" + containerId + ", status="
            + containerStatus);
      }
    }
    @Override
    public void onContainerStarted(ContainerId containerId,
        Map<String, ByteBuffer> allServiceResponse) {
      if (LOG.isDebugEnabled()) {
        LOG.debug("Succeeded to start Container " + containerId);
      }
      Container container = containers.get(containerId);
      if (container != null) {
        // Kick off an async status query for the freshly started container.
        applicationMaster.nmClientAsync.getContainerStatusAsync(containerId,
            container.getNodeId());
      }
    }
    @Override
    public void onStartContainerError(ContainerId containerId, Throwable t) {
      LOG.error("Failed to start Container " + containerId);
      containers.remove(containerId);
      // A container that never started counts as completed and failed.
      applicationMaster.numCompletedContainers.incrementAndGet();
      applicationMaster.numFailedContainers.incrementAndGet();
    }
    @Override
    public void onGetContainerStatusError(ContainerId containerId, Throwable t) {
      LOG.error("Failed to query the status of Container " + containerId);
    }
    @Override
    public void onStopContainerError(ContainerId containerId, Throwable t) {
      LOG.error("Failed to stop Container " + containerId);
      containers.remove(containerId);
    }
  }
  /**
   * Thread to connect to the {@link ContainerManagementProtocol} and launch the
   * container that will execute the shell command.
   */
  private class LaunchContainerRunnable implements Runnable {
    // Allocated container
    Container container;
    NMCallbackHandler containerListener;
    Configuration conf;
    /**
     * @param lcontainer Allocated container
     * @param containerListener Callback handler of the container
     */
    public LaunchContainerRunnable(Container lcontainer,
        NMCallbackHandler containerListener, Configuration conf) {
      this.container = lcontainer;
      this.containerListener = containerListener;
      this.conf = conf;
    }
    /**
     * Connects to CM, sets up container launch context for shell command and
     * eventually dispatches the container start request to the CM.
     */
    @Override
    public void run() {
      LOG.info("Setting up container launch container for containerid="
          + container.getId());
      // Now we setup a ContainerLaunchContext
      ContainerLaunchContext ctx = Records
          .newRecord(ContainerLaunchContext.class);
      // Set the local resources: the hama-yarn jar plus every file of the
      // Hama release, all localized into the container's working directory.
      Map<String, LocalResource> localResources = new HashMap<String, LocalResource>();
      LocalResource packageResource = Records.newRecord(LocalResource.class);
      FileSystem fs = null;
      try {
        fs = FileSystem.get(conf);
      } catch (IOException e) {
        // NOTE(review): fs stays null here and the code below would throw a
        // NullPointerException on fs.getUri(); consider failing fast instead.
        e.printStackTrace();
      }
      Path packageFile = new Path(
          System.getenv(YARNBSPConstants.HAMA_YARN_LOCATION));
      URL packageUrl = null;
      try {
        packageUrl = ConverterUtils.getYarnUrlFromPath(packageFile
            .makeQualified(fs.getUri(), fs.getWorkingDirectory()));
        LOG.info("PackageURL has been composed to " + packageUrl.toString());
        LOG.info("Reverting packageURL to path: "
            + ConverterUtils.getPathFromYarnURL(packageUrl));
      } catch (URISyntaxException e) {
        LOG.fatal("If you see this error the workarround does not work", e);
        // Count this container as completed+failed so finish() can terminate.
        numCompletedContainers.incrementAndGet();
        numFailedContainers.incrementAndGet();
        return;
      }
      packageResource.setResource(packageUrl);
      // Size and timestamp come from env vars set by the submitting client.
      packageResource.setSize(Long.parseLong(System
          .getenv(YARNBSPConstants.HAMA_YARN_SIZE)));
      packageResource.setTimestamp(Long.parseLong(System
          .getenv(YARNBSPConstants.HAMA_YARN_TIMESTAMP)));
      packageResource.setType(LocalResourceType.FILE);
      packageResource.setVisibility(LocalResourceVisibility.APPLICATION);
      localResources.put(YARNBSPConstants.APP_MASTER_JAR_PATH, packageResource);
      Path hamaReleaseFile = new Path(
          System.getenv(YARNBSPConstants.HAMA_LOCATION));
      URL hamaReleaseUrl = ConverterUtils.getYarnUrlFromPath(hamaReleaseFile
          .makeQualified(fs.getUri(), fs.getWorkingDirectory()));
      LOG.info("Hama release URL has been composed to "
          + hamaReleaseUrl.toString());
      RemoteIterator<LocatedFileStatus> fileStatusListIterator = null;
      try {
        // Recursively localize every file of the Hama release directory.
        fileStatusListIterator = fs.listFiles(hamaReleaseFile, true);
        while (fileStatusListIterator.hasNext()) {
          LocatedFileStatus lfs = fileStatusListIterator.next();
          LocalResource localRsrc = LocalResource.newInstance(
              ConverterUtils.getYarnUrlFromPath(lfs.getPath()),
              LocalResourceType.FILE, LocalResourceVisibility.APPLICATION,
              lfs.getLen(), lfs.getModificationTime());
          localResources.put(lfs.getPath().getName(), localRsrc);
        }
      } catch (IOException e) {
        LOG.fatal("The error has occured to RemoteIterator " + e);
      }
      ctx.setLocalResources(localResources);
      /*
       * TODO Package classpath seems not to work if you're in pseudo
       * distributed mode, because the resource must not be moved, it will never
       * be unpacked. So we will check if our jar file has the file:// prefix
       * and put it into the CP directly
       */
      StringBuilder classPathEnv = new StringBuilder(
          ApplicationConstants.Environment.CLASSPATH.$()).append(
          File.pathSeparatorChar).append("./*");
      for (String c : conf.getStrings(
          YarnConfiguration.YARN_APPLICATION_CLASSPATH,
          YarnConfiguration.DEFAULT_YARN_APPLICATION_CLASSPATH)) {
        classPathEnv.append(File.pathSeparatorChar);
        classPathEnv.append(c.trim());
      }
      // Command line: java BSPRunner <jtIdentifier> <partitionId> <jobFile>,
      // with stdout/stderr redirected into the container log directory.
      Vector<CharSequence> vargs = new Vector<CharSequence>();
      vargs.add("${JAVA_HOME}/bin/java");
      vargs.add("-cp " + classPathEnv + "");
      vargs.add(BSPRunner.class.getCanonicalName());
      vargs.add(jobId.getJtIdentifier());
      // Unique partition id for this peer, drawn from the static counter.
      vargs.add(Integer.toString(ai.incrementAndGet()));
      vargs.add(new Path(jobFile).makeQualified(fs.getUri(),
          fs.getWorkingDirectory()).toString());
      vargs.add("1>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR
          + "/hama-worker.stdout");
      vargs.add("2>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR
          + "/hama-worker.stderr");
      // Get final commmand
      StringBuilder command = new StringBuilder();
      for (CharSequence str : vargs) {
        command.append(str).append(" ");
      }
      List<String> commands = new ArrayList<String>();
      commands.add(command.toString());
      ctx.setCommands(commands);
      ctx.setTokens(allTokens.duplicate());
      LOG.info("Starting commands: " + commands);
      containerListener.addContainer(container.getId(), container);
      nmClientAsync.startContainerAsync(container, ctx);
    }
  }
/**
* Setup the request that will be sent to the RM for the container ask.
*
* @return the setup ResourceRequest to be sent to RM
*/
private AMRMClient.ContainerRequest setupContainerAskForRM() {
// setup requirements for hosts
// using * as any host will do for the distributed shell app
// set the priority for the request
// TODO - what is the range for priority? how to decide?
Priority pri = Priority.newInstance(requestPriority);
// Set up resource type requirements
// For now, memory and CPU are supported so we set memory and cpu
// requirements
Resource capability = Resource.newInstance(containerMemory,
containerVirtualCores);
AMRMClient.ContainerRequest request = new AMRMClient.ContainerRequest(
capability, null, null, pri);
LOG.info("Requested container ask: " + request.toString());
return request;
}
  /**
   * Reads the configuration from the given path.
   *
   * @param path path of the serialized job configuration (XML)
   * @return a new HamaConfiguration with the file's properties added
   * @throws IOException if the file system or file cannot be opened
   */
  private static Configuration getSubmitConfiguration(String path)
      throws IOException {
    Path jobSubmitPath = new Path(path);
    Configuration jobConf = new HamaConfiguration();
    FileSystem fs = FileSystem.get(URI.create(path), jobConf);
    InputStream in = fs.open(jobSubmitPath);
    // NOTE(review): the stream is handed to Configuration, which reads it
    // lazily — presumably Configuration closes it after parsing; closing it
    // here eagerly could break the lazy load. Verify against Hadoop docs.
    jobConf.addResource(in);
    return jobConf;
  }
/**
* Gets the application attempt ID from the environment. This should be set by
* YARN when the container has been launched.
*
* @return a new ApplicationAttemptId which is unique and identifies this
* task.
*/
private static ApplicationAttemptId getApplicationAttemptId()
throws IOException {
Map<String, String> envs = System.getenv();
if (!envs.containsKey(ApplicationConstants.Environment.CONTAINER_ID.name())) {
throw new IllegalArgumentException(
"ApplicationAttemptId not set in the environment");
}
ContainerId containerId = ConverterUtils.toContainerId(envs
.get(ApplicationConstants.Environment.CONTAINER_ID.name()));
return containerId.getApplicationAttemptId();
}
  /**
   * This method starts the sync server on a specific port and waits for it to
   * come up. Be aware that this method adds the "bsp.sync.server.address" that
   * is needed for a task to connect to the service.
   *
   * @throws IOException
   */
  private void startSyncServer() throws Exception {
    syncServer = SyncServiceFactory.getSyncServer(jobConf);
    syncServer.init(jobConf);
    // Run the server on the AM's single-threaded executor so it does not
    // block this thread.
    ZKServerThread serverThread = new ZKServerThread(syncServer);
    threadPool.submit(serverThread);
  }
/**
* This method is to run Zookeeper in order to coordinates between BSPMaster
* and Groomservers using Runnable interface in java.
*/
private static class ZKServerThread implements Runnable {
SyncServer server;
ZKServerThread(SyncServer s) {
server = s;
}
@Override
public void run() {
try {
server.start();
} catch (Exception e) {
LOG.error("Error running SyncServer.", e);
}
}
}
  /**
   * This method starts the needed RPC servers: client server and the task
   * server. This method manipulates the configuration and therefore needs to be
   * executed BEFORE the submitconfiguration gets rewritten.
   *
   * @throws IOException
   */
  private void startRPCServers() throws IOException {
    // start the RPC server which talks to the client
    this.clientServer = RPC.getServer(BSPClient.class, hostname, clientPort,
        jobConf);
    this.clientServer.start();
    // start the RPC server which talks to the tasks
    this.taskServerPort = BSPNetUtils.getFreePort(10000);
    this.taskServer = RPC.getServer(this, hostname, taskServerPort, jobConf);
    this.taskServer.start();
    // readjusting the configuration to let the tasks know where we are.
    this.jobConf.set("hama.umbilical.address", hostname + ":" + taskServerPort);
  }
/**
* Writes the current configuration to a given path to reflect changes. For
* example the sync server address is put after the file has been written.
*/
private static void rewriteSubmitConfiguration(String path, Configuration conf)
throws IOException {
Path jobSubmitPath = new Path(path);
FileSystem fs = FileSystem.get(conf);
FSDataOutputStream out = fs.create(jobSubmitPath);
conf.writeXml(out);
out.close();
LOG.info("Written new configuration back to " + path);
}
/**
* Get container memory from "bsp.child.mem.in.mb" set on Hama configuration
*
* @return The memory of container.
*/
private int getMemoryRequirements(Configuration conf) {
String newMemoryProperty = conf.get("bsp.child.mem.in.mb");
if (newMemoryProperty == null) {
LOG.warn("\"bsp.child.mem.in.mb\" was not set! Try parsing the child opts...");
return getMemoryFromOptString(conf.get("bsp.child.java.opts"));
} else {
return Integer.valueOf(newMemoryProperty);
}
}
// This really needs a testcase
private static int getMemoryFromOptString(String opts) {
final int DEFAULT_MEMORY_MB = 256;
if (opts == null) {
return DEFAULT_MEMORY_MB;
}
if (!opts.contains("-Xmx")) {
LOG.info("No \"-Xmx\" option found in child opts, using default amount of memory!");
return DEFAULT_MEMORY_MB;
} else {
// e.G: -Xmx512m
int startIndex = opts.indexOf("-Xmx") + 4;
String xmxString = opts.substring(startIndex);
char qualifier = xmxString.charAt(xmxString.length() - 1);
int memory = Integer.valueOf(xmxString.substring(0,
xmxString.length() - 1));
if (qualifier == 'm') {
return memory;
} else if (qualifier == 'g') {
return memory * 1024;
} else {
throw new IllegalArgumentException(
"Memory Limit in child opts was not set! \"bsp.child.java.opts\" String was: "
+ opts);
}
}
}
  /**
   * Creates (but does not start) the thread that launches the BSP peer in the
   * given allocated container; overridable in tests.
   */
  @VisibleForTesting
  Thread createLaunchContainerThread(Container allocatedContainer) {
    LaunchContainerRunnable runnableLaunchContainer = new LaunchContainerRunnable(
        allocatedContainer, containerListener, jobConf);
    return new Thread(runnableLaunchContainer);
  }
  /** BSPClient RPC: returns the highest superstep reported by any task. */
  @Override
  public LongWritable getCurrentSuperStep() {
    return new LongWritable(superstep);
  }
  /**
   * Umbilical RPC: hands the task its BSPTask, wired to the attempt's input
   * split when the job has splits, otherwise to an empty NullInputSplit.
   */
  @Override
  public Task getTask(TaskAttemptID taskid) throws IOException {
    BSPJobClient.RawSplit assignedSplit = null;
    String splitName = NullInputFormat.NullInputSplit.class.getName();
    if (splits != null) {
      // NOTE(review): taskid.id is used as a direct index — assumes the task
      // id range matches the split count; out-of-range ids would throw.
      assignedSplit = splits[taskid.id];
      splitName = assignedSplit.getClassName();
      return new BSPTask(jobId, jobFile, taskid, taskid.id, splitName,
          assignedSplit.getBytes());
    } else {
      return new BSPTask(jobId, jobFile, taskid, taskid.id, splitName,
          new BytesWritable());
    }
  }
  // NOTE(review): always reports false; pinging is apparently not supported
  // by this AM implementation.
  @Override
  public boolean ping(TaskAttemptID taskid) throws IOException {
    return false;
  }
  // The following umbilical callbacks are deliberate no-ops in this AM:
  // task completion and error reporting are tracked via container exit
  // status instead.
  @Override
  public void done(TaskAttemptID taskid) throws IOException {
  }
  @Override
  public void fsError(TaskAttemptID taskId, String message) throws IOException {
  }
  @Override
  public void fatalError(TaskAttemptID taskId, String message)
      throws IOException {
  }
@Override
public boolean statusUpdate(TaskAttemptID taskId, TaskStatus taskStatus)
throws IOException, InterruptedException {
if (taskStatus.getSuperstepCount() > superstep) {
superstep = taskStatus.getSuperstepCount();
LOG.info("Now in superstep " + superstep);
}
Counters counters = taskStatus.getCounters();
globalCounter.incrAllCounters(counters);
return true;
}
  // NOTE(review): always 0 — port assignment is not handled by this AM.
  @Override
  public int getAssignedPortNum(TaskAttemptID taskid) {
    return 0;
  }
@Override
public void close() throws IOException {
this.clientServer.stop();
this.taskServer.stop();
this.syncServer.stopServer();
threadPool.shutdown();
}
  /** Hadoop RPC versioning hook; both served protocols use BSPClient's id. */
  @Override
  public long getProtocolVersion(String protocol, long clientVersion)
      throws IOException {
    return BSPClient.versionID;
  }
}
|
apache/directory-scimple | 35,805 | scim-tools/src/test/java/org/apache/directory/scim/tools/diff/PatchGeneratorTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.directory.scim.tools.diff;
import static org.assertj.core.api.Assertions.assertThat;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.directory.scim.spec.resources.GroupMembership;
import org.apache.directory.scim.test.stub.ExampleObjectExtension;
import org.apache.directory.scim.test.stub.Subobject;
import org.apache.directory.scim.core.schema.SchemaRegistry;
import org.apache.directory.scim.spec.extension.EnterpriseExtension;
import org.apache.directory.scim.spec.extension.EnterpriseExtension.Manager;
import org.apache.directory.scim.spec.filter.FilterParseException;
import org.apache.directory.scim.spec.patch.PatchOperation;
import org.apache.directory.scim.spec.patch.PatchOperationPath;
import org.apache.directory.scim.spec.phonenumber.PhoneNumberParseException;
import org.apache.directory.scim.spec.resources.Address;
import org.apache.directory.scim.spec.resources.Email;
import org.apache.directory.scim.spec.resources.Name;
import org.apache.directory.scim.spec.resources.PhoneNumber;
import org.apache.directory.scim.spec.resources.PhoneNumber.GlobalPhoneNumberBuilder;
import org.apache.directory.scim.spec.resources.Photo;
import org.apache.directory.scim.spec.resources.ScimGroup;
import org.apache.directory.scim.spec.resources.ScimUser;
import org.apache.directory.scim.spec.schema.Schemas;
import org.assertj.core.api.Condition;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
import static java.util.Collections.emptyList;
import static org.apache.directory.scim.test.assertj.ScimpleAssertions.patchOpMatching;
import static org.apache.directory.scim.test.assertj.ScimpleAssertions.scimAssertThat;
import static org.junit.jupiter.api.Assertions.*;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
 * Tests for PatchGenerator: diffing two SCIM resources must yield the minimal
 * list of SCIM PATCH operations that transforms the first into the second.
 */
public class PatchGeneratorTest {

  // Marker values used by list/ordering test fixtures.
  private static final String FIRST = "first";
  private static final String SECOND = "second";
  private static final String THIRD = "third";
  private static final String FOURTH = "fourth";
  private static final String A = "A";
  private static final String B = "B";
  private static final String C = "C";

  // Mocked registry serving the schemas the generator needs to resolve paths.
  private SchemaRegistry schemaRegistry;
  /** Registers the user/group schemas and both extensions on the mock registry. */
  @BeforeEach
  public void initialize() throws Exception {
    schemaRegistry = mock(SchemaRegistry.class);
    when(schemaRegistry.getSchema(ScimUser.SCHEMA_URI)).thenReturn(Schemas.schemaFor(ScimUser.class));
    when(schemaRegistry.getSchema(EnterpriseExtension.URN)).thenReturn(Schemas.schemaForExtension(EnterpriseExtension.class));
    when(schemaRegistry.getSchema(ExampleObjectExtension.URN)).thenReturn(Schemas.schemaForExtension(ExampleObjectExtension.class));
    when(schemaRegistry.getSchema(ScimGroup.SCHEMA_URI)).thenReturn(Schemas.schemaFor(ScimGroup.class));
  }
  /** Setting a previously absent simple attribute yields a single ADD. */
  @Test
  public void testAddSingleAttribute() throws Exception {
    ScimUser user1 = createUser();
    ScimUser user2 = createUser();
    user2.setNickName("Jon");
    List<PatchOperation> result = new PatchGenerator(schemaRegistry).diff(user1, user2);
    scimAssertThat(result).single()
      .matches(PatchOperation.Type.ADD, "nickName", "Jon");
  }
  /** Adding a whole extension yields one ADD targeting the extension URN. */
  @Test
  public void testAddSingleExtension() throws Exception {
    ScimUser user1 = createUser();
    // Strip the enterprise extension from the baseline user ...
    EnterpriseExtension ext = user1.removeExtension(EnterpriseExtension.class);
    ScimUser user2 = createUser();
    // ... and keep it on the updated user, so the diff is exactly that extension.
    user2.addExtension(ext);
    List<PatchOperation> result = new PatchGenerator(schemaRegistry).diff(user1, user2);
    scimAssertThat(result).single()
      .matches(PatchOperation.Type.ADD, "urn:ietf:params:scim:schemas:extension:enterprise:2.0:User", ext);
  }
  /** Setting a sub-attribute of a complex attribute yields a dotted-path ADD. */
  @Test
  public void testAddComplexAttribute() throws Exception {
    ScimUser user1 = createUser();
    ScimUser user2 = createUser();
    user2.getName()
      .setHonorificPrefix("Dr.");
    List<PatchOperation> result = new PatchGenerator(schemaRegistry).diff(user1, user2);
    scimAssertThat(result).single()
      .matches(PatchOperation.Type.ADD, "name.honorificPrefix", "Dr.");
  }
  /** Appending to an existing multi-valued attribute yields one ADD of the new value. */
  @Test
  public void testAddMultiValuedAttribute() throws Exception {
    ScimUser user1 = createUser();
    ScimUser user2 = createUser();
    PhoneNumber mobilePhone = new GlobalPhoneNumberBuilder().globalNumber("+1(814)867-5306").build();
    mobilePhone.setType("mobile");
    mobilePhone.setPrimary(false);
    user2.getPhoneNumbers().add(mobilePhone);
    List<PatchOperation> result = new PatchGenerator(schemaRegistry).diff(user1, user2);
    scimAssertThat(result).single()
      .matches(PatchOperation.Type.ADD, "phoneNumbers", mobilePhone);
  }
    /**
     * Regression test: the differencing engine used to emit a REPLACE when an
     * object was added to a currently-empty collection. The expected behavior,
     * verified below, is a single ADD whose value is the element that was
     * added (a single PhoneNumber here).
     */
    @Test
    public void testAddObjectToEmptyCollection() throws Exception {
        ScimUser user1 = createUser();
        user1.setPhoneNumbers(new ArrayList<>());
        ScimUser user2 = createUser();
        user2.setPhoneNumbers(new ArrayList<>());
        PhoneNumber mobilePhone = new GlobalPhoneNumberBuilder().globalNumber("+1(814)867-5306").build();
        mobilePhone.setType("mobile");
        mobilePhone.setPrimary(true);
        user2.getPhoneNumbers().add(mobilePhone);
        List<PatchOperation> operations = new PatchGenerator(schemaRegistry).diff(user1, user2);
        assertNotNull(operations);
        assertThat(operations).hasSize(1);
        PatchOperation operation = operations.get(0);
        assertNotNull(operation.getValue());
        assertEquals(PatchOperation.Type.ADD, operation.getOperation());
        assertEquals(PhoneNumber.class, operation.getValue().getClass());
    }
@Test
public void testAddObjectsToEmptyCollection() throws Exception {
ScimUser user1 = createUser();
user1.setPhoneNumbers(new ArrayList<>());
ScimUser user2 = createUser();
user2.setPhoneNumbers(new ArrayList<>());
PhoneNumber mobilePhone = new GlobalPhoneNumberBuilder().globalNumber("+1(814)867-5306").build();
mobilePhone.setType("mobile");
mobilePhone.setPrimary(true);
PhoneNumber homePhone = new GlobalPhoneNumberBuilder().globalNumber("+1(814)867-5307").build();
homePhone.setType("home");
homePhone.setPrimary(true);
user2.getPhoneNumbers().add(mobilePhone);
user2.getPhoneNumbers().add(homePhone);
List<PatchOperation> operations = new PatchGenerator(schemaRegistry).diff(user1, user2);
assertNotNull(operations);
assertEquals(2, operations.size());
PatchOperation operation = operations.get(0);
assertNotNull(operation.getValue());
assertEquals(PatchOperation.Type.ADD, operation.getOperation());
assertEquals(PhoneNumber.class, operation.getValue().getClass());
operation = operations.get(1);
assertNotNull(operation.getValue());
assertEquals(PatchOperation.Type.ADD, operation.getOperation());
assertEquals(PhoneNumber.class, operation.getValue().getClass());
}
@Test
public void testReplaceSingleAttribute() throws Exception {
ScimUser user1 = createUser();
ScimUser user2 = createUser();
user2.setActive(false);
List<PatchOperation> result = new PatchGenerator(schemaRegistry).diff(user1, user2);
scimAssertThat(result).single()
.matches(PatchOperation.Type.REPLACE, "active", false);
}
@Test
public void testReplaceExtensionSingleAttribute() throws Exception {
ScimUser user1 = createUser();
ScimUser user2 = createUser();
user2.getExtension(EnterpriseExtension.class).setDepartment("Dept XYZ.");
List<PatchOperation> result = new PatchGenerator(schemaRegistry).diff(user1, user2);
scimAssertThat(result).single()
.matches(PatchOperation.Type.REPLACE, "urn:ietf:params:scim:schemas:extension:enterprise:2.0:User:department", "Dept XYZ.");
}
@Test
public void testReplaceComplexAttribute() throws Exception {
ScimUser user1 = createUser();
ScimUser user2 = createUser();
user2.getName()
.setFamilyName("Nobody");
List<PatchOperation> result = new PatchGenerator(schemaRegistry).diff(user1, user2);
scimAssertThat(result).single()
.matches(PatchOperation.Type.REPLACE, "name.familyName", "Nobody");
}
@Test
public void testReplaceMultiValuedAttribute() throws Exception {
ScimUser user1 = createUser();
ScimUser user2 = createUser();
Email workEmail = user2.getEmails().get(1);
workEmail.setValue("nobody@example.com");
workEmail.setDisplay("nobody@example.com");
List<PatchOperation> result = new PatchGenerator(schemaRegistry).diff(user1, user2);
// Changing contents of a collection, should REMOVE the old and ADD a new
scimAssertThat(result).containsOnly(
patchOpMatching(PatchOperation.Type.REMOVE, "emails[type EQ \"work\"]"),
patchOpMatching(PatchOperation.Type.ADD, "emails", workEmail));
}
@Test
public void testRemoveSingleAttribute() throws Exception {
ScimUser user1 = createUser();
ScimUser user2 = createUser();
user2.setUserName(null);
List<PatchOperation> result = new PatchGenerator(schemaRegistry).diff(user1, user2);
scimAssertThat(result).single()
.matches(PatchOperation.Type.REMOVE, "userName", null);
}
@Test
public void testRemoveSingleExtension() throws Exception {
ScimUser user1 = createUser();
ScimUser user2 = createUser();
user2.removeExtension(EnterpriseExtension.class);
List<PatchOperation> result = new PatchGenerator(schemaRegistry).diff(user1, user2);
scimAssertThat(result).single()
.matches(PatchOperation.Type.REMOVE, "urn:ietf:params:scim:schemas:extension:enterprise:2.0:User", null);
}
@Test
public void testRemoveComplexAttribute() throws Exception {
ScimUser user1 = createUser();
ScimUser user2 = createUser();
user2.getName()
.setMiddleName(null);
List<PatchOperation> result = new PatchGenerator(schemaRegistry).diff(user1, user2);
scimAssertThat(result).single()
.matches(PatchOperation.Type.REMOVE, "name.middleName", null);
}
@Test
public void testRemoveFullComplexAttribute() throws Exception {
ScimUser user1 = createUser();
ScimUser user2 = createUser();
user2.setName(null);
List<PatchOperation> result = new PatchGenerator(schemaRegistry).diff(user1, user2);
scimAssertThat(result).single()
.matches(PatchOperation.Type.REMOVE, "name", null);
}
@Test
public void testRemoveMultiValuedAttribute() throws Exception {
ScimUser user1 = createUser();
ScimUser user2 = createUser();
List<Email> newEmails = user2.getEmails()
.stream()
.filter(e -> e.getType()
.equals("work"))
.collect(Collectors.toList());
user2.setEmails(newEmails);
List<PatchOperation> result = new PatchGenerator(schemaRegistry).diff(user1, user2);
scimAssertThat(result).single()
.matches(PatchOperation.Type.REMOVE, "emails[type EQ \"home\"]", null);
}
@Test
public void testRemoveMultiValuedAttributeWithSorting() throws Exception {
ScimUser user1 = createUser();
ScimUser user2 = createUser();
Address localAddress = new Address();
localAddress.setStreetAddress("123 Main Street");
localAddress.setLocality("State College");
localAddress.setRegion("PA");
localAddress.setCountry("USA");
localAddress.setType("local");
user1.getAddresses().add(localAddress);
List<PatchOperation> result = new PatchGenerator(schemaRegistry).diff(user1, user2);
scimAssertThat(result).single()
.matches(PatchOperation.Type.REMOVE, "addresses[type EQ \"local\"]", null);
}
@Test
public void testAddMultiValuedAttributeWithSorting() throws Exception {
ScimUser user1 = createUser();
ScimUser user2 = createUser();
Address localAddress = new Address();
localAddress.setStreetAddress("123 Main Street");
localAddress.setLocality("State College");
localAddress.setRegion("PA");
localAddress.setCountry("USA");
localAddress.setType("local");
Address newWorkAddress = user2.getAddresses().get(0);
newWorkAddress.setPostalCode("01234"); // changes whole address, expect REMOVE/ADD
user2.getAddresses().add(localAddress); // ADD new Address
List<PatchOperation> result = new PatchGenerator(schemaRegistry).diff(user1, user2);
scimAssertThat(result)
.containsOnly(
patchOpMatching(PatchOperation.Type.REMOVE, "addresses[type EQ \"work\"]"),
patchOpMatching(PatchOperation.Type.ADD, "addresses", newWorkAddress),
patchOpMatching(PatchOperation.Type.ADD, "addresses", localAddress));
}
@Test
public void verifyEmptyArraysDoNotCauseDiff() throws Exception {
ScimUser user1 = createUser();
ScimUser user2 = createUser();
user1.setPhotos(new ArrayList<>());
List<PatchOperation> operations = new PatchGenerator(schemaRegistry).diff(user1, user2);
scimAssertThat(operations)
.describedAs("Empty Arrays caused a diff")
.isEmpty();
}
@Test
public void verifyEmptyExtensionArraysDoNotCauseDiff() throws Exception {
ScimUser user1 = createUser();
ScimUser user2 = createUser();
ExampleObjectExtension ext1 = new ExampleObjectExtension();
user1.addExtension(ext1);
ExampleObjectExtension ext2 = new ExampleObjectExtension();
ext2.setList(new ArrayList<>());
user2.addExtension(ext2);
List<PatchOperation> operations = new PatchGenerator(schemaRegistry).diff(user1, user2);
scimAssertThat(operations)
.describedAs("Empty Arrays caused a diff")
.isEmpty();
}
@Test
public void verifyEmptyArraysAreNulled() throws Exception {
ScimUser user1 = createUser();
ScimUser user2 = createUser();
PatchGenerator patchGenerator = new PatchGenerator(schemaRegistry);
//Set empty list on root object and verify no differences
user1.setPhotos(new ArrayList<>());
List<PatchOperation> operations = patchGenerator.diff(user1, user2);
assertTrue(operations.isEmpty(), "Empty Arrays are not being nulled out");
//Reset user 1 and empty list on Extension and verify no differences
user1 = createUser();
ExampleObjectExtension ext = new ExampleObjectExtension();
ext.setList(new ArrayList<>());
operations = patchGenerator.diff(user1, user2);
assertTrue(operations.isEmpty(), "Empty Arrays are not being nulled out");
//Reset extension and set empty list on element of extension then verify no differences
Subobject subobject = new Subobject();
subobject.setList1(new ArrayList<>());
ext = new ExampleObjectExtension();
ext.setSubobject(subobject);
operations = patchGenerator.diff(user1, user2);
assertTrue(operations.isEmpty(), "Empty Arrays are not being nulled out");
}
@Test
public void testAddArray() throws Exception {
ScimUser user1 = createUser();
ScimUser user2 = createUser();
Photo photo = new Photo();
photo.setType("photo");
photo.setValue("photo1.png");
user2.setPhotos(Stream.of(photo).collect(Collectors.toList()));
List<PatchOperation> operations = new PatchGenerator(schemaRegistry).diff(user1, user2);
scimAssertThat(operations).containsOnly(
patchOpMatching(PatchOperation.Type.ADD, "photos", photo)
);
}
@Test
public void testAddExtensionArray() throws Exception {
ScimUser user1 = createUser();
ScimUser user2 = createUser();
ExampleObjectExtension ext1 = new ExampleObjectExtension();
ext1.setList(null);
user1.addExtension(ext1);
ExampleObjectExtension ext2 = new ExampleObjectExtension();
ext2.setList(List.of(FIRST,SECOND));
user2.addExtension(ext2);
List<PatchOperation> operations = new PatchGenerator(schemaRegistry).diff(user1, user2);
scimAssertThat(operations).containsOnly(
patchOpMatching(PatchOperation.Type.ADD, ExampleObjectExtension.URN + ":list", List.of(FIRST,SECOND))
);
}
@Test
public void testRemoveArray() throws Exception {
ScimUser user1 = createUser();
Photo photo = new Photo();
photo.setType("photo");
photo.setValue("photo1.png");
user1.setPhotos(List.of(photo));
ScimUser user2 = createUser();
List<PatchOperation> operations = new PatchGenerator(schemaRegistry).diff(user1, user2);
scimAssertThat(operations).containsOnly(
patchOpMatching(PatchOperation.Type.REMOVE, "photos")
);
}
@Test
public void testRemoveExtensionArray() throws Exception {
ScimUser user1 = createUser();
ExampleObjectExtension ext1 = new ExampleObjectExtension();
ext1.setList(List.of(FIRST,SECOND));
user1.addExtension(ext1);
ScimUser user2 = createUser();
ExampleObjectExtension ext2 = new ExampleObjectExtension();
ext2.setList(null);
user2.addExtension(ext2);
List<PatchOperation> operations = new PatchGenerator(schemaRegistry).diff(user1, user2);
scimAssertThat(operations).containsOnly(
patchOpMatching(PatchOperation.Type.REMOVE, ExampleObjectExtension.URN + ":list")
);
}
    @Test
    public void testNonTypedAttributeListGetUseablePath() throws Exception {
        ScimUser user1 = createUser();
        ScimUser user2 = createUser();
        ExampleObjectExtension ext1 = new ExampleObjectExtension();
        ext1.setList(List.of(FIRST,SECOND,THIRD));
        user1.addExtension(ext1);
        ExampleObjectExtension ext2 = new ExampleObjectExtension();
        ext2.setList(List.of(FIRST,SECOND,FOURTH));
        user2.addExtension(ext2);
        List<PatchOperation> operations = new PatchGenerator(schemaRegistry).diff(user1, user2);
        // Changing one element of an untyped (plain string) list is expressed
        // as a single REPLACE of the whole list value, since individual
        // elements have no filterable path. (The previous comment incorrectly
        // described this as a REMOVE/ADD pair.)
        scimAssertThat(operations).containsOnly(
            patchOpMatching(PatchOperation.Type.REPLACE, ExampleObjectExtension.URN + ":list", List.of(FIRST,SECOND,FOURTH)));
    }
@Test
public void testMoveFormatNameToNicknamePart1() throws Exception {
ScimUser user1 = createUser();
ScimUser user2 = createUser();
String nickname = "John Xander Anyman";
user1.setNickName(nickname);
user2.getName().setFormatted(nickname);
List<PatchOperation> operations = new PatchGenerator(schemaRegistry).diff(user1, user2);
scimAssertThat(operations).containsOnly(
patchOpMatching(PatchOperation.Type.ADD, "name.formatted", nickname),
patchOpMatching(PatchOperation.Type.REMOVE, "nickName", null)
);
}
@Test
public void testMoveFormatNameToNicknamePart2() throws Exception {
ScimUser user1 = createUser();
ScimUser user2 = createUser();
String nickname = "John Xander Anyman";
user1.setNickName(nickname);
user2.setNickName("");
user1.getName().setFormatted("");
user2.getName().setFormatted(nickname);
List<PatchOperation> operations = new PatchGenerator(schemaRegistry).diff(user1, user2);
scimAssertThat(operations).containsOnly(
patchOpMatching(PatchOperation.Type.REPLACE, "name.formatted", nickname),
patchOpMatching(PatchOperation.Type.REPLACE, "nickName", ""));
}
@Test
public void testMoveFormatNameToNicknamePart3() throws Exception {
ScimUser user1 = createUser();
ScimUser user2 = createUser();
String nickname = "John Xander Anyman";
user1.setNickName(nickname);
user2.setNickName(null);
user1.getName().setFormatted("");
user2.getName().setFormatted(nickname);
List<PatchOperation> operations = new PatchGenerator(schemaRegistry).diff(user1, user2);
scimAssertThat(operations).containsOnly(
patchOpMatching(PatchOperation.Type.REPLACE, "name.formatted", nickname),
patchOpMatching(PatchOperation.Type.REMOVE, "nickName"));
}
@Test
public void testMoveFormatNameToNicknamePart4() throws Exception {
ScimUser user1 = createUser();
ScimUser user2 = createUser();
String nickname = "John Xander Anyman";
user1.setNickName(nickname);
user2.setNickName("");
user1.getName().setFormatted(null);
user2.getName().setFormatted(nickname);
List<PatchOperation> operations = new PatchGenerator(schemaRegistry).diff(user1, user2);
scimAssertThat(operations).containsOnly(
patchOpMatching(PatchOperation.Type.ADD, "name.formatted", nickname),
patchOpMatching(PatchOperation.Type.REPLACE, "nickName", ""));
}
@Test
public void testMoveFormatNameToNicknamePart5() throws Exception {
ScimUser user1 = createUser();
ScimUser user2 = createUser();
String nickname = "John Xander Anyman";
user1.setNickName("");
user2.setNickName(nickname);
user1.getName().setFormatted(nickname);
user2.getName().setFormatted(null);
List<PatchOperation> operations = new PatchGenerator(schemaRegistry).diff(user1, user2);
scimAssertThat(operations).containsOnly(
patchOpMatching(PatchOperation.Type.REMOVE, "name.formatted"),
patchOpMatching(PatchOperation.Type.REPLACE, "nickName", nickname));
}
@ParameterizedTest
@MethodSource("testListOfStringsParameters")
public void testListOfStringsParameterized(List<String> list1, List<String> list2, List<Condition<PatchOperation>> opConditions) throws Exception {
ScimUser user1 = createUser();
ScimUser user2 = createUser();
ExampleObjectExtension ext1 = new ExampleObjectExtension();
ext1.setList(list1);
user1.addExtension(ext1);
ExampleObjectExtension ext2 = new ExampleObjectExtension();
ext2.setList(list2);
user2.addExtension(ext2);
List<PatchOperation> operations = new PatchGenerator(schemaRegistry).diff(user1, user2);
scimAssertThat(operations).containsOnly(opConditions);
}
@SuppressWarnings("unused")
private static Object[] testListOfStringsParameters() throws Exception {
List<Object> params = new ArrayList<>();
String nickName = "John Xander Anyman";
//Parameter order
//1 Original list of Strings
//2 Update list of Strings
//3 Array of Expected Operations
// 3a Operation
// 3b Path
// 3c Value
List<Condition<PatchOperation>> multipleOps = new ArrayList<>();
multipleOps.add(patchOpMatching(PatchOperation.Type.ADD, "urn:ietf:params:scim:schemas:extension:example:2.0:Object:list", "A"));
multipleOps.add(patchOpMatching(PatchOperation.Type.ADD, "urn:ietf:params:scim:schemas:extension:example:2.0:Object:list", "B"));
multipleOps.add(patchOpMatching(PatchOperation.Type.ADD, "urn:ietf:params:scim:schemas:extension:example:2.0:Object:list", "C"));
params.add(new Object[] {List.of(A), emptyList(), List.of(patchOpMatching(PatchOperation.Type.REMOVE, "urn:ietf:params:scim:schemas:extension:example:2.0:Object:list"))});
params.add(new Object[] {List.of(A), null, List.of(patchOpMatching(PatchOperation.Type.REMOVE, "urn:ietf:params:scim:schemas:extension:example:2.0:Object:list"))});
params.add(new Object[] {null, List.of(A), List.of(patchOpMatching(PatchOperation.Type.ADD, "urn:ietf:params:scim:schemas:extension:example:2.0:Object:list", List.of(A)))});
params.add(new Object[] {null, List.of(C,B,A), List.of(patchOpMatching(PatchOperation.Type.ADD, "urn:ietf:params:scim:schemas:extension:example:2.0:Object:list", List.of(C,B,A)))});
params.add(new Object[] {List.of(A,B,C), emptyList(), List.of(patchOpMatching(PatchOperation.Type.REMOVE, "urn:ietf:params:scim:schemas:extension:example:2.0:Object:list"))});
params.add(new Object[] {List.of(C,B,A), emptyList(), List.of(patchOpMatching(PatchOperation.Type.REMOVE, "urn:ietf:params:scim:schemas:extension:example:2.0:Object:list"))});
params.add(new Object[] {emptyList(), List.of(A), List.of(patchOpMatching(PatchOperation.Type.ADD, "urn:ietf:params:scim:schemas:extension:example:2.0:Object:list", List.of(A)))});
params.add(new Object[] {emptyList(), List.of(C,B,A), List.of(patchOpMatching(PatchOperation.Type.ADD, "urn:ietf:params:scim:schemas:extension:example:2.0:Object:list", List.of(C,B,A)))});
params.add(new Object[] {List.of(A, B), List.of(B), List.of(patchOpMatching(PatchOperation.Type.REPLACE, "urn:ietf:params:scim:schemas:extension:example:2.0:Object:list", List.of(B)))});
params.add(new Object[] {List.of(B, A), List.of(B), List.of(patchOpMatching(PatchOperation.Type.REPLACE, "urn:ietf:params:scim:schemas:extension:example:2.0:Object:list", List.of(B)))});
params.add(new Object[] {List.of(B), List.of(A,B), List.of(patchOpMatching(PatchOperation.Type.REPLACE, "urn:ietf:params:scim:schemas:extension:example:2.0:Object:list", List.of(A,B)))});
params.add(new Object[] {List.of(B), List.of(B,A), List.of(patchOpMatching(PatchOperation.Type.REPLACE, "urn:ietf:params:scim:schemas:extension:example:2.0:Object:list", List.of(B,A)))});
params.add(new Object[] {List.of(A), List.of(A,B,C), List.of(patchOpMatching(PatchOperation.Type.REPLACE, "urn:ietf:params:scim:schemas:extension:example:2.0:Object:list", List.of(A,B,C)))});
return params.toArray();
}
@Test
public void testMultiplePrimitivesInArray() throws Exception {
ScimUser user1 = createUser();
ScimUser user2 = createUser();
ExampleObjectExtension ext1 = new ExampleObjectExtension();
ext1.setList(List.of("D","M","Y","Z","Z","Z","Z","Z"));
user1.addExtension(ext1);
ExampleObjectExtension ext2 = new ExampleObjectExtension();
ext2.setList(List.of("A","Z"));
user2.addExtension(ext2);
List<PatchOperation> operations = new PatchGenerator(schemaRegistry).diff(user1, user2);
scimAssertThat(operations).contains(
patchOpMatching(PatchOperation.Type.REPLACE, ExampleObjectExtension.URN + ":list", List.of("A","Z")));
}
@Test
public void testMoveFormatNameToNicknamePart6() throws Exception {
ScimUser user1 = createUser();
ScimUser user2 = createUser();
String nickname = "John Xander Anyman";
user1.setNickName(null);
user2.setNickName(nickname);
user1.getName().setFormatted(nickname);
user2.getName().setFormatted("");
List<PatchOperation> operations = new PatchGenerator(schemaRegistry).diff(user1, user2);
scimAssertThat(operations).containsOnly(
patchOpMatching(PatchOperation.Type.REPLACE, "name.formatted", ""),
patchOpMatching(PatchOperation.Type.ADD, "nickName", nickname));
}
/**
* This is used to test an error condition. In this scenario a user has multiple phone numbers where home is marked primary and work is not. A SCIM update
* is performed in which the new user only contains a work phone number where the type is null. When this happens it should only be a single DELETE
* operation. Instead it creates four operations: replace value of the home number with the work number value, replace the home type to work,
* remove the primary flag, and remove the work number
*/
@Test
public void testRemoveAndAddTypedElements() throws Exception {
ScimUser user1 = createUser();
ScimUser user2 = createUser();
// remove home number REMOVE
user2.getPhoneNumbers().remove(0);
// change work number REMOVE old and ADD new
PhoneNumber workNumber = user2.getPhoneNumbers().get(0);
assertNotNull(workNumber);
workNumber.setType(null);
List<PatchOperation> operations = new PatchGenerator(schemaRegistry).diff(user1, user2);
scimAssertThat(operations).contains(
patchOpMatching(PatchOperation.Type.REMOVE, "phoneNumbers[type EQ \"home\"]"),
patchOpMatching(PatchOperation.Type.REMOVE, "phoneNumbers[type EQ \"work\"]"),
patchOpMatching(PatchOperation.Type.ADD, "phoneNumbers", workNumber)
);
}
@Test
public void testGroupUpdate() throws FilterParseException {
ScimGroup group1 = new ScimGroup();
group1.setDisplayName("Test Group");
group1.setMembers(new ArrayList<>());
ScimGroup group2 = new ScimGroup();
group2.setDisplayName("Test Group - updated");
group2.setMembers(new ArrayList<>());
List<PatchOperation> operations = new PatchGenerator(schemaRegistry).diff(group1, group2);
scimAssertThat(operations).single()
.matches(PatchOperation.Type.REPLACE, "displayName", "Test Group - updated");
}
@Test
public void testAddGroupMember() throws FilterParseException {
ScimGroup group1 = new ScimGroup();
group1.setDisplayName("Test Group");
group1.setMembers(new ArrayList<>());
group1.getMembers().add(new GroupMembership()
.setType(GroupMembership.Type.USER)
.setValue("user1"));
ScimGroup group2 = new ScimGroup();
group2.setDisplayName("Test Group");
group2.setMembers(new ArrayList<>());
group2.getMembers().add(new GroupMembership()
.setType(GroupMembership.Type.USER)
.setValue("user1"));
GroupMembership user2Ref = new GroupMembership()
.setType(GroupMembership.Type.USER)
.setValue("user2");
group2.getMembers().add(user2Ref);
List<PatchOperation> operations = new PatchGenerator(schemaRegistry).diff(group1, group2);
assertNotNull(operations);
assertEquals(1, operations.size());
PatchOperation operation = operations.get(0);
scimAssertThat(operation).matches(PatchOperation.Type.ADD, "members", user2Ref);
}
@Test
public void testRemoveGroupMember() throws FilterParseException {
ScimGroup group1 = new ScimGroup();
group1.setDisplayName("Test Group");
group1.setMembers(new ArrayList<>());
group1.getMembers().add(new GroupMembership()
.setType(GroupMembership.Type.USER)
.setValue("user1"));
GroupMembership user2Ref = new GroupMembership()
.setType(GroupMembership.Type.USER)
.setValue("user2");
group1.getMembers().add(user2Ref);
ScimGroup group2 = new ScimGroup();
group2.setDisplayName("Test Group");
group2.setMembers(new ArrayList<>());
group2.getMembers().add(new GroupMembership()
.setType(GroupMembership.Type.USER)
.setValue("user1"));
List<PatchOperation> operations = new PatchGenerator(schemaRegistry).diff(group1, group2);
scimAssertThat(operations).contains(patchOpMatching(PatchOperation.Type.REMOVE, "members[value EQ \"user2\"]"));
}
@Test
public void testGroupReplace() throws FilterParseException {
ScimGroup group1 = new ScimGroup();
group1.setDisplayName("Test Group");
group1.setMembers(new ArrayList<>());
group1.getMembers().add(new GroupMembership()
.setType(GroupMembership.Type.USER)
.setValue("user1"));
ScimGroup group2 = new ScimGroup();
group2.setDisplayName("Test Group");
group2.setMembers(new ArrayList<>());
GroupMembership user2Ref = new GroupMembership()
.setType(GroupMembership.Type.USER)
.setValue("user2");
group2.getMembers().add(user2Ref);
List<PatchOperation> operations = new PatchGenerator(schemaRegistry).diff(group1, group2);
scimAssertThat(operations).containsOnly(
patchOpMatching(PatchOperation.Type.REMOVE, "members[value EQ \"user1\"]", null),
patchOpMatching(PatchOperation.Type.ADD, "members", user2Ref)
);
}
public static final Address createHomeAddress() {
Address homeAddress = new Address();
homeAddress.setType("home");
homeAddress.setStreetAddress("123 Fake Street");
homeAddress.setLocality("State College");
homeAddress.setRegion("Pennsylvania");
homeAddress.setCountry("USA");
homeAddress.setPostalCode("16801");
return homeAddress;
}
public static ScimUser createUser() throws PhoneNumberParseException {
ScimUser user = new ScimUser();
user.setId("912345678");
user.setExternalId("912345678");
user.setActive(true);
user.setDisplayName("John Anyman");
user.setTitle("Professor");
user.setUserName("jxa123");
Name name = new Name();
name.setGivenName("John");
name.setMiddleName("Xander");
name.setFamilyName("Anyman");
name.setHonorificSuffix("Jr.");
user.setName(name);
Address homeAddress = new Address();
homeAddress.setType("home");
homeAddress.setStreetAddress("123 Fake Street");
homeAddress.setLocality("State College");
homeAddress.setRegion("Pennsylvania");
homeAddress.setCountry("USA");
homeAddress.setPostalCode("16801");
Address workAddress = new Address();
workAddress.setType("work");
workAddress.setStreetAddress("2 Old Main");
workAddress.setLocality("State College");
workAddress.setRegion("Pennsylvania");
workAddress.setCountry("USA");
workAddress.setPostalCode("16802");
List<Address> address = Stream.of(workAddress, homeAddress).collect(Collectors.toList());
user.setAddresses(address);
Email workEmail = new Email();
workEmail.setPrimary(true);
workEmail.setType("work");
workEmail.setValue("jxa123@psu.edu");
workEmail.setDisplay("jxa123@psu.edu");
Email homeEmail = new Email();
homeEmail.setPrimary(true);
homeEmail.setType("home");
homeEmail.setValue("john@gmail.com");
homeEmail.setDisplay("john@gmail.com");
Email otherEmail = new Email();
otherEmail.setPrimary(true);
otherEmail.setType("other");
otherEmail.setValue("outside@version.net");
otherEmail.setDisplay("outside@version.net");
List<Email> emails = Stream.of(homeEmail, workEmail).collect(Collectors.toList());
user.setEmails(emails);
//"+1(814)867-5309"
PhoneNumber homePhone = new GlobalPhoneNumberBuilder().globalNumber("+1(814)867-5309").build();
homePhone.setType("home");
homePhone.setPrimary(true);
//"+1(814)867-5307"
PhoneNumber workPhone = new GlobalPhoneNumberBuilder().globalNumber("+1(814)867-5307").build();
workPhone.setType("work");
workPhone.setPrimary(false);
List<PhoneNumber> phones = Stream.of(homePhone, workPhone).collect(Collectors.toList());
user.setPhoneNumbers(phones);
EnterpriseExtension enterpriseExtension = new EnterpriseExtension();
enterpriseExtension.setEmployeeNumber("7865");
enterpriseExtension.setDepartment("Dept B.");
Manager manager = new Manager();
manager.setValue("Pointy Haired Boss");
manager.setRef("45353");
enterpriseExtension.setManager(manager);
user.addExtension(enterpriseExtension);
return user;
}
private List<PatchOperation> createUser1PatchOps() throws FilterParseException {
List<PatchOperation> patchOperations = new ArrayList<>();
PatchOperation removePhoneNumberOp = new PatchOperation();
removePhoneNumberOp.setOperation(PatchOperation.Type.REMOVE);
removePhoneNumberOp.setPath(PatchOperationPath.fromString("phoneNumbers[type eq \"home\"]"));
patchOperations.add(removePhoneNumberOp);
return patchOperations;
}
}
|
google/j2objc | 35,534 | jre_emul/android/platform/external/icu/android_icu4j/src/main/tests/android/icu/dev/test/rbbi/RBBITest.java | /* GENERATED SOURCE. DO NOT MODIFY. */
// © 2016 and later: Unicode, Inc. and others.
// License & terms of use: http://www.unicode.org/copyright.html#License
/*
*******************************************************************************
* Copyright (C) 1996-2016, International Business Machines Corporation and
* others. All Rights Reserved.
*******************************************************************************
*/
package android.icu.dev.test.rbbi;
//Regression testing of RuleBasedBreakIterator
//
// TODO: These tests should be mostly retired.
// Much of the test data that was originally here was removed when the RBBI rules
// were updated to match the Unicode boundary TRs, and the data was found to be invalid.
// Much of the remaining data has been moved into the rbbitst.txt test data file,
// which is common between ICU4C and ICU4J. The remaining test data should also be moved,
// or simply retired if it is no longer interesting.
import java.text.CharacterIterator;
import java.util.ArrayList;
import java.util.List;
import org.junit.Test;
import android.icu.dev.test.TestFmwk;
import android.icu.text.BreakIterator;
import android.icu.text.RuleBasedBreakIterator;
import android.icu.util.ULocale;
public class RBBITest extends TestFmwk {
    // Default constructor; the test framework instantiates the class reflectively.
    public RBBITest() {
    }
    /*
     * Devanagari half-form NA: NA (U+0928) + virama (U+094D, suppresses the
     * inherent vowel) + zero-width joiner (U+200D). Used to build conjunct
     * test words in the word-iteration test below.
     */
    private static final String halfNA = "\u0928\u094d\u200d";
    // Tests default-rule-based character (grapheme cluster) iteration.
    // Rebuilds a new iterator from the source rules of the default (prebuilt)
    // iterator, then verifies it segments each entry of chardata as one unit.
    // NOTE: this file is marked "GENERATED SOURCE. DO NOT MODIFY."; the code
    // below is intentionally left untouched.
    @Test
    public void TestDefaultRuleBasedCharacterIteration() {
        RuleBasedBreakIterator rbbi = (RuleBasedBreakIterator) BreakIterator.getCharacterInstance();
        logln("Testing the RBBI for character iteration by using default rules");
        // fetch the rules used to create the above RuleBasedBreakIterator
        String defaultRules = rbbi.toString();
        RuleBasedBreakIterator charIterDefault = null;
        try {
            charIterDefault = new RuleBasedBreakIterator(defaultRules);
        } catch (IllegalArgumentException iae) {
            errln("ERROR: failed construction in TestDefaultRuleBasedCharacterIteration()" + iae.toString());
        }
        // Each list entry is expected to be exactly one grapheme cluster.
        List<String> chardata = new ArrayList<String>();
        chardata.add("H");
        chardata.add("e");
        chardata.add("l");
        chardata.add("l");
        chardata.add("o");
        chardata.add("e\u0301"); // acuteE
        chardata.add("&");
        chardata.add("e\u0303"); // tildaE
        // devanagiri characters for Hindi support
        chardata.add("\u0906"); // devanagiri AA
        // chardata.add("\u093e\u0901"); //devanagiri vowelsign AA+ chandrabindhu
        chardata.add("\u0916\u0947"); // devanagiri KHA+vowelsign E
        chardata.add("\u0938\u0941\u0902"); // devanagiri SA+vowelsign U + anusvara(bindu)
        chardata.add("\u0926"); // devanagiri consonant DA
        chardata.add("\u0930"); // devanagiri consonant RA
        // chardata.add("\u0939\u094c"); //devanagiri HA+vowel sign AI
        chardata.add("\u0964"); // devanagiri danda
        // end hindi characters
        chardata.add("A\u0302"); // circumflexA
        chardata.add("i\u0301"); // acuteBelowI
        // conjoining jamo...
        chardata.add("\u1109\u1161\u11bc");
        chardata.add("\u1112\u1161\u11bc");
        chardata.add("\n");
        chardata.add("\r\n"); // keep CRLF sequences together
        chardata.add("S\u0300"); // graveS
        chardata.add("i\u0301"); // acuteBelowI
        chardata.add("!");
        // What follows is a string of Korean characters (I found it in the Yellow Pages
        // ad for the Korean Presbyterian Church of San Francisco, and I hope I transcribed
        // it correctly), first as precomposed syllables, and then as conjoining jamo.
        // Both sequences should be semantically identical and break the same way.
        // precomposed syllables...
        chardata.add("\uc0c1");
        chardata.add("\ud56d");
        chardata.add(" ");
        chardata.add("\ud55c");
        chardata.add("\uc778");
        chardata.add(" ");
        chardata.add("\uc5f0");
        chardata.add("\ud569");
        chardata.add(" ");
        chardata.add("\uc7a5");
        chardata.add("\ub85c");
        chardata.add("\uad50");
        chardata.add("\ud68c");
        chardata.add(" ");
        // conjoining jamo...
        chardata.add("\u1109\u1161\u11bc");
        chardata.add("\u1112\u1161\u11bc");
        chardata.add(" ");
        chardata.add("\u1112\u1161\u11ab");
        chardata.add("\u110b\u1175\u11ab");
        chardata.add(" ");
        chardata.add("\u110b\u1167\u11ab");
        chardata.add("\u1112\u1161\u11b8");
        chardata.add(" ");
        chardata.add("\u110c\u1161\u11bc");
        chardata.add("\u1105\u1169");
        chardata.add("\u1100\u116d");
        chardata.add("\u1112\u116c");
        generalIteratorTest(charIterDefault, chardata);
    }
@Test
public void TestDefaultRuleBasedWordIteration() {
logln("Testing the RBBI for word iteration using default rules");
RuleBasedBreakIterator rbbi = (RuleBasedBreakIterator) BreakIterator.getWordInstance();
// fetch the rules used to create the above RuleBasedBreakIterator
String defaultRules = rbbi.toString();
RuleBasedBreakIterator wordIterDefault = null;
try {
wordIterDefault = new RuleBasedBreakIterator(defaultRules);
} catch (IllegalArgumentException iae) {
errln("ERROR: failed construction in TestDefaultRuleBasedWordIteration() -- custom rules" + iae.toString());
}
List<String> worddata = new ArrayList<String>();
worddata.add("Write");
worddata.add(" ");
worddata.add("wordrules");
worddata.add(".");
worddata.add(" ");
// worddata.add("alpha-beta-gamma");
worddata.add(" ");
worddata.add("\u092f\u0939");
worddata.add(" ");
worddata.add("\u0939\u093f" + halfNA + "\u0926\u0940");
worddata.add(" ");
worddata.add("\u0939\u0948");
// worddata.add("\u0964"); //danda followed by a space
worddata.add(" ");
worddata.add("\u0905\u093e\u092a");
worddata.add(" ");
worddata.add("\u0938\u093f\u0916\u094b\u0917\u0947");
worddata.add("?");
worddata.add(" ");
worddata.add("\r");
worddata.add("It's");
worddata.add(" ");
// worddata.add("$30.10");
worddata.add(" ");
worddata.add(" ");
worddata.add("Badges");
worddata.add("?");
worddata.add(" ");
worddata.add("BADGES");
worddata.add("!");
worddata.add("1000,233,456.000");
worddata.add(" ");
generalIteratorTest(wordIterDefault, worddata);
}
    // private static final String kParagraphSeparator = "\u2029";
    // U+2028 LINE SEPARATOR -- appended to one fragment in the line-iteration test data.
    private static final String kLineSeparator = "\u2028";
@Test
public void TestDefaultRuleBasedSentenceIteration() {
logln("Testing the RBBI for sentence iteration using default rules");
RuleBasedBreakIterator rbbi = (RuleBasedBreakIterator) BreakIterator.getSentenceInstance();
// fetch the rules used to create the above RuleBasedBreakIterator
String defaultRules = rbbi.toString();
RuleBasedBreakIterator sentIterDefault = null;
try {
sentIterDefault = new RuleBasedBreakIterator(defaultRules);
} catch (IllegalArgumentException iae) {
errln("ERROR: failed construction in TestDefaultRuleBasedSentenceIteration()" + iae.toString());
}
List<String> sentdata = new ArrayList<String>();
sentdata.add("(This is it.) ");
sentdata.add("Testing the sentence iterator. ");
sentdata.add("\"This isn\'t it.\" ");
sentdata.add("Hi! ");
sentdata.add("This is a simple sample sentence. ");
sentdata.add("(This is it.) ");
sentdata.add("This is a simple sample sentence. ");
sentdata.add("\"This isn\'t it.\" ");
sentdata.add("Hi! ");
sentdata.add("This is a simple sample sentence. ");
sentdata.add("It does not have to make any sense as you can see. ");
sentdata.add("Nel mezzo del cammin di nostra vita, mi ritrovai in una selva oscura. ");
sentdata.add("Che la dritta via aveo smarrita. ");
generalIteratorTest(sentIterDefault, sentdata);
}
@Test
public void TestDefaultRuleBasedLineIteration() {
logln("Testing the RBBI for line iteration using default rules");
RuleBasedBreakIterator rbbi = (RuleBasedBreakIterator) RuleBasedBreakIterator.getLineInstance();
// fetch the rules used to create the above RuleBasedBreakIterator
String defaultRules = rbbi.toString();
RuleBasedBreakIterator lineIterDefault = null;
try {
lineIterDefault = new RuleBasedBreakIterator(defaultRules);
} catch (IllegalArgumentException iae) {
errln("ERROR: failed construction in TestDefaultRuleBasedLineIteration()" + iae.toString());
}
List<String> linedata = new ArrayList<String>();
linedata.add("Multi-");
linedata.add("Level ");
linedata.add("example ");
linedata.add("of ");
linedata.add("a ");
linedata.add("semi-");
linedata.add("idiotic ");
linedata.add("non-");
linedata.add("sensical ");
linedata.add("(non-");
linedata.add("important) ");
linedata.add("sentence. ");
linedata.add("Hi ");
linedata.add("Hello ");
linedata.add("How\n");
linedata.add("are\r");
linedata.add("you" + kLineSeparator);
linedata.add("fine.\t");
linedata.add("good. ");
linedata.add("Now\r");
linedata.add("is\n");
linedata.add("the\r\n");
linedata.add("time\n");
linedata.add("\r");
linedata.add("for\r");
linedata.add("\r");
linedata.add("all");
generalIteratorTest(lineIterDefault, linedata);
}
// =========================================================================
// general test subroutines
// =========================================================================
private void generalIteratorTest(RuleBasedBreakIterator rbbi, List<String> expectedResult) {
StringBuffer buffer = new StringBuffer();
String text;
for (int i = 0; i < expectedResult.size(); i++) {
text = expectedResult.get(i);
buffer.append(text);
}
text = buffer.toString();
if (rbbi == null) {
errln("null iterator, test skipped.");
return;
}
rbbi.setText(text);
List<String> nextResults = _testFirstAndNext(rbbi, text);
List<String> previousResults = _testLastAndPrevious(rbbi, text);
logln("comparing forward and backward...");
//TODO(junit) - needs to be rewritten
//int errs = getErrorCount();
compareFragmentLists("forward iteration", "backward iteration", nextResults, previousResults);
//if (getErrorCount() == errs) {
logln("comparing expected and actual...");
compareFragmentLists("expected result", "actual result", expectedResult, nextResults);
logln("comparing expected and actual...");
compareFragmentLists("expected result", "actual result", expectedResult, nextResults);
//}
int[] boundaries = new int[expectedResult.size() + 3];
boundaries[0] = RuleBasedBreakIterator.DONE;
boundaries[1] = 0;
for (int i = 0; i < expectedResult.size(); i++) {
boundaries[i + 2] = boundaries[i + 1] + (expectedResult.get(i).length());
}
boundaries[boundaries.length - 1] = RuleBasedBreakIterator.DONE;
_testFollowing(rbbi, text, boundaries);
_testPreceding(rbbi, text, boundaries);
_testIsBoundary(rbbi, text, boundaries);
doMultipleSelectionTest(rbbi, text);
}
private List<String> _testFirstAndNext(RuleBasedBreakIterator rbbi, String text) {
int p = rbbi.first();
int lastP = p;
List<String> result = new ArrayList<String>();
if (p != 0) {
errln("first() returned " + p + " instead of 0");
}
while (p != RuleBasedBreakIterator.DONE) {
p = rbbi.next();
if (p != RuleBasedBreakIterator.DONE) {
if (p <= lastP) {
errln("next() failed to move forward: next() on position "
+ lastP + " yielded " + p);
}
result.add(text.substring(lastP, p));
}
else {
if (lastP != text.length()) {
errln("next() returned DONE prematurely: offset was "
+ lastP + " instead of " + text.length());
}
}
lastP = p;
}
return result;
}
private List<String> _testLastAndPrevious(RuleBasedBreakIterator rbbi, String text) {
int p = rbbi.last();
int lastP = p;
List<String> result = new ArrayList<String>();
if (p != text.length()) {
errln("last() returned " + p + " instead of " + text.length());
}
while (p != RuleBasedBreakIterator.DONE) {
p = rbbi.previous();
if (p != RuleBasedBreakIterator.DONE) {
if (p >= lastP) {
errln("previous() failed to move backward: previous() on position "
+ lastP + " yielded " + p);
}
result.add(0, text.substring(p, lastP));
}
else {
if (lastP != 0) {
errln("previous() returned DONE prematurely: offset was "
+ lastP + " instead of 0");
}
}
lastP = p;
}
return result;
}
    /**
     * Compares two break-fragment lists that should cover the same underlying
     * text.  On a mismatch it scans ahead by cumulative character count until
     * the two lists re-synchronize, logs the differing fragments from each
     * side, and reports one error for the discrepancy.
     *
     * @param f1Name label for the first list, used in log/error output
     * @param f2Name label for the second list, used in log/error output
     * @param f1     first fragment list
     * @param f2     second fragment list
     */
    private void compareFragmentLists(String f1Name, String f2Name, List<String> f1, List<String> f2) {
        int p1 = 0;
        int p2 = 0;
        String s1;
        String s2;
        int t1 = 0;   // cumulative character count consumed from f1
        int t2 = 0;   // cumulative character count consumed from f2
        while (p1 < f1.size() && p2 < f2.size()) {
            s1 = f1.get(p1);
            s2 = f2.get(p2);
            t1 += s1.length();
            t2 += s2.length();
            if (s1.equals(s2)) {
                debugLogln("   >" + s1 + "<");
                ++p1;
                ++p2;
            }
            else {
                // Mismatch: advance whichever side is behind (by character
                // count) until both sides reach the same text offset again.
                int tempT1 = t1;
                int tempT2 = t2;
                int tempP1 = p1;
                int tempP2 = p2;
                while (tempT1 != tempT2 && tempP1 < f1.size() && tempP2 < f2.size()) {
                    while (tempT1 < tempT2 && tempP1 < f1.size()) {
                        tempT1 += (f1.get(tempP1)).length();
                        ++tempP1;
                    }
                    while (tempT2 < tempT1 && tempP2 < f2.size()) {
                        tempT2 += (f2.get(tempP2)).length();
                        ++tempP2;
                    }
                }
                // Dump the fragments from each side that fall inside the
                // mismatched region, then flag a single error for it.
                logln("*** " + f1Name + " has:");
                while (p1 <= tempP1 && p1 < f1.size()) {
                    s1 = f1.get(p1);
                    t1 += s1.length();
                    debugLogln(" *** >" + s1 + "<");
                    ++p1;
                }
                logln("***** " + f2Name + " has:");
                while (p2 <= tempP2 && p2 < f2.size()) {
                    s2 = f2.get(p2);
                    t2 += s2.length();
                    debugLogln(" ***** >" + s2 + "<");
                    ++p2;
                }
                errln("Discrepancy between " + f1Name + " and " + f2Name);
            }
        }
    }
private void _testFollowing(RuleBasedBreakIterator rbbi, String text, int[] boundaries) {
logln("testFollowing():");
int p = 2;
for(int i = 0; i <= text.length(); i++) {
if (i == boundaries[p])
++p;
int b = rbbi.following(i);
logln("rbbi.following(" + i + ") -> " + b);
if (b != boundaries[p])
errln("Wrong result from following() for " + i + ": expected " + boundaries[p]
+ ", got " + b);
}
}
private void _testPreceding(RuleBasedBreakIterator rbbi, String text, int[] boundaries) {
logln("testPreceding():");
int p = 0;
for(int i = 0; i <= text.length(); i++) {
int b = rbbi.preceding(i);
logln("rbbi.preceding(" + i + ") -> " + b);
if (b != boundaries[p])
errln("Wrong result from preceding() for " + i + ": expected " + boundaries[p]
+ ", got " + b);
if (i == boundaries[p + 1])
++p;
}
}
private void _testIsBoundary(RuleBasedBreakIterator rbbi, String text, int[] boundaries) {
logln("testIsBoundary():");
int p = 1;
boolean isB;
for(int i = 0; i <= text.length(); i++) {
isB = rbbi.isBoundary(i);
logln("rbbi.isBoundary(" + i + ") -> " + isB);
if(i == boundaries[p]) {
if (!isB)
errln("Wrong result from isBoundary() for " + i + ": expected true, got false");
++p;
}
else {
if(isB)
errln("Wrong result from isBoundary() for " + i + ": expected false, got true");
}
}
}
    /**
     * Verifies that next(n) from first() (and, with negative n, from last())
     * lands on the same offsets that n repeated calls to next()/previous()
     * reach.  A clone is used as the probe so the reference iterator's state
     * is never disturbed.
     */
    private void doMultipleSelectionTest(RuleBasedBreakIterator iterator, String testText)
    {
        logln("Multiple selection test...");
        RuleBasedBreakIterator testIterator = (RuleBasedBreakIterator)iterator.clone();
        int offset = iterator.first();
        int testOffset;
        int count = 0;
        do {
            // Reset the probe to the start each pass, then jump forward count steps.
            testOffset = testIterator.first();
            testOffset = testIterator.next(count);
            logln("next(" + count + ") -> " + testOffset);
            if (offset != testOffset)
                errln("next(n) and next() not returning consistent results: for step " + count + ", next(n) returned " + testOffset + " and next() had " + offset);
            if (offset != RuleBasedBreakIterator.DONE) {
                count++;
                offset = iterator.next();
            }
        } while (offset != RuleBasedBreakIterator.DONE);
        // now do it backwards... (negative n steps backward from last())
        offset = iterator.last();
        count = 0;
        do {
            testOffset = testIterator.last();
            testOffset = testIterator.next(count);
            logln("next(" + count + ") -> " + testOffset);
            if (offset != testOffset)
                errln("next(n) and next() not returning consistent results: for step " + count + ", next(n) returned " + testOffset + " and next() had " + offset);
            if (offset != RuleBasedBreakIterator.DONE) {
                count--;
                offset = iterator.previous();
            }
        } while (offset != RuleBasedBreakIterator.DONE);
    }
private void debugLogln(String s) {
final String zeros = "0000";
String temp;
StringBuffer out = new StringBuffer();
for (int i = 0; i < s.length(); i++) {
char c = s.charAt(i);
if (c >= ' ' && c < '\u007f')
out.append(c);
else {
out.append("\\u");
temp = Integer.toHexString(c);
out.append(zeros.substring(0, 4 - temp.length()));
out.append(temp);
}
}
logln(out.toString());
}
    /**
     * Exercises the Thai dictionary-based break iterator: word breaks over a
     * mixed Latin/Thai string (forwards, then backwards), word and line breaks
     * over a Thai string containing spaces and an invalid sequence, and a few
     * extra preceding()/following()/getRuleStatusVec() calls for coverage.
     */
    @Test
    public void TestThaiDictionaryBreakIterator() {
        int position;
        int index;
        // result[] lists the forward break offsets followed, in the same array,
        // by the backward break offsets (index keeps running across both loops).
        int result[] = { 1, 2, 5, 10, 11, 12, 11, 10, 5, 2, 1, 0 };
        char ctext[] = {
            0x0041, 0x0020,
            0x0E01, 0x0E32, 0x0E23, 0x0E17, 0x0E14, 0x0E25, 0x0E2D, 0x0E07,
            0x0020, 0x0041
        };
        String text = new String(ctext);
        ULocale locale = ULocale.createCanonical("th");
        BreakIterator b = BreakIterator.getWordInstance(locale);
        b.setText(text);
        index = 0;
        // Test forward iteration
        while ((position = b.next())!= BreakIterator.DONE) {
            if (position != result[index++]) {
                errln("Error with ThaiDictionaryBreakIterator forward iteration test at " + position + ".\nShould have been " + result[index-1]);
            }
        }
        // Test backward iteration
        while ((position = b.previous())!= BreakIterator.DONE) {
            if (position != result[index++]) {
                errln("Error with ThaiDictionaryBreakIterator backward iteration test at " + position + ".\nShould have been " + result[index-1]);
            }
        }
        //Test invalid sequence and spaces
        char text2[] = {
            0x0E01, 0x0E39, 0x0020, 0x0E01, 0x0E34, 0x0E19, 0x0E01, 0x0E38, 0x0E49, 0x0E07, 0x0020, 0x0E1B,
            0x0E34, 0x0E49, 0x0E48, 0x0E07, 0x0E2D, 0x0E22, 0x0E39, 0x0E48, 0x0E43, 0x0E19,
            0x0E16, 0x0E49, 0x0E33
        };
        int expectedWordResult[] = {
            2, 3, 6, 10, 11, 15, 17, 20, 22
        };
        int expectedLineResult[] = {
            3, 6, 11, 15, 17, 20, 22
        };
        BreakIterator brk = BreakIterator.getWordInstance(new ULocale("th"));
        brk.setText(new String(text2));
        position = index = 0;
        while ((position = brk.next()) != BreakIterator.DONE && position < text2.length) {
            if (position != expectedWordResult[index++]) {
                errln("Incorrect break given by thai word break iterator. Expected: " + expectedWordResult[index-1] + " Got: " + position);
            }
        }
        brk = BreakIterator.getLineInstance(new ULocale("th"));
        brk.setText(new String(text2));
        position = index = 0;
        while ((position = brk.next()) != BreakIterator.DONE && position < text2.length) {
            if (position != expectedLineResult[index++]) {
                errln("Incorrect break given by thai line break iterator. Expected: " + expectedLineResult[index-1] + " Got: " + position);
            }
        }
        // Improve code coverage
        if (brk.preceding(expectedLineResult[1]) != expectedLineResult[0]) {
            errln("Incorrect preceding position.");
        }
        if (brk.following(expectedLineResult[1]) != expectedLineResult[2]) {
            errln("Incorrect following position.");
        }
        int []fillInArray = new int[2];
        if (((RuleBasedBreakIterator)brk).getRuleStatusVec(fillInArray) != 1 || fillInArray[0] != 0) {
            errln("Error: Since getRuleStatusVec is not supported in DictionaryBasedBreakIterator, it should return 1 and fillInArray[0] == 0.");
        }
    }
// TODO: Move these test cases to rbbitst.txt if they aren't there already, then remove this test. It is redundant.
@Test
public void TestTailoredBreaks() {
class TBItem {
private int type;
private ULocale locale;
private String text;
private int[] expectOffsets;
TBItem(int typ, ULocale loc, String txt, int[] eOffs) {
type = typ;
locale = loc;
text = txt;
expectOffsets = eOffs;
}
private static final int maxOffsetCount = 128;
private boolean offsetsMatchExpected(int[] foundOffsets, int foundOffsetsLength) {
if ( foundOffsetsLength != expectOffsets.length ) {
return false;
}
for (int i = 0; i < foundOffsetsLength; i++) {
if ( foundOffsets[i] != expectOffsets[i] ) {
return false;
}
}
return true;
}
private String formatOffsets(int[] offsets, int length) {
StringBuffer buildString = new StringBuffer(4*maxOffsetCount);
for (int i = 0; i < length; i++) {
buildString.append(" " + offsets[i]);
}
return buildString.toString();
}
@Test
public void doTest() {
BreakIterator brkIter;
switch( type ) {
case BreakIterator.KIND_CHARACTER: brkIter = BreakIterator.getCharacterInstance(locale); break;
case BreakIterator.KIND_WORD: brkIter = BreakIterator.getWordInstance(locale); break;
case BreakIterator.KIND_LINE: brkIter = BreakIterator.getLineInstance(locale); break;
case BreakIterator.KIND_SENTENCE: brkIter = BreakIterator.getSentenceInstance(locale); break;
default: errln("Unsupported break iterator type " + type); return;
}
brkIter.setText(text);
int[] foundOffsets = new int[maxOffsetCount];
int offset, foundOffsetsCount = 0;
// do forwards iteration test
while ( foundOffsetsCount < maxOffsetCount && (offset = brkIter.next()) != BreakIterator.DONE ) {
foundOffsets[foundOffsetsCount++] = offset;
}
if ( !offsetsMatchExpected(foundOffsets, foundOffsetsCount) ) {
// log error for forwards test
String textToDisplay = (text.length() <= 16)? text: text.substring(0,16);
errln("For type " + type + " " + locale + ", text \"" + textToDisplay + "...\"" +
"; expect " + expectOffsets.length + " offsets:" + formatOffsets(expectOffsets, expectOffsets.length) +
"; found " + foundOffsetsCount + " offsets fwd:" + formatOffsets(foundOffsets, foundOffsetsCount) );
} else {
// do backwards iteration test
--foundOffsetsCount; // back off one from the end offset
while ( foundOffsetsCount > 0 ) {
offset = brkIter.previous();
if ( offset != foundOffsets[--foundOffsetsCount] ) {
// log error for backwards test
String textToDisplay = (text.length() <= 16)? text: text.substring(0,16);
errln("For type " + type + " " + locale + ", text \"" + textToDisplay + "...\"" +
"; expect " + expectOffsets.length + " offsets:" + formatOffsets(expectOffsets, expectOffsets.length) +
"; found rev offset " + offset + " where expect " + foundOffsets[foundOffsetsCount] );
break;
}
}
}
}
}
// KIND_SENTENCE "el"
final String elSentText = "\u0391\u03B2, \u03B3\u03B4; \u0395 \u03B6\u03B7\u037E \u0398 \u03B9\u03BA. " +
"\u039B\u03BC \u03BD\u03BE! \u039F\u03C0, \u03A1\u03C2? \u03A3";
final int[] elSentTOffsets = { 8, 14, 20, 27, 35, 36 };
final int[] elSentROffsets = { 20, 27, 35, 36 };
// KIND_CHARACTER "th"
final String thCharText = "\u0E01\u0E23\u0E30\u0E17\u0E48\u0E2D\u0E21\u0E23\u0E08\u0E19\u0E32 " +
"(\u0E2A\u0E38\u0E0A\u0E32\u0E15\u0E34-\u0E08\u0E38\u0E11\u0E32\u0E21\u0E32\u0E28) " +
"\u0E40\u0E14\u0E47\u0E01\u0E21\u0E35\u0E1B\u0E31\u0E0D\u0E2B\u0E32 ";
final int[] thCharTOffsets = { 1, 2, 3, 5, 6, 7, 8, 9, 10, 11,
12, 13, 15, 16, 17, 19, 20, 22, 23, 24, 25, 26, 27, 28,
29, 30, 32, 33, 35, 37, 38, 39, 40, 41 };
//starting in Unicode 6.1, root behavior should be the same as Thai above
//final int[] thCharROffsets = { 1, 3, 5, 6, 7, 8, 9, 11,
// 12, 13, 15, 17, 19, 20, 22, 24, 26, 27, 28,
// 29, 32, 33, 35, 37, 38, 40, 41 };
final TBItem[] tests = {
new TBItem( BreakIterator.KIND_SENTENCE, new ULocale("el"), elSentText, elSentTOffsets ),
new TBItem( BreakIterator.KIND_SENTENCE, ULocale.ROOT, elSentText, elSentROffsets ),
new TBItem( BreakIterator.KIND_CHARACTER, new ULocale("th"), thCharText, thCharTOffsets ),
new TBItem( BreakIterator.KIND_CHARACTER, ULocale.ROOT, thCharText, thCharTOffsets ),
};
for (int iTest = 0; iTest < tests.length; iTest++) {
tests[iTest].doTest();
}
}
/* Tests the method public Object clone() */
@Test
public void TestClone() {
RuleBasedBreakIterator rbbi = new RuleBasedBreakIterator(".;");
try {
rbbi.setText((CharacterIterator) null);
if (((RuleBasedBreakIterator) rbbi.clone()).getText() != null)
errln("RuleBasedBreakIterator.clone() was suppose to return "
+ "the same object because fText is set to null.");
} catch (Exception e) {
errln("RuleBasedBreakIterator.clone() was not suppose to return " + "an exception.");
}
}
/*
* Tests the method public boolean equals(Object that)
*/
@Test
public void TestEquals() {
RuleBasedBreakIterator rbbi = new RuleBasedBreakIterator(".;");
RuleBasedBreakIterator rbbi1 = new RuleBasedBreakIterator(".;");
// TODO: Tests when "if (fRData != other.fRData && (fRData == null || other.fRData == null))" is true
// Tests when "if (fText == null || other.fText == null)" is true
rbbi.setText((CharacterIterator) null);
if (rbbi.equals(rbbi1)) {
errln("RuleBasedBreakIterator.equals(Object) was not suppose to return "
+ "true when the other object has a null fText.");
}
// Tests when "if (fText == null && other.fText == null)" is true
rbbi1.setText((CharacterIterator) null);
if (!rbbi.equals(rbbi1)) {
errln("RuleBasedBreakIterator.equals(Object) was not suppose to return "
+ "false when both objects has a null fText.");
}
// Tests when an exception occurs
if (rbbi.equals(0)) {
errln("RuleBasedBreakIterator.equals(Object) was suppose to return " + "false when comparing to integer 0.");
}
if (rbbi.equals(0.0)) {
errln("RuleBasedBreakIterator.equals(Object) was suppose to return " + "false when comparing to float 0.0.");
}
if (rbbi.equals("0")) {
errln("RuleBasedBreakIterator.equals(Object) was suppose to return "
+ "false when comparing to string '0'.");
}
}
/*
* Tests the method public int first()
*/
@Test
public void TestFirst() {
RuleBasedBreakIterator rbbi = new RuleBasedBreakIterator(".;");
// Tests when "if (fText == null)" is true
rbbi.setText((CharacterIterator) null);
assertEquals("RuleBasedBreakIterator.first()", BreakIterator.DONE, rbbi.first());
rbbi.setText("abc");
assertEquals("RuleBasedBreakIterator.first()", 0, rbbi.first());
assertEquals("RuleBasedBreakIterator.next()", 1, rbbi.next());
}
/*
* Tests the method public int last()
*/
@Test
public void TestLast() {
RuleBasedBreakIterator rbbi = new RuleBasedBreakIterator(".;");
// Tests when "if (fText == null)" is true
rbbi.setText((CharacterIterator) null);
if (rbbi.last() != BreakIterator.DONE) {
errln("RuleBasedBreakIterator.last() was suppose to return "
+ "BreakIterator.DONE when the object has a null fText.");
}
}
/*
* Tests the method public int following(int offset)
*/
@Test
public void TestFollowing() {
RuleBasedBreakIterator rbbi = new RuleBasedBreakIterator(".;");
// Tests when "else if (offset < fText.getBeginIndex())" is true
rbbi.setText("dummy");
if (rbbi.following(-1) != 0) {
errln("RuleBasedBreakIterator.following(-1) was suppose to return "
+ "0 when the object has a fText of dummy.");
}
}
/*
* Tests the method public int preceding(int offset)
*/
@Test
public void TestPreceding() {
RuleBasedBreakIterator rbbi = new RuleBasedBreakIterator(".;");
// Tests when "if (fText == null || offset > fText.getEndIndex())" is true
rbbi.setText((CharacterIterator)null);
if (rbbi.preceding(-1) != BreakIterator.DONE) {
errln("RuleBasedBreakIterator.preceding(-1) was suppose to return "
+ "0 when the object has a fText of null.");
}
// Tests when "else if (offset < fText.getBeginIndex())" is true
rbbi.setText("dummy");
if (rbbi.preceding(-1) != 0) {
errln("RuleBasedBreakIterator.preceding(-1) was suppose to return "
+ "0 when the object has a fText of dummy.");
}
}
/* Tests the method public int current() */
@Test
public void TestCurrent(){
RuleBasedBreakIterator rbbi = new RuleBasedBreakIterator(".;");
// Tests when "(fText != null) ? fText.getIndex() : BreakIterator.DONE" is true and false
rbbi.setText((CharacterIterator)null);
if(rbbi.current() != BreakIterator.DONE){
errln("RuleBasedBreakIterator.current() was suppose to return "
+ "BreakIterator.DONE when the object has a fText of null.");
}
rbbi.setText("dummy");
if(rbbi.current() != 0){
errln("RuleBasedBreakIterator.current() was suppose to return "
+ "0 when the object has a fText of dummy.");
}
}
@Test
public void TestBug7547() {
try {
new RuleBasedBreakIterator("");
fail("TestBug7547: RuleBasedBreakIterator constructor failed to throw an exception with empty rules.");
}
catch (IllegalArgumentException e) {
// expected exception with empty rules.
}
catch (Exception e) {
fail("TestBug7547: Unexpected exception while creating RuleBasedBreakIterator: " + e);
}
}
@Test
public void TestBug12797() {
String rules = "!!chain; !!forward; $v=b c; a b; $v; !!reverse; .*;";
RuleBasedBreakIterator bi = new RuleBasedBreakIterator(rules);
bi.setText("abc");
bi.first();
assertEquals("Rule chaining test", 3, bi.next());
}
}
|
googleapis/google-cloud-java | 36,077 | java-artifact-registry/proto-google-cloud-artifact-registry-v1/src/main/java/com/google/devtools/artifactregistry/v1/VirtualRepositoryConfig.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/devtools/artifactregistry/v1/repository.proto
// Protobuf Java Version: 3.25.8
package com.google.devtools.artifactregistry.v1;
/**
*
*
* <pre>
* Virtual repository configuration.
* </pre>
*
* Protobuf type {@code google.devtools.artifactregistry.v1.VirtualRepositoryConfig}
*/
public final class VirtualRepositoryConfig extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.devtools.artifactregistry.v1.VirtualRepositoryConfig)
VirtualRepositoryConfigOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use VirtualRepositoryConfig.newBuilder() to construct.
  private VirtualRepositoryConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // No-arg constructor used for the default instance; the repeated field
  // starts out as an immutable empty list.
  private VirtualRepositoryConfig() {
    upstreamPolicies_ = java.util.Collections.emptyList();
  }

  // Invoked reflectively by the protobuf runtime when allocating new instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new VirtualRepositoryConfig();
  }
  // Message descriptor, generated from google/devtools/artifactregistry/v1/repository.proto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.devtools.artifactregistry.v1.RepositoryProto
        .internal_static_google_devtools_artifactregistry_v1_VirtualRepositoryConfig_descriptor;
  }

  // Wires the generated reflection table to this class and its Builder.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.devtools.artifactregistry.v1.RepositoryProto
        .internal_static_google_devtools_artifactregistry_v1_VirtualRepositoryConfig_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.devtools.artifactregistry.v1.VirtualRepositoryConfig.class,
            com.google.devtools.artifactregistry.v1.VirtualRepositoryConfig.Builder.class);
  }
  public static final int UPSTREAM_POLICIES_FIELD_NUMBER = 1;

  // Backing storage for the repeated `upstream_policies` field; immutable on
  // a built message (populated only through the Builder).
  @SuppressWarnings("serial")
  private java.util.List<com.google.devtools.artifactregistry.v1.UpstreamPolicy> upstreamPolicies_;

  /**
   *
   *
   * <pre>
   * Policies that configure the upstream artifacts distributed by the Virtual
   * Repository. Upstream policies cannot be set on a standard repository.
   * </pre>
   *
   * <code>repeated .google.devtools.artifactregistry.v1.UpstreamPolicy upstream_policies = 1;
   * </code>
   */
  @java.lang.Override
  public java.util.List<com.google.devtools.artifactregistry.v1.UpstreamPolicy>
      getUpstreamPoliciesList() {
    return upstreamPolicies_;
  }

  /**
   *
   *
   * <pre>
   * Policies that configure the upstream artifacts distributed by the Virtual
   * Repository. Upstream policies cannot be set on a standard repository.
   * </pre>
   *
   * <code>repeated .google.devtools.artifactregistry.v1.UpstreamPolicy upstream_policies = 1;
   * </code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.devtools.artifactregistry.v1.UpstreamPolicyOrBuilder>
      getUpstreamPoliciesOrBuilderList() {
    return upstreamPolicies_;
  }

  /**
   *
   *
   * <pre>
   * Policies that configure the upstream artifacts distributed by the Virtual
   * Repository. Upstream policies cannot be set on a standard repository.
   * </pre>
   *
   * <code>repeated .google.devtools.artifactregistry.v1.UpstreamPolicy upstream_policies = 1;
   * </code>
   */
  @java.lang.Override
  public int getUpstreamPoliciesCount() {
    return upstreamPolicies_.size();
  }

  /**
   *
   *
   * <pre>
   * Policies that configure the upstream artifacts distributed by the Virtual
   * Repository. Upstream policies cannot be set on a standard repository.
   * </pre>
   *
   * <code>repeated .google.devtools.artifactregistry.v1.UpstreamPolicy upstream_policies = 1;
   * </code>
   */
  @java.lang.Override
  public com.google.devtools.artifactregistry.v1.UpstreamPolicy getUpstreamPolicies(int index) {
    return upstreamPolicies_.get(index);
  }

  /**
   *
   *
   * <pre>
   * Policies that configure the upstream artifacts distributed by the Virtual
   * Repository. Upstream policies cannot be set on a standard repository.
   * </pre>
   *
   * <code>repeated .google.devtools.artifactregistry.v1.UpstreamPolicy upstream_policies = 1;
   * </code>
   */
  @java.lang.Override
  public com.google.devtools.artifactregistry.v1.UpstreamPolicyOrBuilder
      getUpstreamPoliciesOrBuilder(int index) {
    return upstreamPolicies_.get(index);
  }
  // Memoized isInitialized() result: -1 unknown, 0 false, 1 true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // This message has no required fields, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }

  // Serializes each upstream policy as field 1, then any unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < upstreamPolicies_.size(); i++) {
      output.writeMessage(1, upstreamPolicies_.get(i));
    }
    getUnknownFields().writeTo(output);
  }

  // Computes (and memoizes in memoizedSize) the wire size of this message.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < upstreamPolicies_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, upstreamPolicies_.get(i));
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality: compares the upstream_policies field and unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.devtools.artifactregistry.v1.VirtualRepositoryConfig)) {
      return super.equals(obj);
    }
    com.google.devtools.artifactregistry.v1.VirtualRepositoryConfig other =
        (com.google.devtools.artifactregistry.v1.VirtualRepositoryConfig) obj;
    if (!getUpstreamPoliciesList().equals(other.getUpstreamPoliciesList())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  // Hash consistent with equals(): mixes the descriptor, the repeated field
  // (when non-empty) and unknown fields; memoized after the first call.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getUpstreamPoliciesCount() > 0) {
      hash = (37 * hash) + UPSTREAM_POLICIES_FIELD_NUMBER;
      hash = (53 * hash) + getUpstreamPoliciesList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // --- Standard generated parse entry points: each overload delegates to
  // PARSER or to the GeneratedMessageV3 IO helpers for one input form. ---
  public static com.google.devtools.artifactregistry.v1.VirtualRepositoryConfig parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.devtools.artifactregistry.v1.VirtualRepositoryConfig parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.devtools.artifactregistry.v1.VirtualRepositoryConfig parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.devtools.artifactregistry.v1.VirtualRepositoryConfig parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.devtools.artifactregistry.v1.VirtualRepositoryConfig parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.devtools.artifactregistry.v1.VirtualRepositoryConfig parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.devtools.artifactregistry.v1.VirtualRepositoryConfig parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.devtools.artifactregistry.v1.VirtualRepositoryConfig parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a varint length prefix before the message bytes.
  public static com.google.devtools.artifactregistry.v1.VirtualRepositoryConfig parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.devtools.artifactregistry.v1.VirtualRepositoryConfig parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.devtools.artifactregistry.v1.VirtualRepositoryConfig parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.devtools.artifactregistry.v1.VirtualRepositoryConfig parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  // Creates a builder seeded from the default (empty) instance.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  // Creates a builder pre-populated with the state of {@code prototype}.
  public static Builder newBuilder(
      com.google.devtools.artifactregistry.v1.VirtualRepositoryConfig prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Avoids an unnecessary mergeFrom when this IS the default instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Virtual repository configuration.
   * </pre>
   *
   * Protobuf type {@code google.devtools.artifactregistry.v1.VirtualRepositoryConfig}
   */
  // Generated builder. Bit 0x00000001 of bitField0_ tracks whether
  // upstreamPolicies_ is a private mutable list (set) or a shared/immutable one (clear).
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.devtools.artifactregistry.v1.VirtualRepositoryConfig)
      com.google.devtools.artifactregistry.v1.VirtualRepositoryConfigOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.devtools.artifactregistry.v1.RepositoryProto
          .internal_static_google_devtools_artifactregistry_v1_VirtualRepositoryConfig_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.devtools.artifactregistry.v1.RepositoryProto
          .internal_static_google_devtools_artifactregistry_v1_VirtualRepositoryConfig_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.devtools.artifactregistry.v1.VirtualRepositoryConfig.class,
              com.google.devtools.artifactregistry.v1.VirtualRepositoryConfig.Builder.class);
    }
    // Construct using com.google.devtools.artifactregistry.v1.VirtualRepositoryConfig.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (upstreamPoliciesBuilder_ == null) {
        // No field builder yet: just drop back to the shared empty list.
        upstreamPolicies_ = java.util.Collections.emptyList();
      } else {
        upstreamPolicies_ = null;
        upstreamPoliciesBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.devtools.artifactregistry.v1.RepositoryProto
          .internal_static_google_devtools_artifactregistry_v1_VirtualRepositoryConfig_descriptor;
    }
    @java.lang.Override
    public com.google.devtools.artifactregistry.v1.VirtualRepositoryConfig
        getDefaultInstanceForType() {
      return com.google.devtools.artifactregistry.v1.VirtualRepositoryConfig.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.devtools.artifactregistry.v1.VirtualRepositoryConfig build() {
      com.google.devtools.artifactregistry.v1.VirtualRepositoryConfig result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.devtools.artifactregistry.v1.VirtualRepositoryConfig buildPartial() {
      com.google.devtools.artifactregistry.v1.VirtualRepositoryConfig result =
          new com.google.devtools.artifactregistry.v1.VirtualRepositoryConfig(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Transfers the repeated field into the message, freezing the list if this
    // builder still owns a mutable copy.
    private void buildPartialRepeatedFields(
        com.google.devtools.artifactregistry.v1.VirtualRepositoryConfig result) {
      if (upstreamPoliciesBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          upstreamPolicies_ = java.util.Collections.unmodifiableList(upstreamPolicies_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.upstreamPolicies_ = upstreamPolicies_;
      } else {
        result.upstreamPolicies_ = upstreamPoliciesBuilder_.build();
      }
    }
    // No singular fields to copy for this message; kept for generator symmetry.
    private void buildPartial0(
        com.google.devtools.artifactregistry.v1.VirtualRepositoryConfig result) {
      int from_bitField0_ = bitField0_;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.devtools.artifactregistry.v1.VirtualRepositoryConfig) {
        return mergeFrom((com.google.devtools.artifactregistry.v1.VirtualRepositoryConfig) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(
        com.google.devtools.artifactregistry.v1.VirtualRepositoryConfig other) {
      if (other
          == com.google.devtools.artifactregistry.v1.VirtualRepositoryConfig.getDefaultInstance())
        return this;
      if (upstreamPoliciesBuilder_ == null) {
        if (!other.upstreamPolicies_.isEmpty()) {
          if (upstreamPolicies_.isEmpty()) {
            // Adopt the other message's (immutable) list directly.
            upstreamPolicies_ = other.upstreamPolicies_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureUpstreamPoliciesIsMutable();
            upstreamPolicies_.addAll(other.upstreamPolicies_);
          }
          onChanged();
        }
      } else {
        if (!other.upstreamPolicies_.isEmpty()) {
          if (upstreamPoliciesBuilder_.isEmpty()) {
            // Swap the empty builder for the other list, then (optionally) rebuild it.
            upstreamPoliciesBuilder_.dispose();
            upstreamPoliciesBuilder_ = null;
            upstreamPolicies_ = other.upstreamPolicies_;
            bitField0_ = (bitField0_ & ~0x00000001);
            upstreamPoliciesBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getUpstreamPoliciesFieldBuilder()
                    : null;
          } else {
            upstreamPoliciesBuilder_.addAllMessages(other.upstreamPolicies_);
          }
        }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                // Field 1 (upstream_policies), wire type 2: length-delimited message.
                com.google.devtools.artifactregistry.v1.UpstreamPolicy m =
                    input.readMessage(
                        com.google.devtools.artifactregistry.v1.UpstreamPolicy.parser(),
                        extensionRegistry);
                if (upstreamPoliciesBuilder_ == null) {
                  ensureUpstreamPoliciesIsMutable();
                  upstreamPolicies_.add(m);
                } else {
                  upstreamPoliciesBuilder_.addMessage(m);
                }
                break;
              } // case 10
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    private int bitField0_;
    private java.util.List<com.google.devtools.artifactregistry.v1.UpstreamPolicy>
        upstreamPolicies_ = java.util.Collections.emptyList();
    // Copy-on-write: replace a shared/immutable list with a private ArrayList once.
    private void ensureUpstreamPoliciesIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        upstreamPolicies_ =
            new java.util.ArrayList<com.google.devtools.artifactregistry.v1.UpstreamPolicy>(
                upstreamPolicies_);
        bitField0_ |= 0x00000001;
      }
    }
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.devtools.artifactregistry.v1.UpstreamPolicy,
            com.google.devtools.artifactregistry.v1.UpstreamPolicy.Builder,
            com.google.devtools.artifactregistry.v1.UpstreamPolicyOrBuilder>
        upstreamPoliciesBuilder_;
    /**
     *
     *
     * <pre>
     * Policies that configure the upstream artifacts distributed by the Virtual
     * Repository. Upstream policies cannot be set on a standard repository.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.UpstreamPolicy upstream_policies = 1;
     * </code>
     */
    public java.util.List<com.google.devtools.artifactregistry.v1.UpstreamPolicy>
        getUpstreamPoliciesList() {
      if (upstreamPoliciesBuilder_ == null) {
        return java.util.Collections.unmodifiableList(upstreamPolicies_);
      } else {
        return upstreamPoliciesBuilder_.getMessageList();
      }
    }
    /**
     *
     *
     * <pre>
     * Policies that configure the upstream artifacts distributed by the Virtual
     * Repository. Upstream policies cannot be set on a standard repository.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.UpstreamPolicy upstream_policies = 1;
     * </code>
     */
    public int getUpstreamPoliciesCount() {
      if (upstreamPoliciesBuilder_ == null) {
        return upstreamPolicies_.size();
      } else {
        return upstreamPoliciesBuilder_.getCount();
      }
    }
    /**
     *
     *
     * <pre>
     * Policies that configure the upstream artifacts distributed by the Virtual
     * Repository. Upstream policies cannot be set on a standard repository.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.UpstreamPolicy upstream_policies = 1;
     * </code>
     */
    public com.google.devtools.artifactregistry.v1.UpstreamPolicy getUpstreamPolicies(int index) {
      if (upstreamPoliciesBuilder_ == null) {
        return upstreamPolicies_.get(index);
      } else {
        return upstreamPoliciesBuilder_.getMessage(index);
      }
    }
    /**
     *
     *
     * <pre>
     * Policies that configure the upstream artifacts distributed by the Virtual
     * Repository. Upstream policies cannot be set on a standard repository.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.UpstreamPolicy upstream_policies = 1;
     * </code>
     */
    public Builder setUpstreamPolicies(
        int index, com.google.devtools.artifactregistry.v1.UpstreamPolicy value) {
      if (upstreamPoliciesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureUpstreamPoliciesIsMutable();
        upstreamPolicies_.set(index, value);
        onChanged();
      } else {
        upstreamPoliciesBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Policies that configure the upstream artifacts distributed by the Virtual
     * Repository. Upstream policies cannot be set on a standard repository.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.UpstreamPolicy upstream_policies = 1;
     * </code>
     */
    public Builder setUpstreamPolicies(
        int index, com.google.devtools.artifactregistry.v1.UpstreamPolicy.Builder builderForValue) {
      if (upstreamPoliciesBuilder_ == null) {
        ensureUpstreamPoliciesIsMutable();
        upstreamPolicies_.set(index, builderForValue.build());
        onChanged();
      } else {
        upstreamPoliciesBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Policies that configure the upstream artifacts distributed by the Virtual
     * Repository. Upstream policies cannot be set on a standard repository.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.UpstreamPolicy upstream_policies = 1;
     * </code>
     */
    public Builder addUpstreamPolicies(
        com.google.devtools.artifactregistry.v1.UpstreamPolicy value) {
      if (upstreamPoliciesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureUpstreamPoliciesIsMutable();
        upstreamPolicies_.add(value);
        onChanged();
      } else {
        upstreamPoliciesBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Policies that configure the upstream artifacts distributed by the Virtual
     * Repository. Upstream policies cannot be set on a standard repository.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.UpstreamPolicy upstream_policies = 1;
     * </code>
     */
    public Builder addUpstreamPolicies(
        int index, com.google.devtools.artifactregistry.v1.UpstreamPolicy value) {
      if (upstreamPoliciesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureUpstreamPoliciesIsMutable();
        upstreamPolicies_.add(index, value);
        onChanged();
      } else {
        upstreamPoliciesBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Policies that configure the upstream artifacts distributed by the Virtual
     * Repository. Upstream policies cannot be set on a standard repository.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.UpstreamPolicy upstream_policies = 1;
     * </code>
     */
    public Builder addUpstreamPolicies(
        com.google.devtools.artifactregistry.v1.UpstreamPolicy.Builder builderForValue) {
      if (upstreamPoliciesBuilder_ == null) {
        ensureUpstreamPoliciesIsMutable();
        upstreamPolicies_.add(builderForValue.build());
        onChanged();
      } else {
        upstreamPoliciesBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Policies that configure the upstream artifacts distributed by the Virtual
     * Repository. Upstream policies cannot be set on a standard repository.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.UpstreamPolicy upstream_policies = 1;
     * </code>
     */
    public Builder addUpstreamPolicies(
        int index, com.google.devtools.artifactregistry.v1.UpstreamPolicy.Builder builderForValue) {
      if (upstreamPoliciesBuilder_ == null) {
        ensureUpstreamPoliciesIsMutable();
        upstreamPolicies_.add(index, builderForValue.build());
        onChanged();
      } else {
        upstreamPoliciesBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Policies that configure the upstream artifacts distributed by the Virtual
     * Repository. Upstream policies cannot be set on a standard repository.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.UpstreamPolicy upstream_policies = 1;
     * </code>
     */
    public Builder addAllUpstreamPolicies(
        java.lang.Iterable<? extends com.google.devtools.artifactregistry.v1.UpstreamPolicy>
            values) {
      if (upstreamPoliciesBuilder_ == null) {
        ensureUpstreamPoliciesIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, upstreamPolicies_);
        onChanged();
      } else {
        upstreamPoliciesBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Policies that configure the upstream artifacts distributed by the Virtual
     * Repository. Upstream policies cannot be set on a standard repository.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.UpstreamPolicy upstream_policies = 1;
     * </code>
     */
    public Builder clearUpstreamPolicies() {
      if (upstreamPoliciesBuilder_ == null) {
        upstreamPolicies_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        upstreamPoliciesBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Policies that configure the upstream artifacts distributed by the Virtual
     * Repository. Upstream policies cannot be set on a standard repository.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.UpstreamPolicy upstream_policies = 1;
     * </code>
     */
    public Builder removeUpstreamPolicies(int index) {
      if (upstreamPoliciesBuilder_ == null) {
        ensureUpstreamPoliciesIsMutable();
        upstreamPolicies_.remove(index);
        onChanged();
      } else {
        upstreamPoliciesBuilder_.remove(index);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Policies that configure the upstream artifacts distributed by the Virtual
     * Repository. Upstream policies cannot be set on a standard repository.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.UpstreamPolicy upstream_policies = 1;
     * </code>
     */
    public com.google.devtools.artifactregistry.v1.UpstreamPolicy.Builder
        getUpstreamPoliciesBuilder(int index) {
      return getUpstreamPoliciesFieldBuilder().getBuilder(index);
    }
    /**
     *
     *
     * <pre>
     * Policies that configure the upstream artifacts distributed by the Virtual
     * Repository. Upstream policies cannot be set on a standard repository.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.UpstreamPolicy upstream_policies = 1;
     * </code>
     */
    public com.google.devtools.artifactregistry.v1.UpstreamPolicyOrBuilder
        getUpstreamPoliciesOrBuilder(int index) {
      if (upstreamPoliciesBuilder_ == null) {
        return upstreamPolicies_.get(index);
      } else {
        return upstreamPoliciesBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     *
     *
     * <pre>
     * Policies that configure the upstream artifacts distributed by the Virtual
     * Repository. Upstream policies cannot be set on a standard repository.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.UpstreamPolicy upstream_policies = 1;
     * </code>
     */
    public java.util.List<? extends com.google.devtools.artifactregistry.v1.UpstreamPolicyOrBuilder>
        getUpstreamPoliciesOrBuilderList() {
      if (upstreamPoliciesBuilder_ != null) {
        return upstreamPoliciesBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(upstreamPolicies_);
      }
    }
    /**
     *
     *
     * <pre>
     * Policies that configure the upstream artifacts distributed by the Virtual
     * Repository. Upstream policies cannot be set on a standard repository.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.UpstreamPolicy upstream_policies = 1;
     * </code>
     */
    public com.google.devtools.artifactregistry.v1.UpstreamPolicy.Builder
        addUpstreamPoliciesBuilder() {
      return getUpstreamPoliciesFieldBuilder()
          .addBuilder(com.google.devtools.artifactregistry.v1.UpstreamPolicy.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * Policies that configure the upstream artifacts distributed by the Virtual
     * Repository. Upstream policies cannot be set on a standard repository.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.UpstreamPolicy upstream_policies = 1;
     * </code>
     */
    public com.google.devtools.artifactregistry.v1.UpstreamPolicy.Builder
        addUpstreamPoliciesBuilder(int index) {
      return getUpstreamPoliciesFieldBuilder()
          .addBuilder(
              index, com.google.devtools.artifactregistry.v1.UpstreamPolicy.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * Policies that configure the upstream artifacts distributed by the Virtual
     * Repository. Upstream policies cannot be set on a standard repository.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.UpstreamPolicy upstream_policies = 1;
     * </code>
     */
    public java.util.List<com.google.devtools.artifactregistry.v1.UpstreamPolicy.Builder>
        getUpstreamPoliciesBuilderList() {
      return getUpstreamPoliciesFieldBuilder().getBuilderList();
    }
    // Lazily creates the field builder, handing it ownership of the current list.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.devtools.artifactregistry.v1.UpstreamPolicy,
            com.google.devtools.artifactregistry.v1.UpstreamPolicy.Builder,
            com.google.devtools.artifactregistry.v1.UpstreamPolicyOrBuilder>
        getUpstreamPoliciesFieldBuilder() {
      if (upstreamPoliciesBuilder_ == null) {
        upstreamPoliciesBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.devtools.artifactregistry.v1.UpstreamPolicy,
                com.google.devtools.artifactregistry.v1.UpstreamPolicy.Builder,
                com.google.devtools.artifactregistry.v1.UpstreamPolicyOrBuilder>(
                upstreamPolicies_,
                ((bitField0_ & 0x00000001) != 0),
                getParentForChildren(),
                isClean());
        upstreamPolicies_ = null;
      }
      return upstreamPoliciesBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.devtools.artifactregistry.v1.VirtualRepositoryConfig)
  }
  // @@protoc_insertion_point(class_scope:google.devtools.artifactregistry.v1.VirtualRepositoryConfig)
  // Singleton default (all-fields-empty) instance, created eagerly at class load.
  private static final com.google.devtools.artifactregistry.v1.VirtualRepositoryConfig
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.devtools.artifactregistry.v1.VirtualRepositoryConfig();
  }
  public static com.google.devtools.artifactregistry.v1.VirtualRepositoryConfig
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser used by all parseFrom overloads. On failure it attaches whatever was
  // parsed so far as the "unfinished message" for diagnostics.
  private static final com.google.protobuf.Parser<VirtualRepositoryConfig> PARSER =
      new com.google.protobuf.AbstractParser<VirtualRepositoryConfig>() {
        @java.lang.Override
        public VirtualRepositoryConfig parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures so callers see a protobuf-typed exception.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<VirtualRepositoryConfig> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<VirtualRepositoryConfig> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.devtools.artifactregistry.v1.VirtualRepositoryConfig
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.spi.discovery.zk.internal;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.BitSet;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteLogger;
import org.apache.ignite.cache.CacheMode;
import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.CommunicationFailureContext;
import org.apache.ignite.configuration.CommunicationFailureResolver;
import org.apache.ignite.internal.IgniteInternalFuture;
import org.apache.ignite.internal.TestRecordingCommunicationSpi;
import org.apache.ignite.internal.cluster.ClusterTopologyCheckedException;
import org.apache.ignite.internal.processors.cache.distributed.TestCacheNodeExcludingFilter;
import org.apache.ignite.internal.processors.cache.distributed.dht.preloader.GridDhtPartitionSupplyMessage;
import org.apache.ignite.internal.util.future.GridCompoundFuture;
import org.apache.ignite.internal.util.typedef.G;
import org.apache.ignite.internal.util.typedef.T3;
import org.apache.ignite.internal.util.typedef.internal.CU;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgniteBiPredicate;
import org.apache.ignite.lang.IgniteInClosure;
import org.apache.ignite.lang.IgniteOutClosure;
import org.apache.ignite.plugin.extensions.communication.Message;
import org.apache.ignite.resources.LoggerResource;
import org.apache.ignite.spi.IgniteSpiException;
import org.apache.ignite.spi.communication.tcp.TcpCommunicationSpi;
import org.apache.ignite.spi.discovery.zk.ZookeeperDiscoverySpi;
import org.apache.ignite.testframework.GridTestUtils;
import org.apache.zookeeper.ZkTestClientCnxnSocketNIO;
import org.junit.Ignore;
import org.junit.Test;
import static java.util.concurrent.TimeUnit.SECONDS;
/**
* Tests for Zookeeper SPI discovery.
*/
public class ZookeeperDiscoveryCommunicationFailureTest extends ZookeeperDiscoverySpiTestBase {
    /**
     * Verifies that a no-op communication failure resolver leaves a 2-node cluster intact.
     *
     * @throws Exception If failed.
     */
    @Test
    public void testNoOpCommunicationFailureResolve_1() throws Exception {
        communicationFailureResolve_Simple(2);
    }
    /**
     * Verifies that a no-op communication failure resolver leaves a 10-node cluster intact.
     *
     * @throws Exception If failed.
     */
    @Test
    public void testNoOpCommunicationErrorResolve_2() throws Exception {
        communicationFailureResolve_Simple(10);
    }
/**
* @param nodes Nodes number.
* @throws Exception If failed.
*/
private void communicationFailureResolve_Simple(int nodes) throws Exception {
assert nodes > 1;
sesTimeout = 2000;
commFailureRslvr = NoOpCommunicationFailureResolver.FACTORY;
startGridsMultiThreaded(nodes);
ThreadLocalRandom rnd = ThreadLocalRandom.current();
for (int i = 0; i < 3; i++) {
info("Iteration: " + i);
int idx1 = rnd.nextInt(nodes);
int idx2;
do {
idx2 = rnd.nextInt(nodes);
}
while (idx1 == idx2);
ZookeeperDiscoverySpi spi = spi(ignite(idx1));
spi.resolveCommunicationFailure(ignite(idx2).cluster().localNode(), new Exception("test"));
checkInternalStructuresCleanup();
}
}
/**
* Tests case when one node fails before sending communication status.
*
* @throws Exception If failed.
*/
@Test
public void testNoOpCommunicationErrorResolve_3() throws Exception {
sesTimeout = 2000;
commFailureRslvr = NoOpCommunicationFailureResolver.FACTORY;
startGridsMultiThreaded(3);
sesTimeout = 10_000;
testSockNio = true;
sesTimeout = 5000;
startGrid(3);
IgniteInternalFuture<?> fut = GridTestUtils.runAsync(new Callable<Object>() {
@Override public Object call() {
ZookeeperDiscoverySpi spi = spi(ignite(0));
spi.resolveCommunicationFailure(ignite(1).cluster().localNode(), new Exception("test"));
return null;
}
});
U.sleep(1000);
ZkTestClientCnxnSocketNIO nio = ZkTestClientCnxnSocketNIO.forNode(ignite(3));
nio.closeSocket(true);
try {
stopGrid(3);
fut.get();
}
finally {
nio.allowConnect();
}
waitForTopology(3);
}
    /**
     * Tests case when Coordinator fails while resolve process is in progress.
     *
     * @throws Exception If failed.
     */
    @Test
    public void testNoOpCommunicationErrorResolve_4() throws Exception {
        testCommSpi = true;
        sesTimeout = 2000;
        commFailureRslvr = NoOpCommunicationFailureResolver.FACTORY;
        startGrid(0);
        startGridsMultiThreaded(1, 3);
        // Latch blocks node 3's ping reply, keeping the resolve process in flight.
        ZkTestCommunicationSpi commSpi = ZkTestCommunicationSpi.testSpi(ignite(3));
        commSpi.pingLatch = new CountDownLatch(1);
        // Trigger resolution from node 1 against node 2 asynchronously.
        IgniteInternalFuture<?> fut = GridTestUtils.runAsync(new Callable<Object>() {
            @Override public Object call() {
                ZookeeperDiscoverySpi spi = spi(ignite(1));
                spi.resolveCommunicationFailure(ignite(2).cluster().localNode(), new Exception("test"));
                return null;
            }
        });
        U.sleep(1000);
        assertFalse(fut.isDone());
        // Kill the coordinator (node 0) while resolution is blocked, then unblock.
        stopGrid(0);
        commSpi.pingLatch.countDown();
        fut.get();
        waitForTopology(3);
    }
    /**
     * Tests that nodes join is delayed while resolve is in progress.
     *
     * @throws Exception If failed.
     */
    @Test
    public void testNoOpCommunicationErrorResolve_5() throws Exception {
        testCommSpi = true;
        sesTimeout = 2000;
        commFailureRslvr = NoOpCommunicationFailureResolver.FACTORY;
        startGrid(0);
        startGridsMultiThreaded(1, 3);
        // pingStartLatch signals the resolve has begun; pingLatch blocks its completion.
        ZkTestCommunicationSpi commSpi = ZkTestCommunicationSpi.testSpi(ignite(3));
        commSpi.pingStartLatch = new CountDownLatch(1);
        commSpi.pingLatch = new CountDownLatch(1);
        IgniteInternalFuture<?> fut = GridTestUtils.runAsync(new Callable<Object>() {
            @Override public Object call() {
                ZookeeperDiscoverySpi spi = spi(ignite(1));
                spi.resolveCommunicationFailure(ignite(2).cluster().localNode(), new Exception("test"));
                return null;
            }
        });
        assertTrue(commSpi.pingStartLatch.await(10, SECONDS));
        try {
            assertFalse(fut.isDone());
            final AtomicInteger nodeIdx = new AtomicInteger(3);
            // Concurrently start three more nodes; they must NOT join until resolve ends.
            IgniteInternalFuture<?> startFut = GridTestUtils.runMultiThreadedAsync(new Callable<Void>() {
                @Override public Void call() throws Exception {
                    startGrid(nodeIdx.incrementAndGet());
                    return null;
                }
            }, 3, "start-node");
            U.sleep(1000);
            assertFalse(startFut.isDone());
            // Topology must still be the original four nodes while resolve is blocked.
            assertEquals(4, ignite(0).cluster().nodes().size());
            commSpi.pingLatch.countDown();
            startFut.get();
            fut.get();
            waitForTopology(7);
        }
        finally {
            // Safety net: ensure the latch is released even if an assertion failed above.
            commSpi.pingLatch.countDown();
        }
    }
    /**
     * Resolver kills node with order 2 in a 2-node cluster.
     *
     * @throws Exception If failed.
     */
    @Test
    public void testCommunicationErrorResolve_KillNode_1() throws Exception {
        communicationFailureResolve_KillNodes(2, Collections.singleton(2L));
    }
    /**
     * Resolver kills node with order 2 in a 3-node cluster.
     *
     * @throws Exception If failed.
     */
    @Test
    public void testCommunicationErrorResolve_KillNode_2() throws Exception {
        communicationFailureResolve_KillNodes(3, Collections.singleton(2L));
    }
    /**
     * Resolver kills nodes with orders 2, 4 and 6 in a 10-node cluster.
     *
     * @throws Exception If failed.
     */
    @Test
    public void testCommunicationErrorResolve_KillNode_3() throws Exception {
        communicationFailureResolve_KillNodes(10, Arrays.asList(2L, 4L, 6L));
    }
    /**
     * Resolver kills the coordinator (order 1) in a 2-node cluster.
     *
     * @throws Exception If failed.
     */
    @Test
    public void testCommunicationErrorResolve_KillCoordinator_1() throws Exception {
        communicationFailureResolve_KillNodes(2, Collections.singleton(1L));
    }
    /**
     * Resolver kills the coordinator (order 1) in a 3-node cluster.
     *
     * @throws Exception If failed.
     */
    @Test
    public void testCommunicationErrorResolve_KillCoordinator_2() throws Exception {
        communicationFailureResolve_KillNodes(3, Collections.singleton(1L));
    }
    /**
     * Resolver kills the coordinator plus nodes with orders 4 and 6 in a 10-node cluster.
     *
     * @throws Exception If failed.
     */
    @Test
    public void testCommunicationErrorResolve_KillCoordinator_3() throws Exception {
        communicationFailureResolve_KillNodes(10, Arrays.asList(1L, 4L, 6L));
    }
    /**
     * Resolver kills the first three nodes (orders 1-3) in a 10-node cluster.
     *
     * @throws Exception If failed.
     */
    @Test
    public void testCommunicationErrorResolve_KillCoordinator_4() throws Exception {
        communicationFailureResolve_KillNodes(10, Arrays.asList(1L, 2L, 3L));
    }
/**
* @param startNodes Number of nodes to start.
* @param killNodes Nodes to kill by resolve process.
* @throws Exception If failed.
*/
private void communicationFailureResolve_KillNodes(int startNodes, Collection<Long> killNodes) throws Exception {
    testCommSpi = true;

    // Resolver kills exactly the nodes whose discovery orders are in 'killNodes'.
    commFailureRslvr = TestNodeKillCommunicationFailureResolver.factory(killNodes);

    startGrids(startNodes);

    ZkTestCommunicationSpi commSpi = ZkTestCommunicationSpi.testSpi(ignite(0));

    commSpi.checkRes = new BitSet(startNodes);

    ZookeeperDiscoverySpi spi = null;
    UUID killNodeId = null;

    // Pick the SPI of any surviving node to initiate resolution, and any to-be-killed
    // node's id as the reported "failed" node.
    for (Ignite node : G.allGrids()) {
        ZookeeperDiscoverySpi spi0 = spi(node);

        if (!killNodes.contains(node.cluster().localNode().order()))
            spi = spi0;
        else
            killNodeId = node.cluster().localNode().id();
    }

    assertNotNull(spi);
    assertNotNull(killNodeId);

    // The resolver kills nodes during resolution; the resolve call itself is expected to
    // fail with a ClusterTopologyCheckedException cause.
    try {
        spi.resolveCommunicationFailure(spi.getNode(killNodeId), new Exception("test"));

        fail("Exception is not thrown");
    }
    catch (IgniteSpiException e) {
        assertTrue("Unexpected exception: " + e, e.getCause() instanceof ClusterTopologyCheckedException);
    }

    int expNodes = startNodes - killNodes.size();

    waitForTopology(expNodes);

    // Every killed node must be gone from all surviving nodes' views.
    for (Ignite node : G.allGrids())
        assertFalse(killNodes.contains(node.cluster().localNode().order()));

    // The cluster must still accept new nodes after resolution.
    startGrid(startNodes);

    waitForTopology(expNodes + 1);
}
/**
* @throws Exception If failed.
*/
@Test
public void testCommunicationFailureResolve_KillCoordinator_5() throws Exception {
    sesTimeout = 2000;
    testCommSpi = true;

    // Resolver always kills the head of the topology snapshot, i.e. the current coordinator.
    commFailureRslvr = KillCoordinatorCommunicationFailureResolver.FACTORY;

    startGrids(10);

    int crd = 0;
    int nodeIdx = 10;

    // Repeatedly kill the current coordinator and replace it with a fresh node;
    // iteration count is scaled by the test scale factor.
    for (int i = 0; i < GridTestUtils.SF.applyLB(4, 2); i++) {
        info("Iteration: " + i);

        for (Ignite node : G.allGrids())
            ZkTestCommunicationSpi.testSpi(node).initCheckResult(10);

        UUID crdId = ignite(crd).cluster().localNode().id();

        // Trigger resolution from the node next in line after the coordinator.
        ZookeeperDiscoverySpi spi = spi(ignite(crd + 1));

        try {
            spi.resolveCommunicationFailure(spi.getNode(crdId), new Exception("test"));

            fail("Exception is not thrown");
        }
        catch (IgniteSpiException e) {
            assertTrue("Unexpected exception: " + e, e.getCause() instanceof ClusterTopologyCheckedException);
        }

        waitForTopology(9);

        startGrid(nodeIdx++);

        waitForTopology(10);

        crd++;
    }
}
/**
* @throws Exception If failed.
*/
@Ignore("https://issues.apache.org/jira/browse/IGNITE-10988")
@Test
public void testCommunicationFailureResolve_KillRandom() throws Exception {
    sesTimeout = 2000;
    testCommSpi = true;

    // Resolver kills a random subset of nodes on each invocation and records them
    // in KillRandomCommunicationFailureResolver.LAST_KILLED_NODES.
    commFailureRslvr = KillRandomCommunicationFailureResolver.FACTORY;

    startGridsMultiThreaded(10);
    startClientGridsMultiThreaded(10, 5);

    int nodesCnt = 15;

    waitForTopology(nodesCnt);

    int nodeIdx = 15;

    for (int i = 0; i < GridTestUtils.SF.applyLB(10, 2); i++) {
        info("Iteration: " + i);

        ZookeeperDiscoverySpi spi = null;

        for (Ignite node : G.allGrids()) {
            ZkTestCommunicationSpi.testSpi(node).initCheckResult(100);

            spi = spi(node);
        }

        assert spi != null;

        // The resolve call may legitimately fail when the initiating node is killed.
        try {
            spi.resolveCommunicationFailure(spi.getRemoteNodes().iterator().next(), new Exception("test"));
        }
        catch (IgniteSpiException ignore) {
            // No-op.
        }

        // Add one replacement node (randomly client or server) per iteration.
        if (ThreadLocalRandom.current().nextBoolean())
            startClientGrid(nodeIdx++);
        else
            startGrid(nodeIdx++);

        // Expected topology: previous size minus killed nodes plus the one just started.
        nodesCnt = nodesCnt - KillRandomCommunicationFailureResolver.LAST_KILLED_NODES.size() + 1;

        waitForTopology(nodesCnt);
    }
}
/**
* @throws Exception If failed.
*/
@Test
public void testDefaultCommunicationFailureResolver1() throws Exception {
    testCommSpi = true;
    sesTimeout = 5000;

    startGrids(3);

    // Simulate a connectivity split: nodes 0 and 1 report each other reachable, node 2
    // only reports itself (initCheckResult presumably marks reachable node indices —
    // confirm against ZkTestCommunicationSpi).
    ZkTestCommunicationSpi.testSpi(ignite(0)).initCheckResult(3, 0, 1);
    ZkTestCommunicationSpi.testSpi(ignite(1)).initCheckResult(3, 0, 1);
    ZkTestCommunicationSpi.testSpi(ignite(2)).initCheckResult(3, 2);

    UUID killedId = nodeId(2);

    assertNotNull(ignite(0).cluster().node(killedId));

    ZookeeperDiscoverySpi spi = spi(ignite(0));

    spi.resolveCommunicationFailure(spi.getNode(ignite(1).cluster().localNode().id()), new Exception("test"));

    // The default resolver is expected to drop the isolated node 2, leaving 2 nodes.
    waitForTopology(2);

    assertNull(ignite(0).cluster().node(killedId));
}
/**
* @throws Exception If failed.
*/
@Test
public void testDefaultCommunicationFailureResolver2() throws Exception {
    testCommSpi = true;
    sesTimeout = 5000;

    // 3 servers (0-2) plus 2 clients (3-4).
    startGrids(3);
    startClientGridsMultiThreaded(3, 2);

    // Split: {0, 1} vs {2, 3, 4} per the reachability results below.
    ZkTestCommunicationSpi.testSpi(ignite(0)).initCheckResult(5, 0, 1);
    ZkTestCommunicationSpi.testSpi(ignite(1)).initCheckResult(5, 0, 1);
    ZkTestCommunicationSpi.testSpi(ignite(2)).initCheckResult(5, 2, 3, 4);
    ZkTestCommunicationSpi.testSpi(ignite(3)).initCheckResult(5, 2, 3, 4);
    ZkTestCommunicationSpi.testSpi(ignite(4)).initCheckResult(5, 2, 3, 4);

    ZookeeperDiscoverySpi spi = spi(ignite(0));

    spi.resolveCommunicationFailure(spi.getNode(ignite(1).cluster().localNode().id()), new Exception("test"));

    // Expected survivors: the two-server side {0, 1}; the other side is dropped.
    waitForTopology(2);
}
/**
* @throws Exception If failed.
*/
@Test
public void testDefaultCommunicationFailureResolver3() throws Exception {
    // 3 nodes; break the communication server on node 1.
    defaultCommunicationFailureResolver_BreakCommunication(3, 1);
}
/**
* @throws Exception If failed.
*/
@Test
public void testDefaultCommunicationFailureResolver4() throws Exception {
    // 3 nodes; break the communication server on node 0 (the coordinator).
    defaultCommunicationFailureResolver_BreakCommunication(3, 0);
}
/**
* @throws Exception If failed.
*/
@Test
public void testDefaultCommunicationFailureResolver5() throws Exception {
    // 10 nodes; break the communication server on three of them concurrently.
    defaultCommunicationFailureResolver_BreakCommunication(10, 1, 3, 6);
}
/**
* @param startNodes Initial nodes number.
* @param breakNodes Node indices where communication server is closed.
* @throws Exception If failed.
*/
private void defaultCommunicationFailureResolver_BreakCommunication(int startNodes, final int...breakNodes) throws Exception {
    sesTimeout = 5000;

    startGridsMultiThreaded(startNodes);

    // Barrier makes all break threads act as simultaneously as possible.
    final CyclicBarrier b = new CyclicBarrier(breakNodes.length);

    GridTestUtils.runMultiThreaded(new IgniteInClosure<Integer>() {
        @Override public void apply(Integer threadIdx) {
            try {
                b.await();

                int nodeIdx = breakNodes[threadIdx];

                info("Close communication: " + nodeIdx);

                // Simulate a communication failure by closing the node's communication server.
                ((TcpCommunicationSpi)ignite(nodeIdx).configuration().getCommunicationSpi()).simulateNodeFailure();
            }
            catch (Exception e) {
                fail("Unexpected error: " + e);
            }
        }
    }, breakNodes.length, "break-communication");

    // The default resolver is expected to remove all broken nodes from the topology.
    waitForTopology(startNodes - breakNodes.length);
}
/**
* @throws Exception If failed.
*/
@Test
public void testCommunicationFailureResolve_CachesInfo1() throws Exception {
    testCommSpi = true;
    sesTimeout = 5000;

    // Resolver only captures caches/affinity/owners for later verification.
    final CacheInfoCommunicationFailureResolver rslvr = new CacheInfoCommunicationFailureResolver();

    commFailureRslvr = new IgniteOutClosure<CommunicationFailureResolver>() {
        @Override public CommunicationFailureResolver apply() {
            return rslvr;
        }
    };

    startGrids(2);

    awaitPartitionMapExchange();

    // Expected values per cache: T3 = (partitions, backups, nodes per partition).
    Map<String, T3<Integer, Integer, Integer>> expCaches = new HashMap<>();

    expCaches.put(DEFAULT_CACHE_NAME, new T3<>(RendezvousAffinityFunction.DFLT_PARTITION_COUNT, 0, 1));

    checkResolverCachesInfo(ignite(0), expCaches);

    List<CacheConfiguration> caches = new ArrayList<>();

    // Caches with different backup counts, partition numbers and modes.
    CacheConfiguration c1 = new CacheConfiguration("c1");
    c1.setBackups(1);
    c1.setAffinity(new RendezvousAffinityFunction(false, 64));
    caches.add(c1);

    CacheConfiguration c2 = new CacheConfiguration("c2");
    c2.setBackups(2);
    c2.setAffinity(new RendezvousAffinityFunction(false, 128));
    caches.add(c2);

    CacheConfiguration c3 = new CacheConfiguration("c3");
    c3.setCacheMode(CacheMode.REPLICATED);
    c3.setAffinity(new RendezvousAffinityFunction(false, 256));
    caches.add(c3);

    ignite(0).createCaches(caches);

    // On 2 nodes the effective copies per partition are capped by cluster size.
    expCaches.put("c1", new T3<>(64, 1, 2));
    expCaches.put("c2", new T3<>(128, 2, 2));
    expCaches.put("c3", new T3<>(256, 1, 2));

    checkResolverCachesInfo(ignite(0), expCaches);

    startGrid(2);
    startGrid(3);

    awaitPartitionMapExchange();

    expCaches.put("c2", new T3<>(128, 2, 3));
    expCaches.put("c3", new T3<>(256, 1, 4));

    checkResolverCachesInfo(ignite(0), expCaches);

    // Cache hosted only on nodes 2 and 3 (node filter excludes nodes 0 and 1).
    CacheConfiguration<Object, Object> c4 = new CacheConfiguration<>("c4");
    c4.setCacheMode(CacheMode.PARTITIONED);
    c4.setBackups(0);
    c4.setAffinity(new RendezvousAffinityFunction(false, 256));
    c4.setNodeFilter(new TestCacheNodeExcludingFilter(getTestIgniteInstanceName(0), getTestIgniteInstanceName(1)));

    ignite(2).createCache(c4);

    expCaches.put("c4", new T3<>(256, 0, 1));

    checkResolverCachesInfo(ignite(0), expCaches);

    stopGrid(0); // Stop current coordinator, check new coordinator will initialize required caches information.

    awaitPartitionMapExchange();

    expCaches.put("c3", new T3<>(256, 1, 3));

    checkResolverCachesInfo(ignite(1), expCaches);

    startGrid(0);

    expCaches.put("c3", new T3<>(256, 1, 4));

    checkResolverCachesInfo(ignite(1), expCaches);

    stopGrid(1);

    expCaches.put("c3", new T3<>(256, 1, 3));

    checkResolverCachesInfo(ignite(3), expCaches);
}
/**
* @throws Exception If failed.
*/
@Test
public void testCommunicationFailureResolve_CachesInfo2() throws Exception {
    testCommSpi = true;
    sesTimeout = 5000;

    final CacheInfoCommunicationFailureResolver rslvr = new CacheInfoCommunicationFailureResolver();

    commFailureRslvr = new IgniteOutClosure<CommunicationFailureResolver>() {
        @Override public CommunicationFailureResolver apply() {
            return rslvr;
        }
    };

    Ignite srv0 = startGrid(0);

    CacheConfiguration<Object, Object> ccfg = new CacheConfiguration<>("c1");
    ccfg.setBackups(1);

    srv0.createCache(ccfg);

    // Block rebalance to make sure node0 will be the only owner.
    TestRecordingCommunicationSpi.spi(srv0).blockMessages(new IgniteBiPredicate<ClusterNode, Message>() {
        @Override public boolean apply(ClusterNode node, Message msg) {
            return msg instanceof GridDhtPartitionSupplyMessage &&
                ((GridDhtPartitionSupplyMessage)msg).groupId() == CU.cacheId("c1");
        }
    });

    startGrid(1);

    // NOTE(review): fixed sleep to let node 1 join before resolution is triggered;
    // timing-based — consider replacing with an explicit wait condition.
    U.sleep(1000);

    ZookeeperDiscoverySpi spi = spi(srv0);

    rslvr.latch = new CountDownLatch(1);

    ZkTestCommunicationSpi.testSpi(srv0).initCheckResult(2, 0);

    spi.resolveCommunicationFailure(spi.getRemoteNodes().iterator().next(), new Exception("test"));

    assertTrue(rslvr.latch.await(10, SECONDS));

    // While rebalance is blocked, node 0 must be the sole owner of every "c1" partition.
    List<List<ClusterNode>> cacheOwners = rslvr.ownersMap.get("c1");

    ClusterNode node0 = srv0.cluster().localNode();

    for (int p = 0; p < RendezvousAffinityFunction.DFLT_PARTITION_COUNT; p++) {
        List<ClusterNode> owners = cacheOwners.get(p);

        assertEquals(1, owners.size());
        assertEquals(node0, owners.get(0));
    }

    // Unblock rebalance; after exchange both nodes own their share of "c1".
    TestRecordingCommunicationSpi.spi(srv0).stopBlock();

    awaitPartitionMapExchange();

    Map<String, T3<Integer, Integer, Integer>> expCaches = new HashMap<>();

    expCaches.put(DEFAULT_CACHE_NAME, new T3<>(RendezvousAffinityFunction.DFLT_PARTITION_COUNT, 0, 1));
    expCaches.put("c1", new T3<>(RendezvousAffinityFunction.DFLT_PARTITION_COUNT, 1, 2));

    checkResolverCachesInfo(srv0, expCaches);
}
/**
* @param crd Coordinator node.
* @param expCaches Expected caches info.
* @throws Exception If failed.
*/
private void checkResolverCachesInfo(Ignite crd, Map<String, T3<Integer, Integer, Integer>> expCaches)
    throws Exception {
    CacheInfoCommunicationFailureResolver rslvr =
        (CacheInfoCommunicationFailureResolver)crd.configuration().getCommunicationFailureResolver();

    assertNotNull(rslvr);

    ZookeeperDiscoverySpi spi = spi(crd);

    // Arm the resolver, trigger a resolve round and wait until it has captured cluster state.
    rslvr.latch = new CountDownLatch(1);

    ZkTestCommunicationSpi.testSpi(crd).initCheckResult(crd.cluster().nodes().size(), 0);

    spi.resolveCommunicationFailure(spi.getRemoteNodes().iterator().next(), new Exception("test"));

    assertTrue(rslvr.latch.await(10, SECONDS));

    // Verify the captured cache/affinity/owner info, then clear it for the next round.
    rslvr.checkCachesInfo(expCaches);

    rslvr.reset();
}
/**
* @throws Exception If failed.
*/
@SuppressWarnings("unchecked")
@Test
public void testCommunicationFailureResolve_ConcurrentDiscoveyEvents() throws Exception {
    sesTimeout = 5000;

    commFailureRslvr = NoOpCommunicationFailureResolver.FACTORY;

    final int INIT_NODES = 5;

    startGridsMultiThreaded(INIT_NODES);

    final CyclicBarrier b = new CyclicBarrier(4);

    GridCompoundFuture<?, ?> fut = new GridCompoundFuture<>();

    final AtomicBoolean stop = new AtomicBoolean();

    // Background activity 1: sequentially start up to 10 extra server nodes.
    fut.add((IgniteInternalFuture)GridTestUtils.runAsync(new Callable<Void>() {
        @Override public Void call() throws Exception {
            b.await();

            ThreadLocalRandom rnd = ThreadLocalRandom.current();

            for (int i = 0; i < 10; i++) {
                startGrid(i + INIT_NODES);

                //noinspection BusyWait
                Thread.sleep(rnd.nextLong(1000) + 10);

                if (stop.get())
                    break;
            }

            return null;
        }
    }, "test-node-start"));

    // Background activity 2: restart the same node (index 100) in a loop.
    fut.add((IgniteInternalFuture)GridTestUtils.runAsync(new Callable<Void>() {
        @Override public Void call() throws Exception {
            b.await();

            ThreadLocalRandom rnd = ThreadLocalRandom.current();

            while (!stop.get()) {
                startGrid(100);

                //noinspection BusyWait
                Thread.sleep(rnd.nextLong(1000) + 10);

                stopGrid(100);

                //noinspection BusyWait
                Thread.sleep(rnd.nextLong(1000) + 10);
            }

            return null;
        }
    }, "test-node-restart"));

    // Background activity 3: create and destroy caches with random backup counts.
    fut.add((IgniteInternalFuture)GridTestUtils.runAsync(new Callable<Void>() {
        @Override public Void call() throws Exception {
            b.await();

            ThreadLocalRandom rnd = ThreadLocalRandom.current();

            int idx = 0;

            while (!stop.get()) {
                CacheConfiguration ccfg = new CacheConfiguration("c-" + idx++);

                ccfg.setBackups(rnd.nextInt(5));

                ignite(rnd.nextInt(INIT_NODES)).createCache(ccfg);

                //noinspection BusyWait
                Thread.sleep(rnd.nextLong(1000) + 10);

                ignite(rnd.nextInt(INIT_NODES)).destroyCache(ccfg.getName());

                //noinspection BusyWait
                Thread.sleep(rnd.nextLong(1000) + 10);
            }

            return null;
        }
    }, "test-create-cache"));

    // Foreground activity: 5 threads trigger communication failure resolution concurrently
    // with the discovery events above; each raises the stop flag when finished so the
    // background activities wind down.
    fut.add((IgniteInternalFuture)GridTestUtils.runMultiThreadedAsync(new Callable<Void>() {
        @Override public Void call() throws Exception {
            try {
                b.await();

                ThreadLocalRandom rnd = ThreadLocalRandom.current();

                for (int i = 0; i < 5; i++) {
                    info("resolveCommunicationFailure: " + i);

                    ZookeeperDiscoverySpi spi = spi(ignite(rnd.nextInt(INIT_NODES)));

                    spi.resolveCommunicationFailure(ignite(rnd.nextInt(INIT_NODES)).cluster().localNode(),
                        new Exception("test"));
                }

                return null;
            }
            finally {
                stop.set(true);
            }
        }
    }, 5, "test-resolve-failure"));

    fut.markInitialized();

    fut.get();
}
/**
* @throws Exception If failed.
*/
@Test
public void testCommunicationFailureResolve_ConcurrentMultinode() throws Exception {
    sesTimeout = 5000;

    commFailureRslvr = NoOpCommunicationFailureResolver.FACTORY;

    // 5 servers + 5 clients.
    startGridsMultiThreaded(5);
    startClientGridsMultiThreaded(5, 5);

    final int NODES = 10;

    // 30 threads concurrently trigger resolution from random nodes; the resolver is a
    // no-op, so this only verifies the resolve machinery survives concurrent invocations.
    GridTestUtils.runMultiThreaded(new Callable<Void>() {
        @Override public Void call() {
            ThreadLocalRandom rnd = ThreadLocalRandom.current();

            for (int i = 0; i < 5; i++) {
                info("resolveCommunicationFailure: " + i);

                ZookeeperDiscoverySpi spi = spi(ignite(rnd.nextInt(NODES)));

                spi.resolveCommunicationFailure(spi.getRemoteNodes().iterator().next(), new Exception("test"));
            }

            return null;
        }
    }, 30, "test-resolve-failure");
}
/** Resolver that captures cache configurations, affinity and partition owners for test verification. */
private static class CacheInfoCommunicationFailureResolver implements CommunicationFailureResolver {
    /** Injected Ignite logger. */
    @LoggerResource
    private IgniteLogger log;

    /** Cache name -> configuration, captured by the last {@link #resolve} call. */
    Map<String, CacheConfiguration<?, ?>> caches;

    /** Cache name -> ideal affinity assignment (partition index -> assigned nodes). */
    Map<String, List<List<ClusterNode>>> affMap;

    /** Cache name -> current partition owners (partition index -> owning nodes). */
    Map<String, List<List<ClusterNode>>> ownersMap;

    /** Armed by the test before each resolve round; released once state is captured. */
    volatile CountDownLatch latch;

    /** {@inheritDoc} */
    @Override public void resolve(CommunicationFailureContext ctx) {
        assert latch != null;
        assert latch.getCount() == 1L : latch.getCount();

        caches = ctx.startedCaches();

        log.info("Resolver called, started caches: " + caches.keySet());

        assertNotNull(caches);

        affMap = new HashMap<>();
        ownersMap = new HashMap<>();

        caches.keySet().forEach(name -> {
            affMap.put(name, ctx.cacheAffinity(name));
            ownersMap.put(name, ctx.cachePartitionOwners(name));
        });

        latch.countDown();
    }

    /**
     * @param expCaches Expected caches information (valid once late affinity assignment is done and rebalance finished).
     */
    void checkCachesInfo(Map<String, T3<Integer, Integer, Integer>> expCaches) {
        assertNotNull(caches);
        assertNotNull(affMap);
        assertNotNull(ownersMap);

        expCaches.forEach((cacheName, exp) -> {
            int parts = exp.get1();
            int backups = exp.get2();
            int expNodes = exp.get3();

            assertTrue(cacheName, caches.containsKey(cacheName));

            CacheConfiguration ccfg = caches.get(cacheName);

            assertEquals(cacheName, ccfg.getName());

            // Replicated caches internally report "unlimited" backups.
            if (ccfg.getCacheMode() == CacheMode.REPLICATED)
                assertEquals(Integer.MAX_VALUE, ccfg.getBackups());
            else
                assertEquals(backups, ccfg.getBackups());

            assertEquals(parts, ccfg.getAffinity().partitions());

            List<List<ClusterNode>> aff = affMap.get(cacheName);

            assertNotNull(cacheName, aff);
            assertEquals(parts, aff.size());

            List<List<ClusterNode>> owners = ownersMap.get(cacheName);

            assertNotNull(cacheName, owners);
            assertEquals(parts, owners.size());

            // With rebalance finished, owners must match the ideal assignment exactly.
            for (int p = 0; p < parts; p++) {
                List<ClusterNode> partAff = aff.get(p);

                assertEquals(cacheName, expNodes, partAff.size());

                List<ClusterNode> partOwners = owners.get(p);

                assertEquals(cacheName, expNodes, partOwners.size());

                assertTrue(cacheName, partAff.containsAll(partOwners));
                assertTrue(cacheName, partOwners.containsAll(partAff));
            }
        });
    }

    /** Drops all captured state so the next resolve round starts clean. */
    void reset() {
        caches = null;
        affMap = null;
        ownersMap = null;
    }
}
/** Communication failure resolver that deliberately takes no action. */
private static class NoOpCommunicationFailureResolver implements CommunicationFailureResolver {
    /** Factory producing a fresh no-op resolver on each invocation. */
    static final IgniteOutClosure<CommunicationFailureResolver> FACTORY = NoOpCommunicationFailureResolver::new;

    /** {@inheritDoc} */
    @Override public void resolve(CommunicationFailureContext ctx) {
        // Intentionally empty: tests using this factory only check that resolution completes.
    }
}
/** Resolver that always kills the first node of the topology snapshot (the coordinator). */
private static class KillCoordinatorCommunicationFailureResolver implements CommunicationFailureResolver {
    /** Factory producing a fresh resolver on each invocation. */
    static final IgniteOutClosure<CommunicationFailureResolver> FACTORY = KillCoordinatorCommunicationFailureResolver::new;

    /** Injected Ignite logger. */
    @LoggerResource
    private IgniteLogger log;

    /** {@inheritDoc} */
    @Override public void resolve(CommunicationFailureContext ctx) {
        // The coordinator is the node with the lowest order, i.e. the head of the snapshot.
        ClusterNode crd = ctx.topologySnapshot().get(0);

        log.info("Resolver kills node: " + crd.id());

        ctx.killNode(crd);
    }
}
/** Resolver that kills a random subset of nodes while keeping at least one server alive. */
private static class KillRandomCommunicationFailureResolver implements CommunicationFailureResolver {
    /** Factory producing a fresh resolver on each invocation. */
    static final IgniteOutClosure<CommunicationFailureResolver> FACTORY
        = (IgniteOutClosure<CommunicationFailureResolver>)KillRandomCommunicationFailureResolver::new;

    /** Last killed nodes. */
    static final Set<ClusterNode> LAST_KILLED_NODES = new HashSet<>();

    /** Injected Ignite logger. */
    @LoggerResource
    private IgniteLogger log;

    /** {@inheritDoc} */
    @Override public void resolve(CommunicationFailureContext ctx) {
        LAST_KILLED_NODES.clear();

        List<ClusterNode> nodes = ctx.topologySnapshot();

        ThreadLocalRandom rnd = ThreadLocalRandom.current();

        // Kill strictly fewer than half of the nodes.
        int killNodes = rnd.nextInt(nodes.size() / 2);

        log.info("Resolver kills nodes [total=" + nodes.size() + ", kill=" + killNodes + ']');

        long srvCnt = nodes.stream().filter(node -> !node.isClient()).count();

        Set<Integer> idxs = new HashSet<>();

        // Draw random distinct indices; skip a server pick when killing it would leave
        // no servers in the cluster.
        // NOTE(review): srvCnt is decremented even when the pick is skipped. Once it drops
        // below 1 it stays there, so the observable effect ("refuse all further servers")
        // is unchanged, but the counter no longer reflects surviving servers — confirm
        // this is intentional.
        while (idxs.size() < killNodes) {
            int idx = rnd.nextInt(nodes.size());

            if (!nodes.get(idx).isClient() && !idxs.contains(idx) && --srvCnt < 1)
                continue;

            idxs.add(idx);
        }

        for (int idx : idxs) {
            ClusterNode node = nodes.get(idx);

            log.info("Resolver kills node: " + node.id());

            LAST_KILLED_NODES.add(node);

            ctx.killNode(node);
        }
    }
}
/** Resolver that kills every node whose discovery order is in a configured collection. */
private static class TestNodeKillCommunicationFailureResolver implements CommunicationFailureResolver {
    /**
     * @param killOrders Killed nodes order.
     * @return Factory.
     */
    static IgniteOutClosure<CommunicationFailureResolver> factory(final Collection<Long> killOrders) {
        return () -> new TestNodeKillCommunicationFailureResolver(killOrders);
    }

    /** Discovery orders of the nodes this resolver must kill. */
    final Collection<Long> killNodeOrders;

    /**
     * @param killNodeOrders Killed nodes order.
     */
    TestNodeKillCommunicationFailureResolver(Collection<Long> killNodeOrders) {
        this.killNodeOrders = killNodeOrders;
    }

    /** {@inheritDoc} */
    @Override public void resolve(CommunicationFailureContext ctx) {
        List<ClusterNode> top = ctx.topologySnapshot();

        assertFalse(top.isEmpty());

        // Kill exactly those nodes whose order was requested at construction time.
        top.stream()
            .filter(node -> killNodeOrders.contains(node.order()))
            .forEach(ctx::killNode);
    }
}
}
|
apache/juneau | 34,205 | juneau-core/juneau-marshall/src/main/java/org/apache/juneau/oapi/OpenApiSerializer.java | // ***************************************************************************************************************************
// * Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file *
// * distributed with this work for additional information regarding copyright ownership. The ASF licenses this file *
// * to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance *
// * with the License. You may obtain a copy of the License at *
// * *
// * http://www.apache.org/licenses/LICENSE-2.0 *
// * *
// * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an *
// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the *
// * specific language governing permissions and limitations under the License. *
// ***************************************************************************************************************************
package org.apache.juneau.oapi;
import java.lang.annotation.*;
import java.nio.charset.*;
import java.util.*;
import java.util.concurrent.*;
import org.apache.juneau.*;
import org.apache.juneau.annotation.*;
import org.apache.juneau.httppart.*;
import org.apache.juneau.internal.*;
import org.apache.juneau.uon.*;
import org.apache.juneau.utils.*;
/**
* Serializes POJOs to values suitable for transmission as HTTP headers, query/form-data parameters, and path variables.
*
* <h5 class='section'>Notes:</h5><ul>
* <li class='note'>This class is thread safe and reusable.
* </ul>
*
* <h5 class='section'>See Also:</h5><ul>
* <li class='link'><a class="doclink" href="https://juneau.apache.org/docs/topics/OpenApiBasics">OpenApi Basics</a>
* </ul>
*/
public class OpenApiSerializer extends UonSerializer implements OpenApiMetaProvider {
//-------------------------------------------------------------------------------------------------------------------
// Static
//-------------------------------------------------------------------------------------------------------------------
/** Reusable instance of {@link OpenApiSerializer}, all default settings. */
public static final OpenApiSerializer DEFAULT = new OpenApiSerializer(create());

/**
 * Creates a new builder for this object.
 *
 * @return A new builder, initialized with default settings (no part format, no collection format).
 */
public static Builder create() {
    return new Builder();
}
//-------------------------------------------------------------------------------------------------------------------
// Builder
//-------------------------------------------------------------------------------------------------------------------
/**
* Builder class.
*/
@FluentSetters
public static class Builder extends UonSerializer.Builder {
private static final Cache<HashKey,OpenApiSerializer> CACHE = Cache.of(HashKey.class, OpenApiSerializer.class).build();
HttpPartFormat format;
HttpPartCollectionFormat collectionFormat;
/**
* Constructor, default settings.
*/
protected Builder() {
    // "text/openapi" is the media type this serializer is registered to produce.
    produces("text/openapi");
    // Part/collection formats stay unset until configured via format()/collectionFormat().
    format = HttpPartFormat.NO_FORMAT;
    collectionFormat = HttpPartCollectionFormat.NO_COLLECTION_FORMAT;
}
/**
* Copy constructor.
*
* @param copyFrom The bean to copy from.
*/
protected Builder(OpenApiSerializer copyFrom) {
    super(copyFrom);
    // Carry over the OpenAPI-specific part settings not handled by the superclass.
    format = copyFrom.format;
    collectionFormat = copyFrom.collectionFormat;
}
/**
* Copy constructor.
*
* @param copyFrom The builder to copy from.
*/
protected Builder(Builder copyFrom) {
    super(copyFrom);
    // Carry over the OpenAPI-specific part settings not handled by the superclass.
    format = copyFrom.format;
    collectionFormat = copyFrom.collectionFormat;
}
@Override /* Context.Builder */
public Builder copy() {
    // Independent copy; subsequent changes to either builder do not affect the other.
    return new Builder(this);
}
@Override /* Context.Builder */
public OpenApiSerializer build() {
    // Instances are cached by hash key, so builders with equal settings share one serializer.
    return cache(CACHE).build(OpenApiSerializer.class);
}
@Override /* Context.Builder */
public HashKey hashKey() {
    // Include the OpenAPI-specific settings so the instance cache distinguishes them.
    return HashKey.of(
        super.hashKey(),
        format,
        collectionFormat
    );
}
//-----------------------------------------------------------------------------------------------------------------
// Properties
//-----------------------------------------------------------------------------------------------------------------
/**
* <i><l>OpenApiCommon</l> configuration property: </i> Default format for HTTP parts.
*
* <p>
* Specifies the format to use for HTTP parts when not otherwise specified via {@link org.apache.juneau.annotation.Schema#format()}.
*
* <h5 class='section'>Example:</h5>
* <p class='bjava'>
* <jc>// Create a plain-text serializer.</jc>
* OpenApiSerializer <jv>serializer1</jv> = OpenApiSerializer
* .<jsm>create</jsm>()
* .build();
*
* <jc>// Create a UON serializer.</jc>
* OpenApiSerializer <jv>serializer2</jv> = OpenApiSerializer
* .<jsm>create</jsm>()
* .format(<jsf>UON</jsf>)
* .build();
*
* String <jv>string</jv> = <js>"foo bar"</js>;
*
* <jc>// Produces: "foo bar"</jc>
* String <jv>value1</jv> = <jv>serializer1</jv>.serialize(<jv>string</jv>);
*
* <jc>// Produces: "'foo bar'"</jc>
* String <jv>value2</jv> = <jv>serializer2</jv>.serialize(<jv>string</jv>);
* </p>
*
* <ul class='values javatree'>
* <li class='jc'>{@link org.apache.juneau.httppart.HttpPartFormat}
* <ul>
* <li class='jf'>{@link org.apache.juneau.httppart.HttpPartFormat#UON UON} - UON notation (e.g. <js>"'foo bar'"</js>).
* <li class='jf'>{@link org.apache.juneau.httppart.HttpPartFormat#INT32 INT32} - Signed 32 bits.
* <li class='jf'>{@link org.apache.juneau.httppart.HttpPartFormat#INT64 INT64} - Signed 64 bits.
* <li class='jf'>{@link org.apache.juneau.httppart.HttpPartFormat#FLOAT FLOAT} - 32-bit floating point number.
* <li class='jf'>{@link org.apache.juneau.httppart.HttpPartFormat#DOUBLE DOUBLE} - 64-bit floating point number.
* <li class='jf'>{@link org.apache.juneau.httppart.HttpPartFormat#BYTE BYTE} - BASE-64 encoded characters.
* <li class='jf'>{@link org.apache.juneau.httppart.HttpPartFormat#BINARY BINARY} - Hexadecimal encoded octets (e.g. <js>"00FF"</js>).
* <li class='jf'>{@link org.apache.juneau.httppart.HttpPartFormat#BINARY_SPACED BINARY_SPACED} - Spaced-separated hexadecimal encoded octets (e.g. <js>"00 FF"</js>).
* <li class='jf'>{@link org.apache.juneau.httppart.HttpPartFormat#DATE DATE} - An <a href='http://xml2rfc.ietf.org/public/rfc/html/rfc3339.html#anchor14'>RFC3339 full-date</a>.
* <li class='jf'>{@link org.apache.juneau.httppart.HttpPartFormat#DATE_TIME DATE_TIME} - An <a href='http://xml2rfc.ietf.org/public/rfc/html/rfc3339.html#anchor14'>RFC3339 date-time</a>.
* <li class='jf'>{@link org.apache.juneau.httppart.HttpPartFormat#PASSWORD PASSWORD} - Used to hint UIs the input needs to be obscured.
* <li class='jf'>{@link org.apache.juneau.httppart.HttpPartFormat#NO_FORMAT NO_FORMAT} - (default) Not specified.
* </ul>
* </ul>
*
* @param value The new value for this property.
* @return This object.
*/
@FluentSetter
public Builder format(HttpPartFormat value) {
    // Default part format applied when no explicit @Schema(format=...) is present.
    format = value;
    return this;
}
/**
* <i><l>OpenApiCommon</l> configuration property: </i> Default collection format for HTTP parts.
*
* <p>
* Specifies the collection format to use for HTTP parts when not otherwise specified via {@link org.apache.juneau.annotation.Schema#collectionFormat()}.
*
* <h5 class='section'>Example:</h5>
* <p class='bjava'>
* <jc>// Create a serializer using CSV for collections.</jc>
* OpenApiSerializer <jv>serializer1</jv> = OpenApiSerializer
* .<jsm>create</jsm>()
* .collectionFormat(<jsf>CSV</jsf>)
* .build();
*
* <jc>// Create a serializer using UON for collections.</jc>
* OpenApiSerializer <jv>serializer2</jv> = OpenApiSerializer
* .<jsm>create</jsm>()
* .collectionFormat(<jsf>UON</jsf>)
* .build();
*
* <jc>// An arbitrary data structure.</jc>
* JsonList <jv>list</jv> = JsonList.<jsm>of</jsm>(
* <js>"foo"</js>,
* <js>"bar"</js>,
* JsonMap.<jsm>of</jsm>(
* <js>"baz"</js>, JsonList.<jsm>of</jsm>(<js>"qux"</js>,<js>"true"</js>,<js>"123"</js>)
* )
* );
*
* <jc>// Produces: "foo=bar,baz=qux\,true\,123"</jc>
* String <jv>value1</jv> = <jv>serializer1</jv>.serialize(<jv>list</jv>)
*
* <jc>// Produces: "(foo=bar,baz=@(qux,'true','123'))"</jc>
* String <jv>value2</jv> = <jv>serializer2</jv>.serialize(<jv>list</jv>)
* </p>
*
* <ul class='values javatree'>
* <li class='jc'>{@link org.apache.juneau.httppart.HttpPartFormat}
* <ul>
* <li class='jf'>{@link org.apache.juneau.httppart.HttpPartCollectionFormat#CSV CSV} - (default) Comma-separated values (e.g. <js>"foo,bar"</js>).
* <li class='jf'>{@link org.apache.juneau.httppart.HttpPartCollectionFormat#SSV SSV} - Space-separated values (e.g. <js>"foo bar"</js>).
* <li class='jf'>{@link org.apache.juneau.httppart.HttpPartCollectionFormat#TSV TSV} - Tab-separated values (e.g. <js>"foo\tbar"</js>).
* <li class='jf'>{@link org.apache.juneau.httppart.HttpPartCollectionFormat#PIPES PIPES} - Pipe-separated values (e.g. <js>"foo|bar"</js>).
* <li class='jf'>{@link org.apache.juneau.httppart.HttpPartCollectionFormat#MULTI MULTI} - Corresponds to multiple parameter instances instead of multiple values for a single instance (e.g. <js>"foo=bar&foo=baz"</js>).
* <li class='jf'>{@link org.apache.juneau.httppart.HttpPartCollectionFormat#UONC UONC} - UON collection notation (e.g. <js>"@(foo,bar)"</js>).
* </ul>
* </ul>
*
* @param value The new value for this property.
* @return This object.
*/
@FluentSetter
public Builder collectionFormat(HttpPartCollectionFormat value) {
    // Default collection format applied when no explicit @Schema(collectionFormat=...) is present.
    collectionFormat = value;
    return this;
}
// <FluentSetters>
// NOTE: the overrides below are code-generated (see the GENERATED markers). They only
// narrow the return type to Builder so fluent call chains keep the subtype; do not edit by hand.

@Override /* GENERATED - org.apache.juneau.Context.Builder */
public Builder annotations(Annotation...values) {
    super.annotations(values);
    return this;
}

@Override /* GENERATED - org.apache.juneau.Context.Builder */
public Builder apply(AnnotationWorkList work) {
    super.apply(work);
    return this;
}

@Override /* GENERATED - org.apache.juneau.Context.Builder */
public Builder applyAnnotations(Object...from) {
    super.applyAnnotations(from);
    return this;
}

@Override /* GENERATED - org.apache.juneau.Context.Builder */
public Builder applyAnnotations(Class<?>...from) {
    super.applyAnnotations(from);
    return this;
}

@Override /* GENERATED - org.apache.juneau.Context.Builder */
public Builder cache(Cache<HashKey,? extends org.apache.juneau.Context> value) {
    super.cache(value);
    return this;
}

@Override /* GENERATED - org.apache.juneau.Context.Builder */
public Builder debug() {
    super.debug();
    return this;
}

@Override /* GENERATED - org.apache.juneau.Context.Builder */
public Builder debug(boolean value) {
    super.debug(value);
    return this;
}

@Override /* GENERATED - org.apache.juneau.Context.Builder */
public Builder impl(Context value) {
    super.impl(value);
    return this;
}

@Override /* GENERATED - org.apache.juneau.Context.Builder */
public Builder type(Class<? extends org.apache.juneau.Context> value) {
    super.type(value);
    return this;
}

@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanClassVisibility(Visibility value) {
    super.beanClassVisibility(value);
    return this;
}

@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanConstructorVisibility(Visibility value) {
    super.beanConstructorVisibility(value);
    return this;
}

@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanContext(BeanContext value) {
    super.beanContext(value);
    return this;
}

@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanContext(BeanContext.Builder value) {
    super.beanContext(value);
    return this;
}

@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanDictionary(java.lang.Class<?>...values) {
    super.beanDictionary(values);
    return this;
}

@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanFieldVisibility(Visibility value) {
    super.beanFieldVisibility(value);
    return this;
}

@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanInterceptor(Class<?> on, Class<? extends org.apache.juneau.swap.BeanInterceptor<?>> value) {
    super.beanInterceptor(on, value);
    return this;
}

@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanMapPutReturnsOldValue() {
    super.beanMapPutReturnsOldValue();
    return this;
}

@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanMethodVisibility(Visibility value) {
    super.beanMethodVisibility(value);
    return this;
}

@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanProperties(Map<String,Object> values) {
    super.beanProperties(values);
    return this;
}

@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanProperties(Class<?> beanClass, String properties) {
    super.beanProperties(beanClass, properties);
    return this;
}

@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanProperties(String beanClassName, String properties) {
    super.beanProperties(beanClassName, properties);
    return this;
}

@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanPropertiesExcludes(Map<String,Object> values) {
    super.beanPropertiesExcludes(values);
    return this;
}

@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanPropertiesExcludes(Class<?> beanClass, String properties) {
    super.beanPropertiesExcludes(beanClass, properties);
    return this;
}

@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanPropertiesExcludes(String beanClassName, String properties) {
    super.beanPropertiesExcludes(beanClassName, properties);
    return this;
}

@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanPropertiesReadOnly(Map<String,Object> values) {
    super.beanPropertiesReadOnly(values);
    return this;
}

@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanPropertiesReadOnly(Class<?> beanClass, String properties) {
    super.beanPropertiesReadOnly(beanClass, properties);
    return this;
}

@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanPropertiesReadOnly(String beanClassName, String properties) {
    super.beanPropertiesReadOnly(beanClassName, properties);
    return this;
}

@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanPropertiesWriteOnly(Map<String,Object> values) {
    super.beanPropertiesWriteOnly(values);
    return this;
}

@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanPropertiesWriteOnly(Class<?> beanClass, String properties) {
    super.beanPropertiesWriteOnly(beanClass, properties);
    return this;
}

@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanPropertiesWriteOnly(String beanClassName, String properties) {
    super.beanPropertiesWriteOnly(beanClassName, properties);
    return this;
}

@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beansRequireDefaultConstructor() {
    super.beansRequireDefaultConstructor();
    return this;
}

@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beansRequireSerializable() {
    super.beansRequireSerializable();
    return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beansRequireSettersForGetters() {
super.beansRequireSettersForGetters();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder dictionaryOn(Class<?> on, java.lang.Class<?>...values) {
super.dictionaryOn(on, values);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder disableBeansRequireSomeProperties() {
super.disableBeansRequireSomeProperties();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder disableIgnoreMissingSetters() {
super.disableIgnoreMissingSetters();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder disableIgnoreTransientFields() {
super.disableIgnoreTransientFields();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder disableIgnoreUnknownNullBeanProperties() {
super.disableIgnoreUnknownNullBeanProperties();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder disableInterfaceProxies() {
super.disableInterfaceProxies();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public <T> Builder example(Class<T> pojoClass, T o) {
super.example(pojoClass, o);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public <T> Builder example(Class<T> pojoClass, String json) {
super.example(pojoClass, json);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder findFluentSetters() {
super.findFluentSetters();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder findFluentSetters(Class<?> on) {
super.findFluentSetters(on);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder ignoreInvocationExceptionsOnGetters() {
super.ignoreInvocationExceptionsOnGetters();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder ignoreInvocationExceptionsOnSetters() {
super.ignoreInvocationExceptionsOnSetters();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder ignoreUnknownBeanProperties() {
super.ignoreUnknownBeanProperties();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder ignoreUnknownEnumValues() {
super.ignoreUnknownEnumValues();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder implClass(Class<?> interfaceClass, Class<?> implClass) {
super.implClass(interfaceClass, implClass);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder implClasses(Map<Class<?>,Class<?>> values) {
super.implClasses(values);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder interfaceClass(Class<?> on, Class<?> value) {
super.interfaceClass(on, value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder interfaces(java.lang.Class<?>...value) {
super.interfaces(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder locale(Locale value) {
super.locale(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder mediaType(MediaType value) {
super.mediaType(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder notBeanClasses(java.lang.Class<?>...values) {
super.notBeanClasses(values);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder notBeanPackages(String...values) {
super.notBeanPackages(values);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder propertyNamer(Class<? extends org.apache.juneau.PropertyNamer> value) {
super.propertyNamer(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder propertyNamer(Class<?> on, Class<? extends org.apache.juneau.PropertyNamer> value) {
super.propertyNamer(on, value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder sortProperties() {
super.sortProperties();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder sortProperties(java.lang.Class<?>...on) {
super.sortProperties(on);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder stopClass(Class<?> on, Class<?> value) {
super.stopClass(on, value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public <T, S> Builder swap(Class<T> normalClass, Class<S> swappedClass, ThrowingFunction<T,S> swapFunction) {
super.swap(normalClass, swappedClass, swapFunction);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public <T, S> Builder swap(Class<T> normalClass, Class<S> swappedClass, ThrowingFunction<T,S> swapFunction, ThrowingFunction<S,T> unswapFunction) {
super.swap(normalClass, swappedClass, swapFunction, unswapFunction);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder swaps(Object...values) {
super.swaps(values);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder swaps(Class<?>...values) {
super.swaps(values);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder timeZone(TimeZone value) {
super.timeZone(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder typeName(Class<?> on, String value) {
super.typeName(on, value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder typePropertyName(String value) {
super.typePropertyName(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder typePropertyName(Class<?> on, String value) {
super.typePropertyName(on, value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder useEnumNames() {
super.useEnumNames();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder useJavaBeanIntrospector() {
super.useJavaBeanIntrospector();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanTraverseContext.Builder */
public Builder detectRecursions() {
super.detectRecursions();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanTraverseContext.Builder */
public Builder detectRecursions(boolean value) {
super.detectRecursions(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanTraverseContext.Builder */
public Builder ignoreRecursions() {
super.ignoreRecursions();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanTraverseContext.Builder */
public Builder ignoreRecursions(boolean value) {
super.ignoreRecursions(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanTraverseContext.Builder */
public Builder initialDepth(int value) {
super.initialDepth(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanTraverseContext.Builder */
public Builder maxDepth(int value) {
super.maxDepth(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder accept(String value) {
super.accept(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder addBeanTypes() {
super.addBeanTypes();
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder addBeanTypes(boolean value) {
super.addBeanTypes(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder addRootType() {
super.addRootType();
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder addRootType(boolean value) {
super.addRootType(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder keepNullProperties() {
super.keepNullProperties();
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder keepNullProperties(boolean value) {
super.keepNullProperties(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder listener(Class<? extends org.apache.juneau.serializer.SerializerListener> value) {
super.listener(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder produces(String value) {
super.produces(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder sortCollections() {
super.sortCollections();
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder sortCollections(boolean value) {
super.sortCollections(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder sortMaps() {
super.sortMaps();
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder sortMaps(boolean value) {
super.sortMaps(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder trimEmptyCollections() {
super.trimEmptyCollections();
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder trimEmptyCollections(boolean value) {
super.trimEmptyCollections(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder trimEmptyMaps() {
super.trimEmptyMaps();
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder trimEmptyMaps(boolean value) {
super.trimEmptyMaps(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder trimStrings() {
super.trimStrings();
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder trimStrings(boolean value) {
super.trimStrings(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder uriContext(UriContext value) {
super.uriContext(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder uriRelativity(UriRelativity value) {
super.uriRelativity(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.Serializer.Builder */
public Builder uriResolution(UriResolution value) {
super.uriResolution(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.WriterSerializer.Builder */
public Builder fileCharset(Charset value) {
super.fileCharset(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.WriterSerializer.Builder */
public Builder maxIndent(int value) {
super.maxIndent(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.WriterSerializer.Builder */
public Builder quoteChar(char value) {
super.quoteChar(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.WriterSerializer.Builder */
public Builder quoteCharOverride(char value) {
super.quoteCharOverride(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.WriterSerializer.Builder */
public Builder sq() {
super.sq();
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.WriterSerializer.Builder */
public Builder streamCharset(Charset value) {
super.streamCharset(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.WriterSerializer.Builder */
public Builder useWhitespace() {
super.useWhitespace();
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.WriterSerializer.Builder */
public Builder useWhitespace(boolean value) {
super.useWhitespace(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.serializer.WriterSerializer.Builder */
public Builder ws() {
super.ws();
return this;
}
@Override /* GENERATED - org.apache.juneau.uon.UonSerializer.Builder */
public Builder addBeanTypesUon() {
super.addBeanTypesUon();
return this;
}
@Override /* GENERATED - org.apache.juneau.uon.UonSerializer.Builder */
public Builder addBeanTypesUon(boolean value) {
super.addBeanTypesUon(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.uon.UonSerializer.Builder */
public Builder encoding() {
super.encoding();
return this;
}
@Override /* GENERATED - org.apache.juneau.uon.UonSerializer.Builder */
public Builder paramFormat(ParamFormat value) {
super.paramFormat(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.uon.UonSerializer.Builder */
public Builder paramFormatPlain() {
super.paramFormatPlain();
return this;
}
@Override /* GENERATED - org.apache.juneau.uon.UonSerializer.Builder */
public Builder quoteCharUon(char value) {
super.quoteCharUon(value);
return this;
}
// </FluentSetters>
}
//-------------------------------------------------------------------------------------------------------------------
// Instance
//-------------------------------------------------------------------------------------------------------------------

// Default HTTP-part format, applied when a schema does not specify one explicitly.
final HttpPartFormat format;

// Default HTTP-part collection format, applied when a schema does not specify one explicitly.
final HttpPartCollectionFormat collectionFormat;

// Lazily-populated per-serializer caches of extended OpenAPI metadata (see the
// getOpenApiClassMeta()/getOpenApiBeanPropertyMeta() accessors below).
private final Map<ClassMeta<?>,OpenApiClassMeta> openApiClassMetas = new ConcurrentHashMap<>();
private final Map<BeanPropertyMeta,OpenApiBeanPropertyMeta> openApiBeanPropertyMetas = new ConcurrentHashMap<>();

/**
 * Constructor.
 *
 * @param builder
 * 	The builder for this object.
 */
public OpenApiSerializer(Builder builder) {
    // NOTE(review): encoding(false) mutates the incoming builder before passing it to the
    // superclass — presumably UON encoding must always be off for OpenAPI part serialization;
    // TODO confirm intent before changing.
    super(builder.encoding(false));
    format = builder.format;
    collectionFormat = builder.collectionFormat;
}
@Override /* Context */
public Builder copy() {
    // Seed a fresh builder with this serializer's current configuration.
    Builder copyOfSettings = new Builder(this);
    return copyOfSettings;
}
@Override /* Context */
public OpenApiSerializerSession.Builder createSession() {
    // Session-builder creation is delegated to the session class's static factory.
    OpenApiSerializerSession.Builder sessionBuilder = OpenApiSerializerSession.create(this);
    return sessionBuilder;
}
@Override /* Context */
public OpenApiSerializerSession getSession() {
    // Build a brand-new session from a freshly-created session builder.
    OpenApiSerializerSession.Builder sessionBuilder = createSession();
    return sessionBuilder.build();
}
@Override /* HttpPartSerializer */
public OpenApiSerializerSession getPartSession() {
    // Consistency: this previously duplicated getSession()'s logic
    // (OpenApiSerializerSession.create(this).build()).  Routing through getSession()
    // keeps session construction on a single code path.
    return getSession();
}
//-----------------------------------------------------------------------------------------------------------------
// Extended metadata
//-----------------------------------------------------------------------------------------------------------------

/**
 * Returns the OpenAPI-specific metadata for the specified class, creating and caching it on first use.
 *
 * @param cm The class metadata to look up.
 * @return The cached or newly-created OpenAPI class metadata.
 */
@Override /* OpenApiMetaProvider */
public OpenApiClassMeta getOpenApiClassMeta(ClassMeta<?> cm) {
    OpenApiClassMeta m = openApiClassMetas.get(cm);
    if (m == null) {
        // Benign race: under contention two threads may both construct a meta for the same
        // key and the later put() wins.  NOTE(review): ConcurrentHashMap.computeIfAbsent()
        // is not used here, possibly because constructing OpenApiClassMeta may re-enter
        // this provider for nested types (recursive map updates inside computeIfAbsent are
        // unsafe) — confirm before "simplifying" this to computeIfAbsent.
        m = new OpenApiClassMeta(cm, this);
        openApiClassMetas.put(cm, m);
    }
    return m;
}
/**
 * Returns the OpenAPI-specific metadata for the specified bean property, creating and caching it on first use.
 *
 * @param bpm The bean property metadata to look up.  Can be {@code null}.
 * @return The cached or newly-created metadata, or {@link OpenApiBeanPropertyMeta#DEFAULT} if {@code null} was passed in.
 */
@Override /* OpenApiMetaProvider */
public OpenApiBeanPropertyMeta getOpenApiBeanPropertyMeta(BeanPropertyMeta bpm) {
    if (bpm == null)
        return OpenApiBeanPropertyMeta.DEFAULT;
    OpenApiBeanPropertyMeta m = openApiBeanPropertyMetas.get(bpm);
    if (m == null) {
        // Benign race: duplicate construction is possible under contention; the later put() wins.
        // NOTE(review): the meta is built from bpm.getDelegateFor() but cached under bpm itself —
        // presumably so wrapper properties resolve to their delegate's metadata; confirm.
        m = new OpenApiBeanPropertyMeta(bpm.getDelegateFor(), this);
        openApiBeanPropertyMetas.put(bpm, m);
    }
    return m;
}
//-----------------------------------------------------------------------------------------------------------------
// Properties
//-----------------------------------------------------------------------------------------------------------------

/**
 * Returns the default format to use when not otherwise specified via {@link Schema#format()}.
 *
 * @return The default format to use when not otherwise specified via {@link Schema#format()}.
 */
protected final HttpPartFormat getFormat() {
    return format;
}

/**
 * Returns the default collection format to use when not otherwise specified via {@link Schema#collectionFormat()}.
 *
 * @return The default collection format to use when not otherwise specified via {@link Schema#collectionFormat()}.
 */
protected final HttpPartCollectionFormat getCollectionFormat() {
    return collectionFormat;
}
}
|
apache/shiro | 36,464 | core/src/main/java/org/apache/shiro/realm/AuthenticatingRealm.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.shiro.realm;
import org.apache.shiro.authc.AuthenticationException;
import org.apache.shiro.authc.AuthenticationInfo;
import org.apache.shiro.authc.AuthenticationToken;
import org.apache.shiro.authc.IncorrectCredentialsException;
import org.apache.shiro.authc.UsernamePasswordToken;
import org.apache.shiro.authc.credential.AllowAllCredentialsMatcher;
import org.apache.shiro.authc.credential.CredentialsMatcher;
import org.apache.shiro.authc.credential.SimpleCredentialsMatcher;
import org.apache.shiro.cache.Cache;
import org.apache.shiro.cache.CacheManager;
import org.apache.shiro.subject.PrincipalCollection;
import org.apache.shiro.lang.util.Initializable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.concurrent.atomic.AtomicInteger;
/**
* A top-level abstract implementation of the <tt>Realm</tt> interface that only implements authentication support
* (log-in) operations and leaves authorization (access control) behavior to subclasses.
* <h2>Authentication Caching</h2>
 * For applications that perform frequent, repeated authentication of the same accounts (e.g. as is often done in
 * REST or SOAP applications that authenticate on every request), it might be prudent to enable authentication
 * caching to alleviate constant load on any back-end data sources.
* <p/>
* This feature is disabled by default to retain backwards-compatibility with Shiro 1.1 and earlier. It may be
* enabled by setting {@link #setAuthenticationCachingEnabled(boolean) authenticationCachingEnabled} = {@code true}
* (and configuring Shiro with a {@link CacheManager} of course), but <b>NOTE:</b>
* <p/>
* <b>ONLY enable authentication caching if either of the following is true for your realm implementation:</b>
* <ul>
* <li>The {@link #doGetAuthenticationInfo(org.apache.shiro.authc.AuthenticationToken) doGetAuthenticationInfo}
* implementation returns {@code AuthenticationInfo} instances where the
* {@link org.apache.shiro.authc.AuthenticationInfo#getCredentials() credentials} are securely obfuscated and NOT
* plaintext (raw) credentials. For example,
* if your realm references accounts with passwords, that the {@code AuthenticationInfo}'s
* {@link org.apache.shiro.authc.AuthenticationInfo#getCredentials() credentials} are safely hashed and salted or otherwise
* fully encrypted.<br/><br/></li>
* <li>The {@link #doGetAuthenticationInfo(org.apache.shiro.authc.AuthenticationToken) doGetAuthenticationInfo}
* implementation returns {@code AuthenticationInfo} instances where the
* {@link org.apache.shiro.authc.AuthenticationInfo#getCredentials() credentials} are plaintext (raw) <b>AND</b> the
* cache region storing the {@code AuthenticationInfo} instances WILL NOT overflow to disk and WILL NOT transmit cache
* entries over an unprotected (non TLS/SSL) network (as might be the case with a networked/distributed enterprise cache).
* This should be the case even in private/trusted/corporate networks.</li>
* </ul>
* <p/>
* These points are very important because if authentication caching is enabled, this abstract class implementation
* will place AuthenticationInfo instances returned from the subclass implementations directly into the cache, for
* example:
* <pre>
* cache.put(cacheKey, subclassAuthenticationInfoInstance);
* </pre>
* <p/>
* Enabling authentication caching is ONLY safe to do if the above two scenarios apply. It is NOT safe to enable under
* any other scenario.
* <p/>
* When possible, always represent and store credentials in a safe form (hash+salt or encrypted) to eliminate plaintext
* visibility.
* <h3>Authentication Cache Invalidation on Logout</h3>
* If authentication caching is enabled, this implementation will attempt to evict (remove) cached authentication data
* for an account during logout. This can only occur if the
* {@link #getAuthenticationCacheKey(org.apache.shiro.authc.AuthenticationToken)} and
* {@link #getAuthenticationCacheKey(org.apache.shiro.subject.PrincipalCollection)} methods return the exact same value.
* <p/>
* The default implementations of these methods expect that the
* {@link org.apache.shiro.authc.AuthenticationToken#getPrincipal()} (what the user submits during login) and
* {@link #getAvailablePrincipal(org.apache.shiro.subject.PrincipalCollection) getAvailablePrincipal} (what is returned
* by the realm after account lookup) return
* the same exact value. For example, the user submitted username is also the primary account identifier.
* <p/>
* However, if your application uses, say, a username for end-user login, but returns a primary key ID as the
* primary principal after authentication, then you will need to override either
* {@link #getAuthenticationCacheKey(org.apache.shiro.authc.AuthenticationToken) getAuthenticationCacheKey(token)} or
* {@link #getAuthenticationCacheKey(org.apache.shiro.subject.PrincipalCollection) getAuthenticationCacheKey(principals)}
* (or both) to ensure that the same cache key can be used for either object.
* <p/>
* This guarantees that the same cache key used to cache the data during authentication (derived from the
* {@code AuthenticationToken}) will be used to remove the cached data during logout (derived from the
* {@code PrincipalCollection}).
* <h4>Unmatching Cache Key Values</h4>
* If the return values from {@link #getAuthenticationCacheKey(org.apache.shiro.authc.AuthenticationToken)} and
* {@link #getAuthenticationCacheKey(org.apache.shiro.subject.PrincipalCollection)} are not identical, cached
* authentication data removal is at the mercy of your cache provider settings. For example, often cache
* implementations will evict cache entries based on a timeToIdle or timeToLive (TTL) value.
* <p/>
* If this lazy eviction capability of the cache product is not sufficient and you want discrete behavior
* (highly recommended for authentication data), ensure that the return values from those two methods are identical in
* the subclass implementation.
*
* @since 0.2
*/
public abstract class AuthenticatingRealm extends CachingRealm implements Initializable {
private static final Logger LOGGER = LoggerFactory.getLogger(AuthenticatingRealm.class);

// Counts realm instances so each one receives a unique default authentication-cache name
// (see the main constructor, which appends "." + instanceNumber for instances after the first).
private static final AtomicInteger INSTANCE_COUNT = new AtomicInteger();

/**
 * The default suffix appended to the realm name used for caching authentication data.
 *
 * @since 1.2
 */
private static final String DEFAULT_AUTHENTICATION_CACHE_SUFFIX = ".authenticationCache";

/**
 * Credentials matcher used to determine if the provided credentials match the credentials stored in the data store.
 */
private CredentialsMatcher credentialsMatcher;

// Explicitly-configured authentication cache; when null, a cache may instead be looked up
// by name from any configured CacheManager.
private Cache<Object, AuthenticationInfo> authenticationCache;

// Disabled (false) by default — set in the constructor for backwards compatibility with Shiro <= 1.1.
private boolean authenticationCachingEnabled;

// Name used to look up the authentication cache from the CacheManager when no explicit cache is set.
private String authenticationCacheName;

/**
 * The class that this realm supports for authentication tokens. This is used by the
 * default implementation of the {@link Realm#supports(org.apache.shiro.authc.AuthenticationToken)} method to
 * determine whether or not the given authentication token is supported by this realm.
 */
private Class<? extends AuthenticationToken> authenticationTokenClass;
/*-------------------------------------------
| C O N S T R U C T O R S |
============================================*/

/**
 * No-arg constructor: no cache manager, with a {@link SimpleCredentialsMatcher} for credentials comparison.
 */
public AuthenticatingRealm() {
    this(null, new SimpleCredentialsMatcher());
}

/**
 * Constructor accepting a cache manager; uses a {@link SimpleCredentialsMatcher} for credentials comparison.
 *
 * @param cacheManager the cache manager to use for caching support.
 */
public AuthenticatingRealm(CacheManager cacheManager) {
    this(cacheManager, new SimpleCredentialsMatcher());
}

/**
 * Constructor accepting the credentials matcher to use when comparing submitted and stored credentials.
 *
 * @param matcher the credentials matcher to use.
 */
public AuthenticatingRealm(CredentialsMatcher matcher) {
    this(null, matcher);
}

/**
 * Main constructor.  Either argument may be {@code null}, in which case the corresponding
 * property is simply not set here.
 *
 * @param cacheManager the cache manager to use for caching support, or {@code null} for none.
 * @param matcher      the credentials matcher to use, or {@code null} to leave it unset.
 */
public AuthenticatingRealm(CacheManager cacheManager, CredentialsMatcher matcher) {
    authenticationTokenClass = UsernamePasswordToken.class;

    //retain backwards compatibility for Shiro 1.1 and earlier. Setting to true by default will probably cause
    //unexpected results for existing applications:
    this.authenticationCachingEnabled = false;

    // Give every instance a distinct default cache name so two realms of the same class
    // don't share (and clobber) one authentication cache.  Instance #0 keeps the plain
    // name for backwards compatibility; later instances get a ".N" suffix.
    int instanceNumber = INSTANCE_COUNT.getAndIncrement();
    this.authenticationCacheName = getClass().getName() + DEFAULT_AUTHENTICATION_CACHE_SUFFIX;
    if (instanceNumber > 0) {
        this.authenticationCacheName = this.authenticationCacheName + "." + instanceNumber;
    }

    if (cacheManager != null) {
        setCacheManager(cacheManager);
    }
    if (matcher != null) {
        setCredentialsMatcher(matcher);
    }
}
/*--------------------------------------------
| A C C E S S O R S / M O D I F I E R S |
============================================*/

/**
 * Returns the <code>CredentialsMatcher</code> used during an authentication attempt to verify submitted
 * credentials with those stored in the system.
 * <p/>
 * <p>Unless overridden by the {@link #setCredentialsMatcher setCredentialsMatcher} method, the default
 * value is a {@link org.apache.shiro.authc.credential.SimpleCredentialsMatcher SimpleCredentialsMatcher} instance.
 *
 * @return the <code>CredentialsMatcher</code> used during an authentication attempt to verify submitted
 *         credentials with those stored in the system.
 * @see #setCredentialsMatcher(CredentialsMatcher)
 */
public CredentialsMatcher getCredentialsMatcher() {
    return credentialsMatcher;
}

/**
 * Sets the CredentialsMatcher used during an authentication attempt to verify submitted credentials with those
 * stored in the system. The implementation of this matcher can be switched via configuration to
 * support any number of schemes, including plain text comparisons, hashing comparisons, and others.
 * <p/>
 * <p>Unless overridden by this method, the default value is a
 * {@link org.apache.shiro.authc.credential.SimpleCredentialsMatcher} instance.
 *
 * @param credentialsMatcher the matcher to use.
 * @see #getCredentialsMatcher()
 */
public void setCredentialsMatcher(CredentialsMatcher credentialsMatcher) {
    this.credentialsMatcher = credentialsMatcher;
}

/**
 * Returns the authenticationToken class supported by this realm.
 * <p/>
 * <p>The default value is <tt>{@link org.apache.shiro.authc.UsernamePasswordToken UsernamePasswordToken.class}</tt>, since
 * about 90% of realms use username/password authentication, regardless of their protocol (e.g. over jdbc, ldap,
 * kerberos, http, etc.).
 * <p/>
 * <p>If subclasses haven't already overridden the {@link Realm#supports Realm.supports(AuthenticationToken)} method,
 * they must {@link #setAuthenticationTokenClass(Class) set a new class} if they won't support
 * <tt>UsernamePasswordToken</tt> authentication token submissions.
 *
 * @return the authenticationToken class supported by this realm.
 * @see #setAuthenticationTokenClass
 */
public Class<? extends AuthenticationToken> getAuthenticationTokenClass() {
    return authenticationTokenClass;
}

/**
 * Sets the authenticationToken class supported by this realm.
 * <p/>
 * <p>Unless overridden by this method, the default value is
 * {@link org.apache.shiro.authc.UsernamePasswordToken UsernamePasswordToken.class} to support the majority of applications.
 *
 * @param authenticationTokenClass the class of authentication token instances supported by this realm.
 * @see #getAuthenticationTokenClass getAuthenticationTokenClass() for more explanation.
 */
public void setAuthenticationTokenClass(Class<? extends AuthenticationToken> authenticationTokenClass) {
    this.authenticationTokenClass = authenticationTokenClass;
}
/**
 * Sets an explicit {@link Cache} instance to use for authentication caching. If not set and authentication
 * caching is {@link #isAuthenticationCachingEnabled() enabled}, any available
 * {@link #getCacheManager() cacheManager} will be used to acquire the cache instance if available.
 * <p/>
 * <b>WARNING:</b> Only set this property if safe caching conditions apply, as documented at the top
 * of this page in the class-level JavaDoc.
 *
 * @param authenticationCache an explicit {@link Cache} instance to use for authentication caching or
 *                            {@code null} if the cache should possibly be obtained another way.
 * @see #isAuthenticationCachingEnabled()
 * @see #getAuthenticationCacheName()
 * @since 1.2
 */
public void setAuthenticationCache(Cache<Object, AuthenticationInfo> authenticationCache) {
    this.authenticationCache = authenticationCache;
}

/**
 * Returns a {@link Cache} instance to use for authentication caching, or {@code null} if no cache has been
 * set.
 *
 * @return a {@link Cache} instance to use for authentication caching, or {@code null} if no cache has been
 *         set.
 * @see #setAuthenticationCache(org.apache.shiro.cache.Cache)
 * @see #isAuthenticationCachingEnabled()
 * @since 1.2
 */
public Cache<Object, AuthenticationInfo> getAuthenticationCache() {
    return this.authenticationCache;
}
/**
 * Returns the name of a {@link Cache} to lookup from any available {@link #getCacheManager() cacheManager} if
 * a cache is not explicitly configured via {@link #setAuthenticationCache(org.apache.shiro.cache.Cache)}.
 * <p>
 * This name will only be used to look up a cache if authentication caching is
 * {@link #isAuthenticationCachingEnabled() enabled}.
 * <p>
 * <b>WARNING:</b> Only set this property if safe caching conditions apply, as documented at the top
 * of this page in the class-level JavaDoc.
 *
 * @return the name of a {@link Cache} to lookup from any available {@link #getCacheManager() cacheManager} if
 *         a cache is not explicitly configured via {@link #setAuthenticationCache(org.apache.shiro.cache.Cache)}.
 * @see #isAuthenticationCachingEnabled()
 * @since 1.2
 */
public String getAuthenticationCacheName() {
    return this.authenticationCacheName;
}
/**
 * Sets the name of a {@link Cache} to lookup from any available {@link #getCacheManager() cacheManager} if
 * a cache is not explicitly configured via {@link #setAuthenticationCache(org.apache.shiro.cache.Cache)}.
 * <p>
 * This name will only be used to look up a cache if authentication caching is
 * {@link #isAuthenticationCachingEnabled() enabled}.
 *
 * @param authenticationCacheName the name of a {@link Cache} to lookup from any available
 *                                {@link #getCacheManager() cacheManager} if a cache is not explicitly configured
 *                                via {@link #setAuthenticationCache(org.apache.shiro.cache.Cache)}.
 * @see #isAuthenticationCachingEnabled()
 * @since 1.2
 */
public void setAuthenticationCacheName(String authenticationCacheName) {
    this.authenticationCacheName = authenticationCacheName;
}
/**
 * Returns {@code true} if authentication caching should be utilized if a {@link CacheManager} has been
 * {@link #setCacheManager(org.apache.shiro.cache.CacheManager) configured}, {@code false} otherwise.
 * <p>
 * The default value is {@code false} - see {@link #setAuthenticationCachingEnabled(boolean)}, which documents
 * that the default retains backwards compatibility with Shiro 1.1 and earlier.
 * (The previous Javadoc here claimed {@code true}, contradicting the setter documentation.)
 *
 * @return {@code true} if authentication caching should be utilized, {@code false} otherwise.
 */
public boolean isAuthenticationCachingEnabled() {
    // authentication caching is only effective when caching in general is also enabled:
    return this.authenticationCachingEnabled && isCachingEnabled();
}
/**
 * Enables or disables authentication caching (used when a {@link CacheManager} has been
 * {@link #setCacheManager(org.apache.shiro.cache.CacheManager) configured}). Enabling this also turns on
 * caching in general, since authentication caching cannot function without it.
 * <p>
 * The default value is {@code false} to retain backwards compatibility with Shiro 1.1 and earlier.
 * <p>
 * <b>WARNING:</b> Only set this property to {@code true} if safe caching conditions apply, as documented at the top
 * of this page in the class-level JavaDoc.
 *
 * @param authenticationCachingEnabled the value to set
 */
@SuppressWarnings({"UnusedDeclaration"})
public void setAuthenticationCachingEnabled(boolean authenticationCachingEnabled) {
    this.authenticationCachingEnabled = authenticationCachingEnabled;
    if (!authenticationCachingEnabled) {
        return;
    }
    // authentication caching requires the general caching facility to be enabled as well:
    setCachingEnabled(true);
}
/**
 * Sets the (application-unique) name of this realm. If the authentication cache name still has the
 * heuristically-created default value (one prefixed with this class's name), it is re-derived from the new
 * realm name plus {@code DEFAULT_AUTHENTICATION_CACHE_SUFFIX} so the cache name is meaningful per-realm.
 *
 * @param name the application-unique name to assign to this realm.
 */
@Override // fix: this overrides the superclass setter but was missing the annotation
public void setName(String name) {
    super.setName(name);
    String authcCacheName = this.authenticationCacheName;
    if (authcCacheName != null && authcCacheName.startsWith(getClass().getName())) {
        //get rid of the default heuristically-created cache name. Create a more meaningful one
        //based on the application-unique Realm name:
        this.authenticationCacheName = name + DEFAULT_AUTHENTICATION_CACHE_SUFFIX;
    }
}
/*--------------------------------------------
|               M E T H O D S               |
============================================*/
/**
 * Convenience implementation that returns
 * {@code getAuthenticationTokenClass().isAssignableFrom( token.getClass() );}. Can be overridden
 * by subclasses for more complex token checking.
 * <p>Most configurations will only need to set a different class via
 * {@link #setAuthenticationTokenClass}, as opposed to overriding this method.
 *
 * @param token the token being submitted for authentication.
 * @return true if this authentication realm can process the submitted token instance of the class, false otherwise.
 */
public boolean supports(AuthenticationToken token) {
    // null tokens are never supported; otherwise the token must be of (or subclass) the configured token type
    return token != null && getAuthenticationTokenClass().isAssignableFrom(token.getClass());
}
/**
 * Initializes this realm and potentially enables an authentication cache, depending on configuration. Based on
 * the availability of an authentication cache, this class functions as follows:
 * <ol>
 * <li>If the {@link #setAuthenticationCache cache} property has been set, it will be
 * used to cache the AuthenticationInfo objects returned from {@link #getAuthenticationInfo}
 * method invocations.
 * All future calls to {@link #getAuthenticationInfo} will attempt to use this cache first
 * to alleviate any potentially unnecessary calls to an underlying data store.</li>
 * <li>If the {@link #setAuthenticationCache cache} property has <b>not</b> been set,
 * the {@link #setCacheManager cacheManager} property will be checked.
 * If a {@code cacheManager} has been set, it will be used to eagerly acquire an authentication
 * {@code cache}, and this cache which will be used as specified in #1.</li>
 * <li>If neither the {@link #setAuthenticationCache(org.apache.shiro.cache.Cache) authenticationCache}
 * nor {@link #setCacheManager(org.apache.shiro.cache.CacheManager) cacheManager}
 * properties are set, caching will not be utilized and authentication look-ups will be delegated to
 * subclass implementations for each authentication attempt.</li>
 * </ol>
 * <p>
 * This method finishes by calling {@link #onInit()} to allow subclasses to perform any init behavior desired.
 *
 * @since 1.2
 */
public final void init() {
    //trigger obtaining the authentication cache if possible
    getAvailableAuthenticationCache();
    onInit();
}
/**
 * Template method for subclasses to implement any initialization logic. Called from
 * {@link #init()} after the authentication cache (if any) has been acquired.
 * The default implementation is a no-op.
 *
 * @since 1.2
 */
protected void onInit() {
}
/**
 * This implementation attempts to acquire an authentication cache if one is not already configured.
 * Invoked as a callback after a {@code CacheManager} has been set on this realm.
 *
 * @since 1.2
 */
protected void afterCacheManagerSet() {
    //trigger obtaining the authentication cache if possible
    getAvailableAuthenticationCache();
}
/**
 * Resolves the {@link Cache} instance to use for authentication caching, as follows: an explicitly
 * configured {@link #getAuthenticationCache() authenticationCache} always wins; otherwise, when
 * authentication caching is {@link #isAuthenticationCachingEnabled() enabled}, a cache is lazily acquired
 * from any available {@link #getCacheManager() cacheManager}. If caching is disabled and no cache was
 * explicitly set, {@code null} is returned.
 *
 * @return any available {@link Cache} instance to use for authentication caching, or {@code null}.
 */
private Cache<Object, AuthenticationInfo> getAvailableAuthenticationCache() {
    Cache<Object, AuthenticationInfo> cache = getAuthenticationCache();
    if (cache != null) {
        return cache;
    }
    return isAuthenticationCachingEnabled() ? getAuthenticationCacheLazy() : null;
}
/**
 * Returns the {@code authenticationCache} class attribute, first attempting to acquire one from any
 * configured {@link #getCacheManager() cacheManager} when the attribute is still {@code null}. A cache
 * acquired this way is remembered in the class attribute for subsequent calls.
 *
 * @return an available cache instance to be used for authentication caching or {@code null} if one is not available.
 * @since 1.2
 */
private Cache<Object, AuthenticationInfo> getAuthenticationCacheLazy() {
    if (this.authenticationCache != null) {
        return this.authenticationCache;
    }
    LOGGER.trace("No authenticationCache instance set. Checking for a cacheManager...");
    CacheManager cacheManager = getCacheManager();
    if (cacheManager != null) {
        String cacheName = getAuthenticationCacheName();
        LOGGER.debug("CacheManager [{}] configured. Building authentication cache '{}'", cacheManager, cacheName);
        this.authenticationCache = cacheManager.getCache(cacheName);
    }
    return this.authenticationCache;
}
/**
 * Looks up any cached {@link AuthenticationInfo} for the specified token's
 * {@link #getAuthenticationCacheKey(AuthenticationToken) cache key}. Returns {@code null} when no cache is
 * available, the token is {@code null}, or nothing is cached for the key.
 *
 * @param token the token submitted during the authentication attempt.
 * @return any cached AuthenticationInfo corresponding to the specified token or {@code null} if there currently
 *         isn't any cached data.
 * @since 1.2
 */
private AuthenticationInfo getCachedAuthenticationInfo(AuthenticationToken token) {
    Cache<Object, AuthenticationInfo> cache = getAvailableAuthenticationCache();
    if (cache == null || token == null) {
        return null;
    }
    LOGGER.trace("Attempting to retrieve the AuthenticationInfo from cache.");
    Object key = getAuthenticationCacheKey(token);
    AuthenticationInfo info = cache.get(key);
    if (info == null) {
        LOGGER.trace("No AuthenticationInfo found in cache for key [{}]", key);
    } else {
        LOGGER.trace("Found cached AuthenticationInfo for key [{}]", key);
    }
    return info;
}
/**
 * Caches the specified info when authentication caching is
 * {@link #isAuthenticationCachingEnabled(AuthenticationToken, AuthenticationInfo) enabled} for the specific
 * token/info pair and a cache instance is available; otherwise does nothing.
 *
 * @param token the authentication token submitted which resulted in a successful authentication attempt.
 * @param info  the AuthenticationInfo to cache as a result of the successful authentication attempt.
 * @since 1.2
 */
private void cacheAuthenticationInfoIfPossible(AuthenticationToken token, AuthenticationInfo info) {
    if (!isAuthenticationCachingEnabled(token, info)) {
        LOGGER.debug("AuthenticationInfo caching is disabled for info [{}]. Submitted token: [{}].", info, token);
        //return quietly, caching is disabled for this token/info pair:
        return;
    }
    Cache<Object, AuthenticationInfo> cache = getAvailableAuthenticationCache();
    if (cache == null) {
        return;
    }
    Object key = getAuthenticationCacheKey(token);
    cache.put(key, info);
    LOGGER.trace("Cached AuthenticationInfo for continued authentication. key=[{}], value=[{}].", key, info);
}
/**
 * Returns {@code true} if authentication caching should be utilized based on the specified
 * {@link AuthenticationToken} and/or {@link AuthenticationInfo}, {@code false} otherwise.
 * <p>
 * The default implementation simply delegates to {@link #isAuthenticationCachingEnabled()}, the general-case
 * authentication caching setting. Subclasses can override this to turn on or off caching at runtime
 * based on the specific submitted runtime values.
 *
 * @param token the submitted authentication token
 * @param info  the {@code AuthenticationInfo} acquired from data source lookup via
 *              {@link #doGetAuthenticationInfo(org.apache.shiro.authc.AuthenticationToken)}
 * @return {@code true} if authentication caching should be utilized based on the specified
 *         {@link AuthenticationToken} and/or {@link AuthenticationInfo}, {@code false} otherwise.
 * @since 1.2
 */
protected boolean isAuthenticationCachingEnabled(AuthenticationToken token, AuthenticationInfo info) {
    // token/info are intentionally unused here; they exist for subclass overrides
    return isAuthenticationCachingEnabled();
}
/**
 * This implementation functions as follows:
 * <ol>
 * <li>It attempts to acquire any cached {@link AuthenticationInfo} corresponding to the specified
 * {@link AuthenticationToken} argument. If a cached value is found, it will be used for credentials matching,
 * alleviating the need to perform any lookups with a data source.</li>
 * <li>If there is no cached {@link AuthenticationInfo} found, delegate to the
 * {@link #doGetAuthenticationInfo(org.apache.shiro.authc.AuthenticationToken)} method to perform the actual
 * lookup. If authentication caching is enabled and possible, any returned info object will be
 * {@link #cacheAuthenticationInfoIfPossible(AuthenticationToken, AuthenticationInfo) cached}
 * to be used in future authentication attempts.</li>
 * <li>If an AuthenticationInfo instance is not found in the cache or by lookup, {@code null} is returned to
 * indicate an account cannot be found.</li>
 * <li>If an AuthenticationInfo instance is found (either cached or via lookup), ensure the submitted
 * AuthenticationToken's credentials match the expected {@code AuthenticationInfo}'s credentials using the
 * {@link #getCredentialsMatcher() credentialsMatcher}. This means that credentials are always verified
 * for an authentication attempt.</li>
 * </ol>
 *
 * @param token the submitted account principal and credentials.
 * @return the AuthenticationInfo corresponding to the given {@code token}, or {@code null} if no
 *         AuthenticationInfo could be found.
 * @throws AuthenticationException if authentication failed.
 */
public final AuthenticationInfo getAuthenticationInfo(AuthenticationToken token) throws AuthenticationException {
    AuthenticationInfo info = getCachedAuthenticationInfo(token);
    if (info == null) {
        //otherwise not cached, perform the lookup:
        info = doGetAuthenticationInfo(token);
        LOGGER.debug("Looked up AuthenticationInfo [{}] from doGetAuthenticationInfo", info);
        if (token != null && info != null) {
            cacheAuthenticationInfoIfPossible(token, info);
        }
    } else {
        LOGGER.debug("Using cached authentication info [{}] to perform credentials matching.", info);
    }
    if (info != null) {
        // credentials are ALWAYS verified, even when the info came from the cache:
        assertCredentialsMatch(token, info);
    } else {
        LOGGER.debug("No AuthenticationInfo found for submitted AuthenticationToken [{}]. Returning null.", token);
    }
    return info;
}
/**
 * Asserts that the submitted {@code AuthenticationToken}'s credentials match the stored account
 * {@code AuthenticationInfo}'s credentials, and if not, throws an {@link AuthenticationException}.
 * A {@code CredentialsMatcher} must be configured; otherwise an {@link AuthenticationException} is raised
 * immediately.
 *
 * @param token the submitted authentication token
 * @param info  the AuthenticationInfo corresponding to the given {@code token}
 * @throws AuthenticationException if the token's credentials do not match the stored account credentials.
 */
protected void assertCredentialsMatch(AuthenticationToken token, AuthenticationInfo info) throws AuthenticationException {
    CredentialsMatcher cm = getCredentialsMatcher();
    if (cm == null) {
        throw new AuthenticationException("A CredentialsMatcher must be configured in order to verify "
                + "credentials during authentication. If you do not wish for credentials to be examined, you "
                + "can configure an " + AllowAllCredentialsMatcher.class.getName() + " instance.");
    }
    if (!cm.doCredentialsMatch(token, info)) {
        //not successful - throw an exception to indicate this:
        String msg = "Submitted credentials for token [" + token + "] did not match the expected credentials.";
        throw new IncorrectCredentialsException(msg);
    }
}
/**
 * Returns the key under which {@link AuthenticationInfo} instances are cached if authentication caching is enabled.
 * This implementation defaults to returning the token's
 * {@link org.apache.shiro.authc.AuthenticationToken#getPrincipal() principal}, which is usually a username in
 * most applications.
 * <h3>Cache Invalidation on Logout</h3>
 * <b>NOTE:</b> If you want to be able to invalidate an account's cached {@code AuthenticationInfo} on logout, you
 * must ensure the {@link #getAuthenticationCacheKey(org.apache.shiro.subject.PrincipalCollection)} method returns
 * the same value as this method.
 *
 * @param token the authentication token for which any successful authentication will be cached.
 * @return the cache key to use to cache the associated {@link AuthenticationInfo} after a successful authentication.
 * @since 1.2
 */
protected Object getAuthenticationCacheKey(AuthenticationToken token) {
    if (token == null) {
        return null;
    }
    return token.getPrincipal();
}
/**
 * Returns the key under which {@link AuthenticationInfo} instances are cached if authentication caching is enabled.
 * This implementation delegates to
 * {@link #getAvailablePrincipal(org.apache.shiro.subject.PrincipalCollection)}, which returns the primary principal
 * associated with this particular Realm.
 * <h3>Cache Invalidation on Logout</h3>
 * <b>NOTE:</b> If you want to be able to invalidate an account's cached {@code AuthenticationInfo} on logout, you
 * must ensure that this method returns the same value as the
 * {@link #getAuthenticationCacheKey(org.apache.shiro.authc.AuthenticationToken)} method!
 *
 * @param principals the principals of the account for which to set or remove cached {@code AuthenticationInfo}.
 * @return the cache key to use when looking up cached {@link AuthenticationInfo} instances.
 * @since 1.2
 */
protected Object getAuthenticationCacheKey(PrincipalCollection principals) {
    return getAvailablePrincipal(principals);
}
/**
 * This implementation clears out any cached authentication data by calling
 * {@link #clearCachedAuthenticationInfo(org.apache.shiro.subject.PrincipalCollection)} after first delegating
 * to the superclass implementation.
 * If overriding in a subclass, be sure to call {@code super.doClearCache} to ensure this behavior is maintained.
 *
 * @param principals the principals of the account for which to clear any cached data.
 * @since 1.2
 */
@Override
protected void doClearCache(PrincipalCollection principals) {
    super.doClearCache(principals);
    clearCachedAuthenticationInfo(principals);
}
/** Null-safe emptiness check for a {@link PrincipalCollection}. */
private static boolean isEmpty(PrincipalCollection pc) {
    if (pc == null) {
        return true;
    }
    return pc.isEmpty();
}
/**
 * Clears out the AuthenticationInfo cache entry for the specified account.
 * <p>
 * This method is provided as a convenience to subclasses so they can invalidate a cache entry when they
 * change an account's authentication data (e.g. reset password) during runtime. Because an account's
 * AuthenticationInfo can be cached, there needs to be a way to invalidate the cache for only that account so that
 * subsequent authentication operations don't used the (old) cached value if account data changes.
 * <p>
 * After this method is called, the next authentication for that same account will result in a call to
 * {@link #doGetAuthenticationInfo(org.apache.shiro.authc.AuthenticationToken) doGetAuthenticationInfo}, and the
 * resulting return value will be cached before being returned so it can be reused for later authentications.
 * <p>
 * If you wish to clear out all associated cached data (and not just authentication data), use the
 * {@link #clearCache(org.apache.shiro.subject.PrincipalCollection)} method instead (which will in turn call this
 * method by default).
 *
 * @param principals the principals of the account for which to clear the cached AuthenticationInfo.
 * @see #clearCache(org.apache.shiro.subject.PrincipalCollection)
 * @since 1.2
 */
protected void clearCachedAuthenticationInfo(PrincipalCollection principals) {
    if (isEmpty(principals)) {
        return;
    }
    //cache instance will be non-null if caching is enabled:
    Cache<Object, AuthenticationInfo> cache = getAvailableAuthenticationCache();
    if (cache != null) {
        cache.remove(getAuthenticationCacheKey(principals));
    }
}
/**
 * Retrieves authentication data from an implementation-specific datasource (RDBMS, LDAP, etc.) for the given
 * authentication token.
 * <p>
 * For most datasources, this means just 'pulling' authentication data for an associated subject/user and nothing
 * more and letting Shiro do the rest. But in some systems, this method could actually perform EIS specific
 * log-in logic in addition to just retrieving data - it is up to the Realm implementation.
 * <p>
 * A {@code null} return value means that no account could be associated with the specified token.
 *
 * @param token the authentication token containing the user's principal and credentials.
 * @return an {@link AuthenticationInfo} object containing account data resulting from the
 *         authentication ONLY if the lookup is successful (i.e. account exists and is valid, etc.)
 * @throws AuthenticationException if there is an error acquiring data or performing
 *                                 realm-specific authentication logic for the specified {@code token}
 */
protected abstract AuthenticationInfo doGetAuthenticationInfo(AuthenticationToken token) throws AuthenticationException;
}
|
googleapis/google-cloud-java | 35,985 | java-meet/proto-google-cloud-meet-v2beta/src/main/java/com/google/apps/meet/v2beta/ListRecordingsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/apps/meet/v2beta/service.proto
// Protobuf Java Version: 3.25.8
package com.google.apps.meet.v2beta;
/**
*
*
* <pre>
* Response for ListRecordings method.
* </pre>
*
* Protobuf type {@code google.apps.meet.v2beta.ListRecordingsResponse}
*/
public final class ListRecordingsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.apps.meet.v2beta.ListRecordingsResponse)
ListRecordingsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListRecordingsResponse.newBuilder() to construct.
private ListRecordingsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListRecordingsResponse() {
recordings_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListRecordingsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.apps.meet.v2beta.ServiceProto
.internal_static_google_apps_meet_v2beta_ListRecordingsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.apps.meet.v2beta.ServiceProto
.internal_static_google_apps_meet_v2beta_ListRecordingsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.apps.meet.v2beta.ListRecordingsResponse.class,
com.google.apps.meet.v2beta.ListRecordingsResponse.Builder.class);
}
public static final int RECORDINGS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.apps.meet.v2beta.Recording> recordings_;
/**
*
*
* <pre>
* List of recordings in one page.
* </pre>
*
* <code>repeated .google.apps.meet.v2beta.Recording recordings = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.apps.meet.v2beta.Recording> getRecordingsList() {
return recordings_;
}
/**
*
*
* <pre>
* List of recordings in one page.
* </pre>
*
* <code>repeated .google.apps.meet.v2beta.Recording recordings = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.apps.meet.v2beta.RecordingOrBuilder>
getRecordingsOrBuilderList() {
return recordings_;
}
/**
*
*
* <pre>
* List of recordings in one page.
* </pre>
*
* <code>repeated .google.apps.meet.v2beta.Recording recordings = 1;</code>
*/
@java.lang.Override
public int getRecordingsCount() {
return recordings_.size();
}
/**
*
*
* <pre>
* List of recordings in one page.
* </pre>
*
* <code>repeated .google.apps.meet.v2beta.Recording recordings = 1;</code>
*/
@java.lang.Override
public com.google.apps.meet.v2beta.Recording getRecordings(int index) {
return recordings_.get(index);
}
/**
*
*
* <pre>
* List of recordings in one page.
* </pre>
*
* <code>repeated .google.apps.meet.v2beta.Recording recordings = 1;</code>
*/
@java.lang.Override
public com.google.apps.meet.v2beta.RecordingOrBuilder getRecordingsOrBuilder(int index) {
return recordings_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* Token to be circulated back for further List call if current List doesn't
* include all the recordings. Unset if all recordings are returned.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* Token to be circulated back for further List call if current List doesn't
* include all the recordings. Unset if all recordings are returned.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < recordings_.size(); i++) {
output.writeMessage(1, recordings_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < recordings_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, recordings_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.apps.meet.v2beta.ListRecordingsResponse)) {
return super.equals(obj);
}
com.google.apps.meet.v2beta.ListRecordingsResponse other =
(com.google.apps.meet.v2beta.ListRecordingsResponse) obj;
if (!getRecordingsList().equals(other.getRecordingsList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getRecordingsCount() > 0) {
hash = (37 * hash) + RECORDINGS_FIELD_NUMBER;
hash = (53 * hash) + getRecordingsList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.apps.meet.v2beta.ListRecordingsResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.apps.meet.v2beta.ListRecordingsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.apps.meet.v2beta.ListRecordingsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.apps.meet.v2beta.ListRecordingsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.apps.meet.v2beta.ListRecordingsResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.apps.meet.v2beta.ListRecordingsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.apps.meet.v2beta.ListRecordingsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.apps.meet.v2beta.ListRecordingsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.apps.meet.v2beta.ListRecordingsResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.apps.meet.v2beta.ListRecordingsResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.apps.meet.v2beta.ListRecordingsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.apps.meet.v2beta.ListRecordingsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.apps.meet.v2beta.ListRecordingsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
  /**
   *
   *
   * <pre>
   * Response for ListRecordings method.
   * </pre>
   *
   * Protobuf type {@code google.apps.meet.v2beta.ListRecordingsResponse}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.apps.meet.v2beta.ListRecordingsResponse)
      com.google.apps.meet.v2beta.ListRecordingsResponseOrBuilder {
    // NOTE(review): protoc-generated builder — regenerate from the .proto
    // instead of hand-editing this class.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.apps.meet.v2beta.ServiceProto
          .internal_static_google_apps_meet_v2beta_ListRecordingsResponse_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.apps.meet.v2beta.ServiceProto
          .internal_static_google_apps_meet_v2beta_ListRecordingsResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.apps.meet.v2beta.ListRecordingsResponse.class,
              com.google.apps.meet.v2beta.ListRecordingsResponse.Builder.class);
    }
    // Construct using com.google.apps.meet.v2beta.ListRecordingsResponse.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (recordingsBuilder_ == null) {
        recordings_ = java.util.Collections.emptyList();
      } else {
        recordings_ = null;
        recordingsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.apps.meet.v2beta.ServiceProto
          .internal_static_google_apps_meet_v2beta_ListRecordingsResponse_descriptor;
    }
    @java.lang.Override
    public com.google.apps.meet.v2beta.ListRecordingsResponse getDefaultInstanceForType() {
      return com.google.apps.meet.v2beta.ListRecordingsResponse.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.apps.meet.v2beta.ListRecordingsResponse build() {
      com.google.apps.meet.v2beta.ListRecordingsResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.apps.meet.v2beta.ListRecordingsResponse buildPartial() {
      com.google.apps.meet.v2beta.ListRecordingsResponse result =
          new com.google.apps.meet.v2beta.ListRecordingsResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    private void buildPartialRepeatedFields(
        com.google.apps.meet.v2beta.ListRecordingsResponse result) {
      if (recordingsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          recordings_ = java.util.Collections.unmodifiableList(recordings_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.recordings_ = recordings_;
      } else {
        result.recordings_ = recordingsBuilder_.build();
      }
    }
    private void buildPartial0(com.google.apps.meet.v2beta.ListRecordingsResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.apps.meet.v2beta.ListRecordingsResponse) {
        return mergeFrom((com.google.apps.meet.v2beta.ListRecordingsResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(com.google.apps.meet.v2beta.ListRecordingsResponse other) {
      if (other == com.google.apps.meet.v2beta.ListRecordingsResponse.getDefaultInstance())
        return this;
      if (recordingsBuilder_ == null) {
        if (!other.recordings_.isEmpty()) {
          if (recordings_.isEmpty()) {
            recordings_ = other.recordings_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureRecordingsIsMutable();
            recordings_.addAll(other.recordings_);
          }
          onChanged();
        }
      } else {
        if (!other.recordings_.isEmpty()) {
          if (recordingsBuilder_.isEmpty()) {
            recordingsBuilder_.dispose();
            recordingsBuilder_ = null;
            recordings_ = other.recordings_;
            bitField0_ = (bitField0_ & ~0x00000001);
            recordingsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getRecordingsFieldBuilder()
                    : null;
          } else {
            recordingsBuilder_.addAllMessages(other.recordings_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // Wire tags: 10 = field 1 (recordings, length-delimited message),
          // 18 = field 2 (next_page_token, UTF-8 string).
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.apps.meet.v2beta.Recording m =
                    input.readMessage(
                        com.google.apps.meet.v2beta.Recording.parser(), extensionRegistry);
                if (recordingsBuilder_ == null) {
                  ensureRecordingsIsMutable();
                  recordings_.add(m);
                } else {
                  recordingsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Bit 0x00000001: the builder owns a private, mutable recordings_ list.
    // Bit 0x00000002: nextPageToken_ has been explicitly set.
    private int bitField0_;
    private java.util.List<com.google.apps.meet.v2beta.Recording> recordings_ =
        java.util.Collections.emptyList();
    private void ensureRecordingsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        recordings_ = new java.util.ArrayList<com.google.apps.meet.v2beta.Recording>(recordings_);
        bitField0_ |= 0x00000001;
      }
    }
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.apps.meet.v2beta.Recording,
            com.google.apps.meet.v2beta.Recording.Builder,
            com.google.apps.meet.v2beta.RecordingOrBuilder>
        recordingsBuilder_;
    /**
     *
     *
     * <pre>
     * List of recordings in one page.
     * </pre>
     *
     * <code>repeated .google.apps.meet.v2beta.Recording recordings = 1;</code>
     */
    public java.util.List<com.google.apps.meet.v2beta.Recording> getRecordingsList() {
      if (recordingsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(recordings_);
      } else {
        return recordingsBuilder_.getMessageList();
      }
    }
    /**
     *
     *
     * <pre>
     * List of recordings in one page.
     * </pre>
     *
     * <code>repeated .google.apps.meet.v2beta.Recording recordings = 1;</code>
     */
    public int getRecordingsCount() {
      if (recordingsBuilder_ == null) {
        return recordings_.size();
      } else {
        return recordingsBuilder_.getCount();
      }
    }
    /**
     *
     *
     * <pre>
     * List of recordings in one page.
     * </pre>
     *
     * <code>repeated .google.apps.meet.v2beta.Recording recordings = 1;</code>
     */
    public com.google.apps.meet.v2beta.Recording getRecordings(int index) {
      if (recordingsBuilder_ == null) {
        return recordings_.get(index);
      } else {
        return recordingsBuilder_.getMessage(index);
      }
    }
    /**
     *
     *
     * <pre>
     * List of recordings in one page.
     * </pre>
     *
     * <code>repeated .google.apps.meet.v2beta.Recording recordings = 1;</code>
     */
    public Builder setRecordings(int index, com.google.apps.meet.v2beta.Recording value) {
      if (recordingsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureRecordingsIsMutable();
        recordings_.set(index, value);
        onChanged();
      } else {
        recordingsBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * List of recordings in one page.
     * </pre>
     *
     * <code>repeated .google.apps.meet.v2beta.Recording recordings = 1;</code>
     */
    public Builder setRecordings(
        int index, com.google.apps.meet.v2beta.Recording.Builder builderForValue) {
      if (recordingsBuilder_ == null) {
        ensureRecordingsIsMutable();
        recordings_.set(index, builderForValue.build());
        onChanged();
      } else {
        recordingsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * List of recordings in one page.
     * </pre>
     *
     * <code>repeated .google.apps.meet.v2beta.Recording recordings = 1;</code>
     */
    public Builder addRecordings(com.google.apps.meet.v2beta.Recording value) {
      if (recordingsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureRecordingsIsMutable();
        recordings_.add(value);
        onChanged();
      } else {
        recordingsBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * List of recordings in one page.
     * </pre>
     *
     * <code>repeated .google.apps.meet.v2beta.Recording recordings = 1;</code>
     */
    public Builder addRecordings(int index, com.google.apps.meet.v2beta.Recording value) {
      if (recordingsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureRecordingsIsMutable();
        recordings_.add(index, value);
        onChanged();
      } else {
        recordingsBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * List of recordings in one page.
     * </pre>
     *
     * <code>repeated .google.apps.meet.v2beta.Recording recordings = 1;</code>
     */
    public Builder addRecordings(com.google.apps.meet.v2beta.Recording.Builder builderForValue) {
      if (recordingsBuilder_ == null) {
        ensureRecordingsIsMutable();
        recordings_.add(builderForValue.build());
        onChanged();
      } else {
        recordingsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * List of recordings in one page.
     * </pre>
     *
     * <code>repeated .google.apps.meet.v2beta.Recording recordings = 1;</code>
     */
    public Builder addRecordings(
        int index, com.google.apps.meet.v2beta.Recording.Builder builderForValue) {
      if (recordingsBuilder_ == null) {
        ensureRecordingsIsMutable();
        recordings_.add(index, builderForValue.build());
        onChanged();
      } else {
        recordingsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * List of recordings in one page.
     * </pre>
     *
     * <code>repeated .google.apps.meet.v2beta.Recording recordings = 1;</code>
     */
    public Builder addAllRecordings(
        java.lang.Iterable<? extends com.google.apps.meet.v2beta.Recording> values) {
      if (recordingsBuilder_ == null) {
        ensureRecordingsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, recordings_);
        onChanged();
      } else {
        recordingsBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * List of recordings in one page.
     * </pre>
     *
     * <code>repeated .google.apps.meet.v2beta.Recording recordings = 1;</code>
     */
    public Builder clearRecordings() {
      if (recordingsBuilder_ == null) {
        recordings_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        recordingsBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * List of recordings in one page.
     * </pre>
     *
     * <code>repeated .google.apps.meet.v2beta.Recording recordings = 1;</code>
     */
    public Builder removeRecordings(int index) {
      if (recordingsBuilder_ == null) {
        ensureRecordingsIsMutable();
        recordings_.remove(index);
        onChanged();
      } else {
        recordingsBuilder_.remove(index);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * List of recordings in one page.
     * </pre>
     *
     * <code>repeated .google.apps.meet.v2beta.Recording recordings = 1;</code>
     */
    public com.google.apps.meet.v2beta.Recording.Builder getRecordingsBuilder(int index) {
      return getRecordingsFieldBuilder().getBuilder(index);
    }
    /**
     *
     *
     * <pre>
     * List of recordings in one page.
     * </pre>
     *
     * <code>repeated .google.apps.meet.v2beta.Recording recordings = 1;</code>
     */
    public com.google.apps.meet.v2beta.RecordingOrBuilder getRecordingsOrBuilder(int index) {
      if (recordingsBuilder_ == null) {
        return recordings_.get(index);
      } else {
        return recordingsBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     *
     *
     * <pre>
     * List of recordings in one page.
     * </pre>
     *
     * <code>repeated .google.apps.meet.v2beta.Recording recordings = 1;</code>
     */
    public java.util.List<? extends com.google.apps.meet.v2beta.RecordingOrBuilder>
        getRecordingsOrBuilderList() {
      if (recordingsBuilder_ != null) {
        return recordingsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(recordings_);
      }
    }
    /**
     *
     *
     * <pre>
     * List of recordings in one page.
     * </pre>
     *
     * <code>repeated .google.apps.meet.v2beta.Recording recordings = 1;</code>
     */
    public com.google.apps.meet.v2beta.Recording.Builder addRecordingsBuilder() {
      return getRecordingsFieldBuilder()
          .addBuilder(com.google.apps.meet.v2beta.Recording.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * List of recordings in one page.
     * </pre>
     *
     * <code>repeated .google.apps.meet.v2beta.Recording recordings = 1;</code>
     */
    public com.google.apps.meet.v2beta.Recording.Builder addRecordingsBuilder(int index) {
      return getRecordingsFieldBuilder()
          .addBuilder(index, com.google.apps.meet.v2beta.Recording.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * List of recordings in one page.
     * </pre>
     *
     * <code>repeated .google.apps.meet.v2beta.Recording recordings = 1;</code>
     */
    public java.util.List<com.google.apps.meet.v2beta.Recording.Builder>
        getRecordingsBuilderList() {
      return getRecordingsFieldBuilder().getBuilderList();
    }
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.apps.meet.v2beta.Recording,
            com.google.apps.meet.v2beta.Recording.Builder,
            com.google.apps.meet.v2beta.RecordingOrBuilder>
        getRecordingsFieldBuilder() {
      if (recordingsBuilder_ == null) {
        recordingsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.apps.meet.v2beta.Recording,
                com.google.apps.meet.v2beta.Recording.Builder,
                com.google.apps.meet.v2beta.RecordingOrBuilder>(
                recordings_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        recordings_ = null;
      }
      return recordingsBuilder_;
    }
    // nextPageToken_ holds either a String or a ByteString; getters convert
    // lazily and cache the requested representation.
    private java.lang.Object nextPageToken_ = "";
    /**
     *
     *
     * <pre>
     * Token to be circulated back for further List call if current List doesn't
     * include all the recordings. Unset if all recordings are returned.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Token to be circulated back for further List call if current List doesn't
     * include all the recordings. Unset if all recordings are returned.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Token to be circulated back for further List call if current List doesn't
     * include all the recordings. Unset if all recordings are returned.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Token to be circulated back for further List call if current List doesn't
     * include all the recordings. Unset if all recordings are returned.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Token to be circulated back for further List call if current List doesn't
     * include all the recordings. Unset if all recordings are returned.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.apps.meet.v2beta.ListRecordingsResponse)
  }
  // @@protoc_insertion_point(class_scope:google.apps.meet.v2beta.ListRecordingsResponse)
  // Shared immutable default instance, created eagerly at class-load time.
  private static final com.google.apps.meet.v2beta.ListRecordingsResponse DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.apps.meet.v2beta.ListRecordingsResponse();
  }
  public static com.google.apps.meet.v2beta.ListRecordingsResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser that drives all parseFrom(...) entry points. On failure it attaches
  // the partially-built message to the thrown InvalidProtocolBufferException.
  private static final com.google.protobuf.Parser<ListRecordingsResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListRecordingsResponse>() {
        @java.lang.Override
        public ListRecordingsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<ListRecordingsResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListRecordingsResponse> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.apps.meet.v2beta.ListRecordingsResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v19/services/experiment_service.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v19.services;
/**
* <pre>
* Response message for
* [ExperimentService.ListExperimentAsyncErrors][google.ads.googleads.v19.services.ExperimentService.ListExperimentAsyncErrors].
* </pre>
*
* Protobuf type {@code google.ads.googleads.v19.services.ListExperimentAsyncErrorsResponse}
*/
public final class ListExperimentAsyncErrorsResponse extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v19.services.ListExperimentAsyncErrorsResponse)
ListExperimentAsyncErrorsResponseOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use ListExperimentAsyncErrorsResponse.newBuilder() to construct.
  private ListExperimentAsyncErrorsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor initializes fields to their proto3 defaults
  // (empty list, empty string).
  private ListExperimentAsyncErrorsResponse() {
    errors_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  // NOTE(review): the parameter is unused by design (see @SuppressWarnings);
  // this override simply produces a fresh default instance.
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new ListExperimentAsyncErrorsResponse();
  }
  // Descriptor wiring generated from experiment_service.proto.
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.ads.googleads.v19.services.ExperimentServiceProto.internal_static_google_ads_googleads_v19_services_ListExperimentAsyncErrorsResponse_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.ads.googleads.v19.services.ExperimentServiceProto.internal_static_google_ads_googleads_v19_services_ListExperimentAsyncErrorsResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.ads.googleads.v19.services.ListExperimentAsyncErrorsResponse.class, com.google.ads.googleads.v19.services.ListExperimentAsyncErrorsResponse.Builder.class);
  }
  public static final int ERRORS_FIELD_NUMBER = 1;
  // Field 1: immutable after construction; accessors below return it directly.
  @SuppressWarnings("serial")
  private java.util.List<com.google.rpc.Status> errors_;
  /**
   * <pre>
   * details of the errors when performing the asynchronous operation.
   * </pre>
   *
   * <code>repeated .google.rpc.Status errors = 1;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.rpc.Status> getErrorsList() {
    return errors_;
  }
  /**
   * <pre>
   * details of the errors when performing the asynchronous operation.
   * </pre>
   *
   * <code>repeated .google.rpc.Status errors = 1;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.rpc.StatusOrBuilder>
      getErrorsOrBuilderList() {
    return errors_;
  }
  /**
   * <pre>
   * details of the errors when performing the asynchronous operation.
   * </pre>
   *
   * <code>repeated .google.rpc.Status errors = 1;</code>
   */
  @java.lang.Override
  public int getErrorsCount() {
    return errors_.size();
  }
  /**
   * <pre>
   * details of the errors when performing the asynchronous operation.
   * </pre>
   *
   * <code>repeated .google.rpc.Status errors = 1;</code>
   */
  @java.lang.Override
  public com.google.rpc.Status getErrors(int index) {
    return errors_.get(index);
  }
  /**
   * <pre>
   * details of the errors when performing the asynchronous operation.
   * </pre>
   *
   * <code>repeated .google.rpc.Status errors = 1;</code>
   */
  @java.lang.Override
  public com.google.rpc.StatusOrBuilder getErrorsOrBuilder(
      int index) {
    return errors_.get(index);
  }
  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
  // Holds either a java.lang.String or a ByteString; each getter converts
  // lazily and caches the representation it produced.
  @SuppressWarnings("serial")
  private volatile java.lang.Object nextPageToken_ = "";
  /**
   * <pre>
   * Pagination token used to retrieve the next page of results.
   * Pass the content of this string as the `page_token` attribute of
   * the next request. `next_page_token` is not returned for the last
   * page.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * Pagination token used to retrieve the next page of results.
   * Pass the content of this string as the `page_token` attribute of
   * the next request. `next_page_token` is not returned for the last
   * page.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString
      getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memo: -1 = not yet computed, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  // Serializes field 1 (errors), then field 2 (next_page_token, only when
  // non-empty), then any unknown fields preserved at parse time.
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    for (int i = 0; i < errors_.size(); i++) {
      output.writeMessage(1, errors_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  // Byte size of the wire encoding; memoized (-1 sentinel means "not computed").
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < errors_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream
        .computeMessageSize(1, errors_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.ads.googleads.v19.services.ListExperimentAsyncErrorsResponse)) {
return super.equals(obj);
}
com.google.ads.googleads.v19.services.ListExperimentAsyncErrorsResponse other = (com.google.ads.googleads.v19.services.ListExperimentAsyncErrorsResponse) obj;
if (!getErrorsList()
.equals(other.getErrorsList())) return false;
if (!getNextPageToken()
.equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getErrorsCount() > 0) {
hash = (37 * hash) + ERRORS_FIELD_NUMBER;
hash = (53 * hash) + getErrorsList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
  // Standard generated parse entry points; all delegate to PARSER (with or
  // without an extension registry) for each supported input representation.
  public static com.google.ads.googleads.v19.services.ListExperimentAsyncErrorsResponse parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v19.services.ListExperimentAsyncErrorsResponse parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.services.ListExperimentAsyncErrorsResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v19.services.ListExperimentAsyncErrorsResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.services.ListExperimentAsyncErrorsResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v19.services.ListExperimentAsyncErrorsResponse parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.services.ListExperimentAsyncErrorsResponse parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v19.services.ListExperimentAsyncErrorsResponse parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.services.ListExperimentAsyncErrorsResponse parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v19.services.ListExperimentAsyncErrorsResponse parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.services.ListExperimentAsyncErrorsResponse parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v19.services.ListExperimentAsyncErrorsResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.ads.googleads.v19.services.ListExperimentAsyncErrorsResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields a fresh empty builder; any other message is
    // merged into a new builder so its field values carry over.
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
* <pre>
* Response message for
* [ExperimentService.ListExperimentAsyncErrors][google.ads.googleads.v19.services.ExperimentService.ListExperimentAsyncErrors].
* </pre>
*
* Protobuf type {@code google.ads.googleads.v19.services.ListExperimentAsyncErrorsResponse}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v19.services.ListExperimentAsyncErrorsResponse)
com.google.ads.googleads.v19.services.ListExperimentAsyncErrorsResponseOrBuilder {
    // Descriptor wiring for the builder, identical to the enclosing message's.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v19.services.ExperimentServiceProto.internal_static_google_ads_googleads_v19_services_ListExperimentAsyncErrorsResponse_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v19.services.ExperimentServiceProto.internal_static_google_ads_googleads_v19_services_ListExperimentAsyncErrorsResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v19.services.ListExperimentAsyncErrorsResponse.class, com.google.ads.googleads.v19.services.ListExperimentAsyncErrorsResponse.Builder.class);
    }
    // Construct using com.google.ads.googleads.v19.services.ListExperimentAsyncErrorsResponse.newBuilder()
    private Builder() {
    }
    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    // Resets all fields to proto3 defaults and clears the presence bits.
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (errorsBuilder_ == null) {
        errors_ = java.util.Collections.emptyList();
      } else {
        errors_ = null;
        errorsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v19.services.ExperimentServiceProto.internal_static_google_ads_googleads_v19_services_ListExperimentAsyncErrorsResponse_descriptor;
    }
    @java.lang.Override
    public com.google.ads.googleads.v19.services.ListExperimentAsyncErrorsResponse getDefaultInstanceForType() {
      return com.google.ads.googleads.v19.services.ListExperimentAsyncErrorsResponse.getDefaultInstance();
    }
    @java.lang.Override
    // Builds and verifies initialization; throws when required fields are
    // missing (per the generated isInitialized contract).
    public com.google.ads.googleads.v19.services.ListExperimentAsyncErrorsResponse build() {
      com.google.ads.googleads.v19.services.ListExperimentAsyncErrorsResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.ads.googleads.v19.services.ListExperimentAsyncErrorsResponse buildPartial() {
      com.google.ads.googleads.v19.services.ListExperimentAsyncErrorsResponse result = new com.google.ads.googleads.v19.services.ListExperimentAsyncErrorsResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) { buildPartial0(result); }
      onBuilt();
      return result;
    }
    // Transfers the repeated errors field; freezes the builder-owned list
    // (bit 0x00000001) into an unmodifiable view before handing it over.
    private void buildPartialRepeatedFields(com.google.ads.googleads.v19.services.ListExperimentAsyncErrorsResponse result) {
      if (errorsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          errors_ = java.util.Collections.unmodifiableList(errors_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.errors_ = errors_;
      } else {
        result.errors_ = errorsBuilder_.build();
      }
    }
    // Transfers scalar fields guarded by presence bits (0x00000002 =
    // nextPageToken was set).
    private void buildPartial0(com.google.ads.googleads.v19.services.ListExperimentAsyncErrorsResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.ads.googleads.v19.services.ListExperimentAsyncErrorsResponse) {
return mergeFrom((com.google.ads.googleads.v19.services.ListExperimentAsyncErrorsResponse)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.ads.googleads.v19.services.ListExperimentAsyncErrorsResponse other) {
if (other == com.google.ads.googleads.v19.services.ListExperimentAsyncErrorsResponse.getDefaultInstance()) return this;
if (errorsBuilder_ == null) {
if (!other.errors_.isEmpty()) {
if (errors_.isEmpty()) {
errors_ = other.errors_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureErrorsIsMutable();
errors_.addAll(other.errors_);
}
onChanged();
}
} else {
if (!other.errors_.isEmpty()) {
if (errorsBuilder_.isEmpty()) {
errorsBuilder_.dispose();
errorsBuilder_ = null;
errors_ = other.errors_;
bitField0_ = (bitField0_ & ~0x00000001);
errorsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getErrorsFieldBuilder() : null;
} else {
errorsBuilder_.addAllMessages(other.errors_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
com.google.rpc.Status m =
input.readMessage(
com.google.rpc.Status.parser(),
extensionRegistry);
if (errorsBuilder_ == null) {
ensureErrorsIsMutable();
errors_.add(m);
} else {
errorsBuilder_.addMessage(m);
}
break;
} // case 10
case 18: {
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.rpc.Status> errors_ =
java.util.Collections.emptyList();
private void ensureErrorsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
errors_ = new java.util.ArrayList<com.google.rpc.Status>(errors_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder> errorsBuilder_;
/**
* <pre>
* details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public java.util.List<com.google.rpc.Status> getErrorsList() {
if (errorsBuilder_ == null) {
return java.util.Collections.unmodifiableList(errors_);
} else {
return errorsBuilder_.getMessageList();
}
}
/**
* <pre>
* details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public int getErrorsCount() {
if (errorsBuilder_ == null) {
return errors_.size();
} else {
return errorsBuilder_.getCount();
}
}
/**
* <pre>
* details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public com.google.rpc.Status getErrors(int index) {
if (errorsBuilder_ == null) {
return errors_.get(index);
} else {
return errorsBuilder_.getMessage(index);
}
}
/**
* <pre>
* details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public Builder setErrors(
int index, com.google.rpc.Status value) {
if (errorsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureErrorsIsMutable();
errors_.set(index, value);
onChanged();
} else {
errorsBuilder_.setMessage(index, value);
}
return this;
}
/**
* <pre>
* details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public Builder setErrors(
int index, com.google.rpc.Status.Builder builderForValue) {
if (errorsBuilder_ == null) {
ensureErrorsIsMutable();
errors_.set(index, builderForValue.build());
onChanged();
} else {
errorsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* <pre>
* details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public Builder addErrors(com.google.rpc.Status value) {
if (errorsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureErrorsIsMutable();
errors_.add(value);
onChanged();
} else {
errorsBuilder_.addMessage(value);
}
return this;
}
/**
* <pre>
* details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public Builder addErrors(
int index, com.google.rpc.Status value) {
if (errorsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureErrorsIsMutable();
errors_.add(index, value);
onChanged();
} else {
errorsBuilder_.addMessage(index, value);
}
return this;
}
/**
* <pre>
* details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public Builder addErrors(
com.google.rpc.Status.Builder builderForValue) {
if (errorsBuilder_ == null) {
ensureErrorsIsMutable();
errors_.add(builderForValue.build());
onChanged();
} else {
errorsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* <pre>
* details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public Builder addErrors(
int index, com.google.rpc.Status.Builder builderForValue) {
if (errorsBuilder_ == null) {
ensureErrorsIsMutable();
errors_.add(index, builderForValue.build());
onChanged();
} else {
errorsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* <pre>
* details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public Builder addAllErrors(
java.lang.Iterable<? extends com.google.rpc.Status> values) {
if (errorsBuilder_ == null) {
ensureErrorsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(
values, errors_);
onChanged();
} else {
errorsBuilder_.addAllMessages(values);
}
return this;
}
/**
* <pre>
* details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public Builder clearErrors() {
if (errorsBuilder_ == null) {
errors_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
errorsBuilder_.clear();
}
return this;
}
/**
* <pre>
* details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public Builder removeErrors(int index) {
if (errorsBuilder_ == null) {
ensureErrorsIsMutable();
errors_.remove(index);
onChanged();
} else {
errorsBuilder_.remove(index);
}
return this;
}
/**
* <pre>
* details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public com.google.rpc.Status.Builder getErrorsBuilder(
int index) {
return getErrorsFieldBuilder().getBuilder(index);
}
/**
* <pre>
* details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public com.google.rpc.StatusOrBuilder getErrorsOrBuilder(
int index) {
if (errorsBuilder_ == null) {
return errors_.get(index); } else {
return errorsBuilder_.getMessageOrBuilder(index);
}
}
/**
* <pre>
* details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public java.util.List<? extends com.google.rpc.StatusOrBuilder>
getErrorsOrBuilderList() {
if (errorsBuilder_ != null) {
return errorsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(errors_);
}
}
/**
* <pre>
* details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public com.google.rpc.Status.Builder addErrorsBuilder() {
return getErrorsFieldBuilder().addBuilder(
com.google.rpc.Status.getDefaultInstance());
}
/**
* <pre>
* details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public com.google.rpc.Status.Builder addErrorsBuilder(
int index) {
return getErrorsFieldBuilder().addBuilder(
index, com.google.rpc.Status.getDefaultInstance());
}
/**
* <pre>
* details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public java.util.List<com.google.rpc.Status.Builder>
getErrorsBuilderList() {
return getErrorsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder>
getErrorsFieldBuilder() {
if (errorsBuilder_ == null) {
errorsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder>(
errors_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
errors_ = null;
}
return errorsBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
* <pre>
* Pagination token used to retrieve the next page of results.
* Pass the content of this string as the `page_token` attribute of
* the next request. `next_page_token` is not returned for the last
* page.
* </pre>
*
* <code>string next_page_token = 2;</code>
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <pre>
* Pagination token used to retrieve the next page of results.
* Pass the content of this string as the `page_token` attribute of
* the next request. `next_page_token` is not returned for the last
* page.
* </pre>
*
* <code>string next_page_token = 2;</code>
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString
getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <pre>
* Pagination token used to retrieve the next page of results.
* Pass the content of this string as the `page_token` attribute of
* the next request. `next_page_token` is not returned for the last
* page.
* </pre>
*
* <code>string next_page_token = 2;</code>
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* <pre>
* Pagination token used to retrieve the next page of results.
* Pass the content of this string as the `page_token` attribute of
* the next request. `next_page_token` is not returned for the last
* page.
* </pre>
*
* <code>string next_page_token = 2;</code>
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
* <pre>
* Pagination token used to retrieve the next page of results.
* Pass the content of this string as the `page_token` attribute of
* the next request. `next_page_token` is not returned for the last
* page.
* </pre>
*
* <code>string next_page_token = 2;</code>
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(
com.google.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v19.services.ListExperimentAsyncErrorsResponse)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v19.services.ListExperimentAsyncErrorsResponse)
private static final com.google.ads.googleads.v19.services.ListExperimentAsyncErrorsResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.ads.googleads.v19.services.ListExperimentAsyncErrorsResponse();
}
public static com.google.ads.googleads.v19.services.ListExperimentAsyncErrorsResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListExperimentAsyncErrorsResponse>
PARSER = new com.google.protobuf.AbstractParser<ListExperimentAsyncErrorsResponse>() {
@java.lang.Override
public ListExperimentAsyncErrorsResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListExperimentAsyncErrorsResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListExperimentAsyncErrorsResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.ads.googleads.v19.services.ListExperimentAsyncErrorsResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleads/google-ads-java | 36,101 | google-ads-stubs-v20/src/main/java/com/google/ads/googleads/v20/services/ListExperimentAsyncErrorsResponse.java | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v20/services/experiment_service.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v20.services;
/**
* <pre>
* Response message for
* [ExperimentService.ListExperimentAsyncErrors][google.ads.googleads.v20.services.ExperimentService.ListExperimentAsyncErrors].
* </pre>
*
* Protobuf type {@code google.ads.googleads.v20.services.ListExperimentAsyncErrorsResponse}
*/
public final class ListExperimentAsyncErrorsResponse extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v20.services.ListExperimentAsyncErrorsResponse)
ListExperimentAsyncErrorsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListExperimentAsyncErrorsResponse.newBuilder() to construct.
private ListExperimentAsyncErrorsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListExperimentAsyncErrorsResponse() {
errors_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new ListExperimentAsyncErrorsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v20.services.ExperimentServiceProto.internal_static_google_ads_googleads_v20_services_ListExperimentAsyncErrorsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v20.services.ExperimentServiceProto.internal_static_google_ads_googleads_v20_services_ListExperimentAsyncErrorsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v20.services.ListExperimentAsyncErrorsResponse.class, com.google.ads.googleads.v20.services.ListExperimentAsyncErrorsResponse.Builder.class);
}
public static final int ERRORS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.rpc.Status> errors_;
/**
* <pre>
* details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.rpc.Status> getErrorsList() {
return errors_;
}
/**
* <pre>
* details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.rpc.StatusOrBuilder>
getErrorsOrBuilderList() {
return errors_;
}
/**
* <pre>
* details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
@java.lang.Override
public int getErrorsCount() {
return errors_.size();
}
/**
* <pre>
* details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
@java.lang.Override
public com.google.rpc.Status getErrors(int index) {
return errors_.get(index);
}
/**
* <pre>
* details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
@java.lang.Override
public com.google.rpc.StatusOrBuilder getErrorsOrBuilder(
int index) {
return errors_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
* <pre>
* Pagination token used to retrieve the next page of results.
* Pass the content of this string as the `page_token` attribute of
* the next request. `next_page_token` is not returned for the last
* page.
* </pre>
*
* <code>string next_page_token = 2;</code>
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
* <pre>
* Pagination token used to retrieve the next page of results.
* Pass the content of this string as the `page_token` attribute of
* the next request. `next_page_token` is not returned for the last
* page.
* </pre>
*
* <code>string next_page_token = 2;</code>
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString
getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
for (int i = 0; i < errors_.size(); i++) {
output.writeMessage(1, errors_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < errors_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, errors_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.ads.googleads.v20.services.ListExperimentAsyncErrorsResponse)) {
return super.equals(obj);
}
com.google.ads.googleads.v20.services.ListExperimentAsyncErrorsResponse other = (com.google.ads.googleads.v20.services.ListExperimentAsyncErrorsResponse) obj;
if (!getErrorsList()
.equals(other.getErrorsList())) return false;
if (!getNextPageToken()
.equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getErrorsCount() > 0) {
hash = (37 * hash) + ERRORS_FIELD_NUMBER;
hash = (53 * hash) + getErrorsList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.ads.googleads.v20.services.ListExperimentAsyncErrorsResponse parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v20.services.ListExperimentAsyncErrorsResponse parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v20.services.ListExperimentAsyncErrorsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v20.services.ListExperimentAsyncErrorsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v20.services.ListExperimentAsyncErrorsResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v20.services.ListExperimentAsyncErrorsResponse parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v20.services.ListExperimentAsyncErrorsResponse parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v20.services.ListExperimentAsyncErrorsResponse parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v20.services.ListExperimentAsyncErrorsResponse parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v20.services.ListExperimentAsyncErrorsResponse parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v20.services.ListExperimentAsyncErrorsResponse parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v20.services.ListExperimentAsyncErrorsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.ads.googleads.v20.services.ListExperimentAsyncErrorsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* Response message for
* [ExperimentService.ListExperimentAsyncErrors][google.ads.googleads.v20.services.ExperimentService.ListExperimentAsyncErrors].
* </pre>
*
* Protobuf type {@code google.ads.googleads.v20.services.ListExperimentAsyncErrorsResponse}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v20.services.ListExperimentAsyncErrorsResponse)
com.google.ads.googleads.v20.services.ListExperimentAsyncErrorsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v20.services.ExperimentServiceProto.internal_static_google_ads_googleads_v20_services_ListExperimentAsyncErrorsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v20.services.ExperimentServiceProto.internal_static_google_ads_googleads_v20_services_ListExperimentAsyncErrorsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v20.services.ListExperimentAsyncErrorsResponse.class, com.google.ads.googleads.v20.services.ListExperimentAsyncErrorsResponse.Builder.class);
}
// Construct using com.google.ads.googleads.v20.services.ListExperimentAsyncErrorsResponse.newBuilder()
private Builder() {
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (errorsBuilder_ == null) {
errors_ = java.util.Collections.emptyList();
} else {
errors_ = null;
errorsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.ads.googleads.v20.services.ExperimentServiceProto.internal_static_google_ads_googleads_v20_services_ListExperimentAsyncErrorsResponse_descriptor;
}
@java.lang.Override
public com.google.ads.googleads.v20.services.ListExperimentAsyncErrorsResponse getDefaultInstanceForType() {
return com.google.ads.googleads.v20.services.ListExperimentAsyncErrorsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.ads.googleads.v20.services.ListExperimentAsyncErrorsResponse build() {
com.google.ads.googleads.v20.services.ListExperimentAsyncErrorsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.ads.googleads.v20.services.ListExperimentAsyncErrorsResponse buildPartial() {
com.google.ads.googleads.v20.services.ListExperimentAsyncErrorsResponse result = new com.google.ads.googleads.v20.services.ListExperimentAsyncErrorsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartialRepeatedFields(com.google.ads.googleads.v20.services.ListExperimentAsyncErrorsResponse result) {
if (errorsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
errors_ = java.util.Collections.unmodifiableList(errors_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.errors_ = errors_;
} else {
result.errors_ = errorsBuilder_.build();
}
}
private void buildPartial0(com.google.ads.googleads.v20.services.ListExperimentAsyncErrorsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
    // Dispatches to the type-specific merge when the argument is the same
    // message type; otherwise falls back to reflective field-by-field merging.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.ads.googleads.v20.services.ListExperimentAsyncErrorsResponse) {
        return mergeFrom((com.google.ads.googleads.v20.services.ListExperimentAsyncErrorsResponse)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Merges another message of the same type into this builder: repeated
    // 'errors' entries are appended, and a non-empty next_page_token overwrites.
    public Builder mergeFrom(com.google.ads.googleads.v20.services.ListExperimentAsyncErrorsResponse other) {
      if (other == com.google.ads.googleads.v20.services.ListExperimentAsyncErrorsResponse.getDefaultInstance()) return this;
      if (errorsBuilder_ == null) {
        // Plain-list mode: adopt the other message's (immutable) list when ours
        // is empty; otherwise copy-on-write and append.
        if (!other.errors_.isEmpty()) {
          if (errors_.isEmpty()) {
            errors_ = other.errors_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureErrorsIsMutable();
            errors_.addAll(other.errors_);
          }
          onChanged();
        }
      } else {
        // Field-builder mode: an empty builder is discarded and replaced by the
        // shared list to avoid copying; otherwise messages are appended.
        if (!other.errors_.isEmpty()) {
          if (errorsBuilder_.isEmpty()) {
            errorsBuilder_.dispose();
            errorsBuilder_ = null;
            errors_ = other.errors_;
            bitField0_ = (bitField0_ & ~0x00000001);
            errorsBuilder_ =
              com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                 getErrorsFieldBuilder() : null;
          } else {
            errorsBuilder_.addAllMessages(other.errors_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    // The message declares no required fields, so every instance is initialized.
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Parses fields from the wire format tag-by-tag until end of input (tag 0)
    // or an end-group tag; unknown fields are preserved via parseUnknownField.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10: {
              // Field 1 (errors), wire type 2: length-delimited google.rpc.Status.
              com.google.rpc.Status m =
                  input.readMessage(
                      com.google.rpc.Status.parser(),
                      extensionRegistry);
              if (errorsBuilder_ == null) {
                ensureErrorsIsMutable();
                errors_.add(m);
              } else {
                errorsBuilder_.addMessage(m);
              }
              break;
            } // case 10
            case 18: {
              // Field 2 (next_page_token), wire type 2: UTF-8 validated string.
              nextPageToken_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000002;
              break;
            } // case 18
            default: {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Presence bits: 0x1 = 'errors' list is builder-owned (mutable), 0x2 = next_page_token set.
    private int bitField0_;
    // Backing list for 'errors'; shared/immutable until a mutation forces a copy.
    private java.util.List<com.google.rpc.Status> errors_ =
      java.util.Collections.emptyList();
    // Copy-on-write: replaces a shared list with a private ArrayList before mutation.
    private void ensureErrorsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        errors_ = new java.util.ArrayList<com.google.rpc.Status>(errors_);
        bitField0_ |= 0x00000001;
       }
    }
    // Lazily-created nested-builder support for 'errors'; null while in plain-list mode.
    private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder> errorsBuilder_;
    // --- Accessors and mutators for the repeated 'errors' field. Each method
    // --- handles both storage modes: the plain backing list (errorsBuilder_ == null)
    // --- and the RepeatedFieldBuilderV3 created once a nested builder is requested.
    /**
     * <pre>
     * details of the errors when performing the asynchronous operation.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 1;</code>
     */
    public java.util.List<com.google.rpc.Status> getErrorsList() {
      if (errorsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(errors_);
      } else {
        return errorsBuilder_.getMessageList();
      }
    }
    /**
     * <pre>
     * details of the errors when performing the asynchronous operation.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 1;</code>
     */
    public int getErrorsCount() {
      if (errorsBuilder_ == null) {
        return errors_.size();
      } else {
        return errorsBuilder_.getCount();
      }
    }
    /**
     * <pre>
     * details of the errors when performing the asynchronous operation.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 1;</code>
     */
    public com.google.rpc.Status getErrors(int index) {
      if (errorsBuilder_ == null) {
        return errors_.get(index);
      } else {
        return errorsBuilder_.getMessage(index);
      }
    }
    /**
     * <pre>
     * details of the errors when performing the asynchronous operation.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 1;</code>
     */
    public Builder setErrors(
        int index, com.google.rpc.Status value) {
      if (errorsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureErrorsIsMutable();
        errors_.set(index, value);
        onChanged();
      } else {
        errorsBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     * <pre>
     * details of the errors when performing the asynchronous operation.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 1;</code>
     */
    public Builder setErrors(
        int index, com.google.rpc.Status.Builder builderForValue) {
      if (errorsBuilder_ == null) {
        ensureErrorsIsMutable();
        errors_.set(index, builderForValue.build());
        onChanged();
      } else {
        errorsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     * <pre>
     * details of the errors when performing the asynchronous operation.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 1;</code>
     */
    public Builder addErrors(com.google.rpc.Status value) {
      if (errorsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureErrorsIsMutable();
        errors_.add(value);
        onChanged();
      } else {
        errorsBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     * <pre>
     * details of the errors when performing the asynchronous operation.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 1;</code>
     */
    public Builder addErrors(
        int index, com.google.rpc.Status value) {
      if (errorsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureErrorsIsMutable();
        errors_.add(index, value);
        onChanged();
      } else {
        errorsBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     * <pre>
     * details of the errors when performing the asynchronous operation.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 1;</code>
     */
    public Builder addErrors(
        com.google.rpc.Status.Builder builderForValue) {
      if (errorsBuilder_ == null) {
        ensureErrorsIsMutable();
        errors_.add(builderForValue.build());
        onChanged();
      } else {
        errorsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     * <pre>
     * details of the errors when performing the asynchronous operation.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 1;</code>
     */
    public Builder addErrors(
        int index, com.google.rpc.Status.Builder builderForValue) {
      if (errorsBuilder_ == null) {
        ensureErrorsIsMutable();
        errors_.add(index, builderForValue.build());
        onChanged();
      } else {
        errorsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     * <pre>
     * details of the errors when performing the asynchronous operation.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 1;</code>
     */
    public Builder addAllErrors(
        java.lang.Iterable<? extends com.google.rpc.Status> values) {
      if (errorsBuilder_ == null) {
        ensureErrorsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(
            values, errors_);
        onChanged();
      } else {
        errorsBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     * <pre>
     * details of the errors when performing the asynchronous operation.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 1;</code>
     */
    public Builder clearErrors() {
      if (errorsBuilder_ == null) {
        errors_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        errorsBuilder_.clear();
      }
      return this;
    }
    /**
     * <pre>
     * details of the errors when performing the asynchronous operation.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 1;</code>
     */
    public Builder removeErrors(int index) {
      if (errorsBuilder_ == null) {
        ensureErrorsIsMutable();
        errors_.remove(index);
        onChanged();
      } else {
        errorsBuilder_.remove(index);
      }
      return this;
    }
    // --- Builder-view accessors for 'errors'. Calling any of these switches the
    // --- field into RepeatedFieldBuilderV3 mode (see getErrorsFieldBuilder).
    /**
     * <pre>
     * details of the errors when performing the asynchronous operation.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 1;</code>
     */
    public com.google.rpc.Status.Builder getErrorsBuilder(
        int index) {
      return getErrorsFieldBuilder().getBuilder(index);
    }
    /**
     * <pre>
     * details of the errors when performing the asynchronous operation.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 1;</code>
     */
    public com.google.rpc.StatusOrBuilder getErrorsOrBuilder(
        int index) {
      if (errorsBuilder_ == null) {
        return errors_.get(index); } else {
        return errorsBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     * <pre>
     * details of the errors when performing the asynchronous operation.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 1;</code>
     */
    public java.util.List<? extends com.google.rpc.StatusOrBuilder>
         getErrorsOrBuilderList() {
      if (errorsBuilder_ != null) {
        return errorsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(errors_);
      }
    }
    /**
     * <pre>
     * details of the errors when performing the asynchronous operation.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 1;</code>
     */
    public com.google.rpc.Status.Builder addErrorsBuilder() {
      return getErrorsFieldBuilder().addBuilder(
          com.google.rpc.Status.getDefaultInstance());
    }
    /**
     * <pre>
     * details of the errors when performing the asynchronous operation.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 1;</code>
     */
    public com.google.rpc.Status.Builder addErrorsBuilder(
        int index) {
      return getErrorsFieldBuilder().addBuilder(
          index, com.google.rpc.Status.getDefaultInstance());
    }
    /**
     * <pre>
     * details of the errors when performing the asynchronous operation.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 1;</code>
     */
    public java.util.List<com.google.rpc.Status.Builder>
         getErrorsBuilderList() {
      return getErrorsFieldBuilder().getBuilderList();
    }
    // Lazily creates the field builder, handing it the current list and its
    // mutability bit, then nulls the plain list so the builder is authoritative.
    private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder>
        getErrorsFieldBuilder() {
      if (errorsBuilder_ == null) {
        errorsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder>(
                errors_,
                ((bitField0_ & 0x00000001) != 0),
                getParentForChildren(),
                isClean());
        errors_ = null;
      }
      return errorsBuilder_;
    }
    // Stores either a String or a ByteString; lazily converted and cached on read.
    private java.lang.Object nextPageToken_ = "";
    /**
     * <pre>
     * Pagination token used to retrieve the next page of results.
     * Pass the content of this string as the `page_token` attribute of
     * the next request. `next_page_token` is not returned for the last
     * page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * Pagination token used to retrieve the next page of results.
     * Pass the content of this string as the `page_token` attribute of
     * the next request. `next_page_token` is not returned for the last
     * page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString
        getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    // --- Mutators for next_page_token; each sets presence bit 0x2.
    /**
     * <pre>
     * Pagination token used to retrieve the next page of results.
     * Pass the content of this string as the `page_token` attribute of
     * the next request. `next_page_token` is not returned for the last
     * page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(
        java.lang.String value) {
      if (value == null) { throw new NullPointerException(); }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Pagination token used to retrieve the next page of results.
     * Pass the content of this string as the `page_token` attribute of
     * the next request. `next_page_token` is not returned for the last
     * page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Pagination token used to retrieve the next page of results.
     * Pass the content of this string as the `page_token` attribute of
     * the next request. `next_page_token` is not returned for the last
     * page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) { throw new NullPointerException(); }
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    // Return-type-narrowing delegations for unknown-field handling.
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v20.services.ListExperimentAsyncErrorsResponse)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v20.services.ListExperimentAsyncErrorsResponse)
  // Singleton default instance: all fields at default values; shared by getDefaultInstance().
  private static final com.google.ads.googleads.v20.services.ListExperimentAsyncErrorsResponse DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v20.services.ListExperimentAsyncErrorsResponse();
  }
  public static com.google.ads.googleads.v20.services.ListExperimentAsyncErrorsResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser: builds via the Builder and attaches the partially-parsed
  // message to any exception so callers can inspect what was read before failure.
  private static final com.google.protobuf.Parser<ListExperimentAsyncErrorsResponse>
      PARSER = new com.google.protobuf.AbstractParser<ListExperimentAsyncErrorsResponse>() {
    @java.lang.Override
    public ListExperimentAsyncErrorsResponse parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (com.google.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };
  public static com.google.protobuf.Parser<ListExperimentAsyncErrorsResponse> parser() {
    return PARSER;
  }
  // Instance-level accessors for the shared parser and default instance.
  @java.lang.Override
  public com.google.protobuf.Parser<ListExperimentAsyncErrorsResponse> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.ads.googleads.v20.services.ListExperimentAsyncErrorsResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v21/services/experiment_service.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v21.services;
/**
* <pre>
* Response message for
* [ExperimentService.ListExperimentAsyncErrors][google.ads.googleads.v21.services.ExperimentService.ListExperimentAsyncErrors].
* </pre>
*
* Protobuf type {@code google.ads.googleads.v21.services.ListExperimentAsyncErrorsResponse}
*/
public final class ListExperimentAsyncErrorsResponse extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v21.services.ListExperimentAsyncErrorsResponse)
ListExperimentAsyncErrorsResponseOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use ListExperimentAsyncErrorsResponse.newBuilder() to construct.
  private ListExperimentAsyncErrorsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default constructor: empty errors list and empty page token.
  private ListExperimentAsyncErrorsResponse() {
    errors_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }
  // Reflection hook used by the protobuf runtime to create fresh instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new ListExperimentAsyncErrorsResponse();
  }
  // Descriptor and field-accessor table wiring generated from experiment_service.proto.
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.ads.googleads.v21.services.ExperimentServiceProto.internal_static_google_ads_googleads_v21_services_ListExperimentAsyncErrorsResponse_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.ads.googleads.v21.services.ExperimentServiceProto.internal_static_google_ads_googleads_v21_services_ListExperimentAsyncErrorsResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.ads.googleads.v21.services.ListExperimentAsyncErrorsResponse.class, com.google.ads.googleads.v21.services.ListExperimentAsyncErrorsResponse.Builder.class);
  }
  public static final int ERRORS_FIELD_NUMBER = 1;
  // Immutable list populated at parse/build time; safe to expose directly.
  @SuppressWarnings("serial")
  private java.util.List<com.google.rpc.Status> errors_;
  /**
   * <pre>
   * details of the errors when performing the asynchronous operation.
   * </pre>
   *
   * <code>repeated .google.rpc.Status errors = 1;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.rpc.Status> getErrorsList() {
    return errors_;
  }
  /**
   * <pre>
   * details of the errors when performing the asynchronous operation.
   * </pre>
   *
   * <code>repeated .google.rpc.Status errors = 1;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.rpc.StatusOrBuilder>
      getErrorsOrBuilderList() {
    return errors_;
  }
  /**
   * <pre>
   * details of the errors when performing the asynchronous operation.
   * </pre>
   *
   * <code>repeated .google.rpc.Status errors = 1;</code>
   */
  @java.lang.Override
  public int getErrorsCount() {
    return errors_.size();
  }
  /**
   * <pre>
   * details of the errors when performing the asynchronous operation.
   * </pre>
   *
   * <code>repeated .google.rpc.Status errors = 1;</code>
   */
  @java.lang.Override
  public com.google.rpc.Status getErrors(int index) {
    return errors_.get(index);
  }
  /**
   * <pre>
   * details of the errors when performing the asynchronous operation.
   * </pre>
   *
   * <code>repeated .google.rpc.Status errors = 1;</code>
   */
  @java.lang.Override
  public com.google.rpc.StatusOrBuilder getErrorsOrBuilder(
      int index) {
    return errors_.get(index);
  }
  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
  // Stores either a String or a ByteString; lazily converted and cached on first read.
  @SuppressWarnings("serial")
  private volatile java.lang.Object nextPageToken_ = "";
  /**
   * <pre>
   * Pagination token used to retrieve the next page of results.
   * Pass the content of this string as the `page_token` attribute of
   * the next request. `next_page_token` is not returned for the last
   * page.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * Pagination token used to retrieve the next page of results.
   * Pass the content of this string as the `page_token` attribute of
   * the next request. `next_page_token` is not returned for the last
   * page.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString
      getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields, so the first check always succeeds and is cached.
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes fields in tag order: errors (1), next_page_token (2), then unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    for (int i = 0; i < errors_.size(); i++) {
      output.writeMessage(1, errors_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }
  // Computes and memoizes the wire size (memoizedSize == -1 means not yet computed).
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < errors_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream
        .computeMessageSize(1, errors_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Field-wise equality, including unknown fields; falls back to super for other types.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.ads.googleads.v21.services.ListExperimentAsyncErrorsResponse)) {
      return super.equals(obj);
    }
    com.google.ads.googleads.v21.services.ListExperimentAsyncErrorsResponse other = (com.google.ads.googleads.v21.services.ListExperimentAsyncErrorsResponse) obj;
    if (!getErrorsList()
        .equals(other.getErrorsList())) return false;
    if (!getNextPageToken()
        .equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Memoized hash over descriptor, set fields (by field number), and unknown fields.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getErrorsCount() > 0) {
      hash = (37 * hash) + ERRORS_FIELD_NUMBER;
      hash = (53 * hash) + getErrorsList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // --- Static parse entry points for every supported input kind (ByteBuffer,
  // --- ByteString, byte[], InputStream, CodedInputStream), with and without an
  // --- extension registry; all delegate to the shared PARSER.
  public static com.google.ads.googleads.v21.services.ListExperimentAsyncErrorsResponse parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v21.services.ListExperimentAsyncErrorsResponse parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.services.ListExperimentAsyncErrorsResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v21.services.ListExperimentAsyncErrorsResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.services.ListExperimentAsyncErrorsResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v21.services.ListExperimentAsyncErrorsResponse parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.services.ListExperimentAsyncErrorsResponse parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v21.services.ListExperimentAsyncErrorsResponse parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.services.ListExperimentAsyncErrorsResponse parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v21.services.ListExperimentAsyncErrorsResponse parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.services.ListExperimentAsyncErrorsResponse parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v21.services.ListExperimentAsyncErrorsResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  // --- Builder factories; toBuilder() avoids copying when called on the default instance.
  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.ads.googleads.v21.services.ListExperimentAsyncErrorsResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
* <pre>
* Response message for
* [ExperimentService.ListExperimentAsyncErrors][google.ads.googleads.v21.services.ExperimentService.ListExperimentAsyncErrors].
* </pre>
*
* Protobuf type {@code google.ads.googleads.v21.services.ListExperimentAsyncErrorsResponse}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v21.services.ListExperimentAsyncErrorsResponse)
com.google.ads.googleads.v21.services.ListExperimentAsyncErrorsResponseOrBuilder {
    // Descriptor wiring for the Builder, mirroring the enclosing message class.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v21.services.ExperimentServiceProto.internal_static_google_ads_googleads_v21_services_ListExperimentAsyncErrorsResponse_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v21.services.ExperimentServiceProto.internal_static_google_ads_googleads_v21_services_ListExperimentAsyncErrorsResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v21.services.ListExperimentAsyncErrorsResponse.class, com.google.ads.googleads.v21.services.ListExperimentAsyncErrorsResponse.Builder.class);
    }
    // Construct using com.google.ads.googleads.v21.services.ListExperimentAsyncErrorsResponse.newBuilder()
    private Builder() {
    }
    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    // Resets every field to its default, covering both list storage modes.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (errorsBuilder_ == null) {
        errors_ = java.util.Collections.emptyList();
      } else {
        errors_ = null;
        errorsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }
    // Type-description accessors required by the Message.Builder contract.
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v21.services.ExperimentServiceProto.internal_static_google_ads_googleads_v21_services_ListExperimentAsyncErrorsResponse_descriptor;
    }
    @java.lang.Override
    public com.google.ads.googleads.v21.services.ListExperimentAsyncErrorsResponse getDefaultInstanceForType() {
      return com.google.ads.googleads.v21.services.ListExperimentAsyncErrorsResponse.getDefaultInstance();
    }
    // build() enforces the (vacuous) initialization check; buildPartial() does the
    // actual field transfer via the two helpers below.
    @java.lang.Override
    public com.google.ads.googleads.v21.services.ListExperimentAsyncErrorsResponse build() {
      com.google.ads.googleads.v21.services.ListExperimentAsyncErrorsResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.ads.googleads.v21.services.ListExperimentAsyncErrorsResponse buildPartial() {
      com.google.ads.googleads.v21.services.ListExperimentAsyncErrorsResponse result = new com.google.ads.googleads.v21.services.ListExperimentAsyncErrorsResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) { buildPartial0(result); }
      onBuilt();
      return result;
    }
    // Freezes/transfers the repeated 'errors' field into the result.
    private void buildPartialRepeatedFields(com.google.ads.googleads.v21.services.ListExperimentAsyncErrorsResponse result) {
      if (errorsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          errors_ = java.util.Collections.unmodifiableList(errors_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.errors_ = errors_;
      } else {
        result.errors_ = errorsBuilder_.build();
      }
    }
    // Copies singular fields whose presence bit is set (0x2 = next_page_token).
    private void buildPartial0(com.google.ads.googleads.v21.services.ListExperimentAsyncErrorsResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }
    // Return-type-narrowing delegations to GeneratedMessageV3.Builder.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    // Dispatches to the type-specific merge when possible; otherwise reflective merge.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.ads.googleads.v21.services.ListExperimentAsyncErrorsResponse) {
        return mergeFrom((com.google.ads.googleads.v21.services.ListExperimentAsyncErrorsResponse)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Appends the other message's errors and overwrites next_page_token when non-empty.
    public Builder mergeFrom(com.google.ads.googleads.v21.services.ListExperimentAsyncErrorsResponse other) {
      if (other == com.google.ads.googleads.v21.services.ListExperimentAsyncErrorsResponse.getDefaultInstance()) return this;
      if (errorsBuilder_ == null) {
        if (!other.errors_.isEmpty()) {
          if (errors_.isEmpty()) {
            errors_ = other.errors_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureErrorsIsMutable();
            errors_.addAll(other.errors_);
          }
          onChanged();
        }
      } else {
        if (!other.errors_.isEmpty()) {
          if (errorsBuilder_.isEmpty()) {
            errorsBuilder_.dispose();
            errorsBuilder_ = null;
            errors_ = other.errors_;
            bitField0_ = (bitField0_ & ~0x00000001);
            errorsBuilder_ =
              com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                 getErrorsFieldBuilder() : null;
          } else {
            errorsBuilder_.addAllMessages(other.errors_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    // Wire-format parse: reads tag/value pairs from the stream until tag 0
    // (end of input) and merges each recognized field into this builder.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10: {
              // field 1 (errors): length-delimited google.rpc.Status message.
              com.google.rpc.Status m =
                  input.readMessage(
                      com.google.rpc.Status.parser(),
                      extensionRegistry);
              if (errorsBuilder_ == null) {
                ensureErrorsIsMutable();
                errors_.add(m);
              } else {
                errorsBuilder_.addMessage(m);
              }
              break;
            } // case 10
            case 18: {
              // field 2 (next_page_token): UTF-8 validated string.
              nextPageToken_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000002;
              break;
            } // case 18
            default: {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Run even on error: fields parsed before the failure have landed.
        onChanged();
      } // finally
      return this;
    }
    private int bitField0_;  // bit 0x1: errors_ list is privately owned/mutable
    // Backing storage for `errors`; starts as the shared immutable empty list.
    private java.util.List<com.google.rpc.Status> errors_ =
      java.util.Collections.emptyList();
    // Copy-on-write: replaces the (possibly shared) list with a private
    // ArrayList the first time a mutation is requested.
    private void ensureErrorsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        errors_ = new java.util.ArrayList<com.google.rpc.Status>(errors_);
        bitField0_ |= 0x00000001;
      }
    }
    // Lazily-created helper that manages nested builders for `errors`;
    // once non-null it owns the element list (see getErrorsFieldBuilder()).
    private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder> errorsBuilder_;
/**
* <pre>
* details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public java.util.List<com.google.rpc.Status> getErrorsList() {
if (errorsBuilder_ == null) {
return java.util.Collections.unmodifiableList(errors_);
} else {
return errorsBuilder_.getMessageList();
}
}
/**
* <pre>
* details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public int getErrorsCount() {
if (errorsBuilder_ == null) {
return errors_.size();
} else {
return errorsBuilder_.getCount();
}
}
/**
* <pre>
* details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public com.google.rpc.Status getErrors(int index) {
if (errorsBuilder_ == null) {
return errors_.get(index);
} else {
return errorsBuilder_.getMessage(index);
}
}
/**
* <pre>
* details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public Builder setErrors(
int index, com.google.rpc.Status value) {
if (errorsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureErrorsIsMutable();
errors_.set(index, value);
onChanged();
} else {
errorsBuilder_.setMessage(index, value);
}
return this;
}
/**
* <pre>
* details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public Builder setErrors(
int index, com.google.rpc.Status.Builder builderForValue) {
if (errorsBuilder_ == null) {
ensureErrorsIsMutable();
errors_.set(index, builderForValue.build());
onChanged();
} else {
errorsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* <pre>
* details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public Builder addErrors(com.google.rpc.Status value) {
if (errorsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureErrorsIsMutable();
errors_.add(value);
onChanged();
} else {
errorsBuilder_.addMessage(value);
}
return this;
}
/**
* <pre>
* details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public Builder addErrors(
int index, com.google.rpc.Status value) {
if (errorsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureErrorsIsMutable();
errors_.add(index, value);
onChanged();
} else {
errorsBuilder_.addMessage(index, value);
}
return this;
}
/**
* <pre>
* details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public Builder addErrors(
com.google.rpc.Status.Builder builderForValue) {
if (errorsBuilder_ == null) {
ensureErrorsIsMutable();
errors_.add(builderForValue.build());
onChanged();
} else {
errorsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* <pre>
* details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public Builder addErrors(
int index, com.google.rpc.Status.Builder builderForValue) {
if (errorsBuilder_ == null) {
ensureErrorsIsMutable();
errors_.add(index, builderForValue.build());
onChanged();
} else {
errorsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* <pre>
* details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public Builder addAllErrors(
java.lang.Iterable<? extends com.google.rpc.Status> values) {
if (errorsBuilder_ == null) {
ensureErrorsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(
values, errors_);
onChanged();
} else {
errorsBuilder_.addAllMessages(values);
}
return this;
}
/**
* <pre>
* details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public Builder clearErrors() {
if (errorsBuilder_ == null) {
errors_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
errorsBuilder_.clear();
}
return this;
}
/**
* <pre>
* details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public Builder removeErrors(int index) {
if (errorsBuilder_ == null) {
ensureErrorsIsMutable();
errors_.remove(index);
onChanged();
} else {
errorsBuilder_.remove(index);
}
return this;
}
/**
* <pre>
* details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public com.google.rpc.Status.Builder getErrorsBuilder(
int index) {
return getErrorsFieldBuilder().getBuilder(index);
}
/**
* <pre>
* details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public com.google.rpc.StatusOrBuilder getErrorsOrBuilder(
int index) {
if (errorsBuilder_ == null) {
return errors_.get(index); } else {
return errorsBuilder_.getMessageOrBuilder(index);
}
}
/**
* <pre>
* details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public java.util.List<? extends com.google.rpc.StatusOrBuilder>
getErrorsOrBuilderList() {
if (errorsBuilder_ != null) {
return errorsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(errors_);
}
}
/**
* <pre>
* details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public com.google.rpc.Status.Builder addErrorsBuilder() {
return getErrorsFieldBuilder().addBuilder(
com.google.rpc.Status.getDefaultInstance());
}
/**
* <pre>
* details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public com.google.rpc.Status.Builder addErrorsBuilder(
int index) {
return getErrorsFieldBuilder().addBuilder(
index, com.google.rpc.Status.getDefaultInstance());
}
/**
* <pre>
* details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public java.util.List<com.google.rpc.Status.Builder>
getErrorsBuilderList() {
return getErrorsFieldBuilder().getBuilderList();
}
    // Creates the repeated-field builder on first use; afterwards the builder
    // owns the element list and the plain errors_ reference is dropped.
    private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder>
        getErrorsFieldBuilder() {
      if (errorsBuilder_ == null) {
        errorsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder>(
                errors_,
                ((bitField0_ & 0x00000001) != 0),  // whether errors_ is privately owned
                getParentForChildren(),
                isClean());
        errors_ = null;  // ownership transferred to the builder
      }
      return errorsBuilder_;
    }
private java.lang.Object nextPageToken_ = "";
/**
* <pre>
* Pagination token used to retrieve the next page of results.
* Pass the content of this string as the `page_token` attribute of
* the next request. `next_page_token` is not returned for the last
* page.
* </pre>
*
* <code>string next_page_token = 2;</code>
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <pre>
* Pagination token used to retrieve the next page of results.
* Pass the content of this string as the `page_token` attribute of
* the next request. `next_page_token` is not returned for the last
* page.
* </pre>
*
* <code>string next_page_token = 2;</code>
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString
getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <pre>
* Pagination token used to retrieve the next page of results.
* Pass the content of this string as the `page_token` attribute of
* the next request. `next_page_token` is not returned for the last
* page.
* </pre>
*
* <code>string next_page_token = 2;</code>
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* <pre>
* Pagination token used to retrieve the next page of results.
* Pass the content of this string as the `page_token` attribute of
* the next request. `next_page_token` is not returned for the last
* page.
* </pre>
*
* <code>string next_page_token = 2;</code>
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
* <pre>
* Pagination token used to retrieve the next page of results.
* Pass the content of this string as the `page_token` attribute of
* the next request. `next_page_token` is not returned for the last
* page.
* </pre>
*
* <code>string next_page_token = 2;</code>
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(
com.google.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v21.services.ListExperimentAsyncErrorsResponse)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v21.services.ListExperimentAsyncErrorsResponse)
  // The canonical immutable instance with every field at its default value.
  private static final com.google.ads.googleads.v21.services.ListExperimentAsyncErrorsResponse DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v21.services.ListExperimentAsyncErrorsResponse();
  }
  // Returns the singleton default instance; also used as the merge no-op sentinel.
  public static com.google.ads.googleads.v21.services.ListExperimentAsyncErrorsResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Stateless parser shared by all callers; delegates the actual wire-format
  // decoding to the builder-based mergeFrom.
  private static final com.google.protobuf.Parser<ListExperimentAsyncErrorsResponse>
      PARSER = new com.google.protobuf.AbstractParser<ListExperimentAsyncErrorsResponse>() {
    @java.lang.Override
    public ListExperimentAsyncErrorsResponse parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Attach whatever was parsed so callers can inspect partial data.
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (com.google.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };
public static com.google.protobuf.Parser<ListExperimentAsyncErrorsResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListExperimentAsyncErrorsResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.ads.googleads.v21.services.ListExperimentAsyncErrorsResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,011 | java-shopping-merchant-accounts/proto-google-shopping-merchant-accounts-v1beta/src/main/java/com/google/shopping/merchant/accounts/v1beta/UpdateBusinessIdentityRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/shopping/merchant/accounts/v1beta/businessidentity.proto
// Protobuf Java Version: 3.25.8
package com.google.shopping.merchant.accounts.v1beta;
/**
*
*
* <pre>
* Request message for the `UpdateBusinessIdentity` method.
* </pre>
*
* Protobuf type {@code google.shopping.merchant.accounts.v1beta.UpdateBusinessIdentityRequest}
*/
public final class UpdateBusinessIdentityRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.shopping.merchant.accounts.v1beta.UpdateBusinessIdentityRequest)
UpdateBusinessIdentityRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use UpdateBusinessIdentityRequest.newBuilder() to construct.
  private UpdateBusinessIdentityRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // All-defaults instance; fields are initialized lazily to their defaults.
  private UpdateBusinessIdentityRequest() {}
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    // Called reflectively by the protobuf runtime to allocate fresh instances.
    return new UpdateBusinessIdentityRequest();
  }
  // Descriptor for this message type, owned by the generated file-level class.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.shopping.merchant.accounts.v1beta.BusinessIdentityProto
        .internal_static_google_shopping_merchant_accounts_v1beta_UpdateBusinessIdentityRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    // Binds descriptor fields to the generated accessors for reflection.
    return com.google.shopping.merchant.accounts.v1beta.BusinessIdentityProto
        .internal_static_google_shopping_merchant_accounts_v1beta_UpdateBusinessIdentityRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.shopping.merchant.accounts.v1beta.UpdateBusinessIdentityRequest.class,
            com.google.shopping.merchant.accounts.v1beta.UpdateBusinessIdentityRequest.Builder
                .class);
  }
  private int bitField0_;  // presence bits: 0x1 = business_identity, 0x2 = update_mask
  public static final int BUSINESS_IDENTITY_FIELD_NUMBER = 1;
  private com.google.shopping.merchant.accounts.v1beta.BusinessIdentity businessIdentity_;
  /**
   * Whether {@code business_identity} (required field 1) is set.
   *
   * <code>
   * .google.shopping.merchant.accounts.v1beta.BusinessIdentity business_identity = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the businessIdentity field is set.
   */
  @java.lang.Override
  public boolean hasBusinessIdentity() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   * The new version of the business identity (required field 1).
   *
   * <code>
   * .google.shopping.merchant.accounts.v1beta.BusinessIdentity business_identity = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The businessIdentity, or its default instance when unset (never null).
   */
  @java.lang.Override
  public com.google.shopping.merchant.accounts.v1beta.BusinessIdentity getBusinessIdentity() {
    return businessIdentity_ == null
        ? com.google.shopping.merchant.accounts.v1beta.BusinessIdentity.getDefaultInstance()
        : businessIdentity_;
  }
  /**
   * Read-only view of {@code business_identity} (required field 1).
   *
   * <code>
   * .google.shopping.merchant.accounts.v1beta.BusinessIdentity business_identity = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.shopping.merchant.accounts.v1beta.BusinessIdentityOrBuilder
      getBusinessIdentityOrBuilder() {
    return businessIdentity_ == null
        ? com.google.shopping.merchant.accounts.v1beta.BusinessIdentity.getDefaultInstance()
        : businessIdentity_;
  }
  public static final int UPDATE_MASK_FIELD_NUMBER = 2;
  private com.google.protobuf.FieldMask updateMask_;
  /**
   * Whether {@code update_mask} (required field 2) is set.
   *
   * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the updateMask field is set.
   */
  @java.lang.Override
  public boolean hasUpdateMask() {
    return ((bitField0_ & 0x00000002) != 0);
  }
  /**
   * The list of fields being updated (required field 2).
   *
   * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The updateMask, or its default instance when unset (never null).
   */
  @java.lang.Override
  public com.google.protobuf.FieldMask getUpdateMask() {
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }
  /**
   * Read-only view of {@code update_mask} (required field 2).
   *
   * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }
  private byte memoizedIsInitialized = -1;  // -1 = unknown, 0 = false, 1 = true
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No proto2-style required fields, so initialization always succeeds.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Serialize in ascending field-number order, emitting only present fields,
    // then append any unknown fields carried over from parsing.
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getBusinessIdentity());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(2, getUpdateMask());
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;  // -1 sentinel: not yet computed
    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getBusinessIdentity());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;  // safe to cache: the message is immutable
    return size;
  }
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj
instanceof com.google.shopping.merchant.accounts.v1beta.UpdateBusinessIdentityRequest)) {
return super.equals(obj);
}
com.google.shopping.merchant.accounts.v1beta.UpdateBusinessIdentityRequest other =
(com.google.shopping.merchant.accounts.v1beta.UpdateBusinessIdentityRequest) obj;
if (hasBusinessIdentity() != other.hasBusinessIdentity()) return false;
if (hasBusinessIdentity()) {
if (!getBusinessIdentity().equals(other.getBusinessIdentity())) return false;
}
if (hasUpdateMask() != other.hasUpdateMask()) return false;
if (hasUpdateMask()) {
if (!getUpdateMask().equals(other.getUpdateMask())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;  // 0 doubles as the "not yet computed" sentinel
    }
    // Standard generated mix: descriptor, then (field number, value) per
    // present field, then unknown fields.
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasBusinessIdentity()) {
      hash = (37 * hash) + BUSINESS_IDENTITY_FIELD_NUMBER;
      hash = (53 * hash) + getBusinessIdentity().hashCode();
    }
    if (hasUpdateMask()) {
      hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
      hash = (53 * hash) + getUpdateMask().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // ---- Static parsing entry points; all delegate to the shared PARSER. ----
  public static com.google.shopping.merchant.accounts.v1beta.UpdateBusinessIdentityRequest
      parseFrom(java.nio.ByteBuffer data)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.shopping.merchant.accounts.v1beta.UpdateBusinessIdentityRequest
      parseFrom(
          java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.shopping.merchant.accounts.v1beta.UpdateBusinessIdentityRequest
      parseFrom(com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.shopping.merchant.accounts.v1beta.UpdateBusinessIdentityRequest
      parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.shopping.merchant.accounts.v1beta.UpdateBusinessIdentityRequest
      parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.shopping.merchant.accounts.v1beta.UpdateBusinessIdentityRequest
      parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  // Stream overloads wrap I/O failures via the generated helper.
  public static com.google.shopping.merchant.accounts.v1beta.UpdateBusinessIdentityRequest
      parseFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.shopping.merchant.accounts.v1beta.UpdateBusinessIdentityRequest
      parseFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // "Delimited" variants read a length prefix first (writeDelimitedTo format).
  public static com.google.shopping.merchant.accounts.v1beta.UpdateBusinessIdentityRequest
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.shopping.merchant.accounts.v1beta.UpdateBusinessIdentityRequest
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.shopping.merchant.accounts.v1beta.UpdateBusinessIdentityRequest
      parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.shopping.merchant.accounts.v1beta.UpdateBusinessIdentityRequest
      parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  // A builder starting from all-default values.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  // A builder pre-populated with the contents of {@code prototype}.
  public static Builder newBuilder(
      com.google.shopping.merchant.accounts.v1beta.UpdateBusinessIdentityRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Skip the useless merge pass when converting the default instance itself.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Request message for the `UpdateBusinessIdentity` method.
* </pre>
*
* Protobuf type {@code google.shopping.merchant.accounts.v1beta.UpdateBusinessIdentityRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.shopping.merchant.accounts.v1beta.UpdateBusinessIdentityRequest)
com.google.shopping.merchant.accounts.v1beta.UpdateBusinessIdentityRequestOrBuilder {
    // Same message descriptor as the enclosing message class.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.shopping.merchant.accounts.v1beta.BusinessIdentityProto
          .internal_static_google_shopping_merchant_accounts_v1beta_UpdateBusinessIdentityRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.shopping.merchant.accounts.v1beta.BusinessIdentityProto
          .internal_static_google_shopping_merchant_accounts_v1beta_UpdateBusinessIdentityRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.shopping.merchant.accounts.v1beta.UpdateBusinessIdentityRequest.class,
              com.google.shopping.merchant.accounts.v1beta.UpdateBusinessIdentityRequest.Builder
                  .class);
    }
    // Construct using
    // com.google.shopping.merchant.accounts.v1beta.UpdateBusinessIdentityRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      // Eagerly create nested field builders when the runtime mandates it.
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getBusinessIdentityFieldBuilder();
        getUpdateMaskFieldBuilder();
      }
    }
    // Resets every field to its default and disposes any nested builders.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;  // drop all presence bits
      businessIdentity_ = null;
      if (businessIdentityBuilder_ != null) {
        businessIdentityBuilder_.dispose();
        businessIdentityBuilder_ = null;
      }
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.shopping.merchant.accounts.v1beta.BusinessIdentityProto
          .internal_static_google_shopping_merchant_accounts_v1beta_UpdateBusinessIdentityRequest_descriptor;
    }
    @java.lang.Override
    public com.google.shopping.merchant.accounts.v1beta.UpdateBusinessIdentityRequest
        getDefaultInstanceForType() {
      return com.google.shopping.merchant.accounts.v1beta.UpdateBusinessIdentityRequest
          .getDefaultInstance();
    }
    // Builds and verifies initialization (always succeeds: no required proto2 fields).
    @java.lang.Override
    public com.google.shopping.merchant.accounts.v1beta.UpdateBusinessIdentityRequest build() {
      com.google.shopping.merchant.accounts.v1beta.UpdateBusinessIdentityRequest result =
          buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.shopping.merchant.accounts.v1beta.UpdateBusinessIdentityRequest
        buildPartial() {
      com.google.shopping.merchant.accounts.v1beta.UpdateBusinessIdentityRequest result =
          new com.google.shopping.merchant.accounts.v1beta.UpdateBusinessIdentityRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies each present field (and its presence bit) into the new message.
    private void buildPartial0(
        com.google.shopping.merchant.accounts.v1beta.UpdateBusinessIdentityRequest result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.businessIdentity_ =
            businessIdentityBuilder_ == null ? businessIdentity_ : businessIdentityBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      // Prefer the typed merge; otherwise fall back to reflective merging.
      if (other
          instanceof com.google.shopping.merchant.accounts.v1beta.UpdateBusinessIdentityRequest) {
        return mergeFrom(
            (com.google.shopping.merchant.accounts.v1beta.UpdateBusinessIdentityRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Field-by-field merge of another message of the same type.
    public Builder mergeFrom(
        com.google.shopping.merchant.accounts.v1beta.UpdateBusinessIdentityRequest other) {
      // Merging the default instance is a no-op.
      if (other
          == com.google.shopping.merchant.accounts.v1beta.UpdateBusinessIdentityRequest
              .getDefaultInstance()) return this;
      if (other.hasBusinessIdentity()) {
        mergeBusinessIdentity(other.getBusinessIdentity());
      }
      if (other.hasUpdateMask()) {
        mergeUpdateMask(other.getUpdateMask());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    // Wire-format parse: consumes tag/value pairs until end of input (tag 0).
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                // field 1: business_identity (length-delimited message).
                input.readMessage(
                    getBusinessIdentityFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                // field 2: update_mask (length-delimited message).
                input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Run even on error: fields parsed before the failure have landed.
        onChanged();
      } // finally
      return this;
    }
    // bitField0_ tracks explicit field presence: bit 0x1 = business_identity, 0x2 = update_mask.
    private int bitField0_;
    // Field 1 state: either the plain message (businessIdentity_) or, once a nested builder has
    // been requested, the SingleFieldBuilderV3 (businessIdentityBuilder_) owns the value.
    private com.google.shopping.merchant.accounts.v1beta.BusinessIdentity businessIdentity_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.shopping.merchant.accounts.v1beta.BusinessIdentity,
            com.google.shopping.merchant.accounts.v1beta.BusinessIdentity.Builder,
            com.google.shopping.merchant.accounts.v1beta.BusinessIdentityOrBuilder>
        businessIdentityBuilder_;
    /**
     *
     *
     * <pre>
     * Required. The new version of the business identity.
     * </pre>
     *
     * <code>
     * .google.shopping.merchant.accounts.v1beta.BusinessIdentity business_identity = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the businessIdentity field is set.
     */
    public boolean hasBusinessIdentity() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     *
     *
     * <pre>
     * Required. The new version of the business identity.
     * </pre>
     *
     * <code>
     * .google.shopping.merchant.accounts.v1beta.BusinessIdentity business_identity = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The businessIdentity.
     */
    public com.google.shopping.merchant.accounts.v1beta.BusinessIdentity getBusinessIdentity() {
      // Reads from whichever side currently owns the value (plain field vs. nested builder).
      if (businessIdentityBuilder_ == null) {
        return businessIdentity_ == null
            ? com.google.shopping.merchant.accounts.v1beta.BusinessIdentity.getDefaultInstance()
            : businessIdentity_;
      } else {
        return businessIdentityBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The new version of the business identity.
     * </pre>
     *
     * <code>
     * .google.shopping.merchant.accounts.v1beta.BusinessIdentity business_identity = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setBusinessIdentity(
        com.google.shopping.merchant.accounts.v1beta.BusinessIdentity value) {
      if (businessIdentityBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        businessIdentity_ = value;
      } else {
        businessIdentityBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The new version of the business identity.
     * </pre>
     *
     * <code>
     * .google.shopping.merchant.accounts.v1beta.BusinessIdentity business_identity = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setBusinessIdentity(
        com.google.shopping.merchant.accounts.v1beta.BusinessIdentity.Builder builderForValue) {
      if (businessIdentityBuilder_ == null) {
        businessIdentity_ = builderForValue.build();
      } else {
        businessIdentityBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The new version of the business identity.
     * </pre>
     *
     * <code>
     * .google.shopping.merchant.accounts.v1beta.BusinessIdentity business_identity = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeBusinessIdentity(
        com.google.shopping.merchant.accounts.v1beta.BusinessIdentity value) {
      // Merge only when a non-default value is already present; otherwise replace outright.
      if (businessIdentityBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)
            && businessIdentity_ != null
            && businessIdentity_
                != com.google.shopping.merchant.accounts.v1beta.BusinessIdentity
                    .getDefaultInstance()) {
          getBusinessIdentityBuilder().mergeFrom(value);
        } else {
          businessIdentity_ = value;
        }
      } else {
        businessIdentityBuilder_.mergeFrom(value);
      }
      if (businessIdentity_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The new version of the business identity.
     * </pre>
     *
     * <code>
     * .google.shopping.merchant.accounts.v1beta.BusinessIdentity business_identity = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearBusinessIdentity() {
      bitField0_ = (bitField0_ & ~0x00000001);
      businessIdentity_ = null;
      if (businessIdentityBuilder_ != null) {
        businessIdentityBuilder_.dispose();
        businessIdentityBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The new version of the business identity.
     * </pre>
     *
     * <code>
     * .google.shopping.merchant.accounts.v1beta.BusinessIdentity business_identity = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.shopping.merchant.accounts.v1beta.BusinessIdentity.Builder
        getBusinessIdentityBuilder() {
      // Requesting the nested builder marks the field present and flips ownership to it.
      bitField0_ |= 0x00000001;
      onChanged();
      return getBusinessIdentityFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Required. The new version of the business identity.
     * </pre>
     *
     * <code>
     * .google.shopping.merchant.accounts.v1beta.BusinessIdentity business_identity = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.shopping.merchant.accounts.v1beta.BusinessIdentityOrBuilder
        getBusinessIdentityOrBuilder() {
      if (businessIdentityBuilder_ != null) {
        return businessIdentityBuilder_.getMessageOrBuilder();
      } else {
        return businessIdentity_ == null
            ? com.google.shopping.merchant.accounts.v1beta.BusinessIdentity.getDefaultInstance()
            : businessIdentity_;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The new version of the business identity.
     * </pre>
     *
     * <code>
     * .google.shopping.merchant.accounts.v1beta.BusinessIdentity business_identity = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.shopping.merchant.accounts.v1beta.BusinessIdentity,
            com.google.shopping.merchant.accounts.v1beta.BusinessIdentity.Builder,
            com.google.shopping.merchant.accounts.v1beta.BusinessIdentityOrBuilder>
        getBusinessIdentityFieldBuilder() {
      // Lazily creates the field builder, seeding it with the current value; the plain field is
      // nulled afterwards because the builder becomes the single source of truth.
      if (businessIdentityBuilder_ == null) {
        businessIdentityBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.shopping.merchant.accounts.v1beta.BusinessIdentity,
                com.google.shopping.merchant.accounts.v1beta.BusinessIdentity.Builder,
                com.google.shopping.merchant.accounts.v1beta.BusinessIdentityOrBuilder>(
                getBusinessIdentity(), getParentForChildren(), isClean());
        businessIdentity_ = null;
      }
      return businessIdentityBuilder_;
    }
    // Field 2 state (update_mask): same plain-field / nested-builder ownership scheme as
    // business_identity above; presence is tracked by bitField0_ bit 0x2.
    private com.google.protobuf.FieldMask updateMask_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        updateMaskBuilder_;
    /**
     *
     *
     * <pre>
     * Required. List of fields being updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the updateMask field is set.
     */
    public boolean hasUpdateMask() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     *
     *
     * <pre>
     * Required. List of fields being updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The updateMask.
     */
    public com.google.protobuf.FieldMask getUpdateMask() {
      if (updateMaskBuilder_ == null) {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      } else {
        return updateMaskBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Required. List of fields being updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        updateMask_ = value;
      } else {
        updateMaskBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. List of fields being updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
      if (updateMaskBuilder_ == null) {
        updateMask_ = builderForValue.build();
      } else {
        updateMaskBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. List of fields being updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
      // Merge only into an existing non-default value; otherwise adopt the argument as-is.
      if (updateMaskBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)
            && updateMask_ != null
            && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
          getUpdateMaskBuilder().mergeFrom(value);
        } else {
          updateMask_ = value;
        }
      } else {
        updateMaskBuilder_.mergeFrom(value);
      }
      if (updateMask_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. List of fields being updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearUpdateMask() {
      bitField0_ = (bitField0_ & ~0x00000002);
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. List of fields being updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
      bitField0_ |= 0x00000002;
      onChanged();
      return getUpdateMaskFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Required. List of fields being updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
      if (updateMaskBuilder_ != null) {
        return updateMaskBuilder_.getMessageOrBuilder();
      } else {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. List of fields being updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        getUpdateMaskFieldBuilder() {
      // Lazily created; seeds the builder with the current value and transfers ownership to it.
      if (updateMaskBuilder_ == null) {
        updateMaskBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.FieldMask,
                com.google.protobuf.FieldMask.Builder,
                com.google.protobuf.FieldMaskOrBuilder>(
                getUpdateMask(), getParentForChildren(), isClean());
        updateMask_ = null;
      }
      return updateMaskBuilder_;
    }
    // Unknown-field handling is delegated unchanged to the generated base builder.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.shopping.merchant.accounts.v1beta.UpdateBusinessIdentityRequest)
}
// @@protoc_insertion_point(class_scope:google.shopping.merchant.accounts.v1beta.UpdateBusinessIdentityRequest)
  // Singleton default instance: the canonical empty message shared by all callers.
  private static final com.google.shopping.merchant.accounts.v1beta.UpdateBusinessIdentityRequest
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE =
        new com.google.shopping.merchant.accounts.v1beta.UpdateBusinessIdentityRequest();
  }
  public static com.google.shopping.merchant.accounts.v1beta.UpdateBusinessIdentityRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Shared stateless parser. Parse failures carry the partially-built message via
  // setUnfinishedMessage so callers can inspect what was decoded before the error.
  private static final com.google.protobuf.Parser<UpdateBusinessIdentityRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateBusinessIdentityRequest>() {
        @java.lang.Override
        public UpdateBusinessIdentityRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<UpdateBusinessIdentityRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<UpdateBusinessIdentityRequest> getParserForType() {
    return PARSER;
  }
  // Instance-level accessor required by the MessageLite contract; returns the shared singleton.
  @java.lang.Override
  public com.google.shopping.merchant.accounts.v1beta.UpdateBusinessIdentityRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// ---------------------------------------------------------------------------
// File boundary: java-analytics-data/proto-google-analytics-data-v1beta/
//   src/main/java/com/google/analytics/data/v1beta/Metric.java
// (concatenation metadata removed; the license comment opener below was fused
//  into the metadata line and is restored here)
// ---------------------------------------------------------------------------
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/analytics/data/v1beta/data.proto
// Protobuf Java Version: 3.25.8
package com.google.analytics.data.v1beta;
/**
*
*
* <pre>
* The quantitative measurements of a report. For example, the metric
* `eventCount` is the total number of events. Requests are allowed up to 10
* metrics.
* </pre>
*
* Protobuf type {@code google.analytics.data.v1beta.Metric}
*/
public final class Metric extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.analytics.data.v1beta.Metric)
MetricOrBuilder {
private static final long serialVersionUID = 0L;
  // Use Metric.newBuilder() to construct.
  private Metric(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default constructor initializes string fields to empty (proto3 defaults).
  private Metric() {
    name_ = "";
    expression_ = "";
  }
  // Reflection hook used by the runtime to create instances without invoking constructors' logic.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new Metric();
  }
  // Descriptor plumbing: wires this class to the schema registered in ReportingApiProto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.analytics.data.v1beta.ReportingApiProto
        .internal_static_google_analytics_data_v1beta_Metric_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.analytics.data.v1beta.ReportingApiProto
        .internal_static_google_analytics_data_v1beta_Metric_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.analytics.data.v1beta.Metric.class,
            com.google.analytics.data.v1beta.Metric.Builder.class);
  }
  public static final int NAME_FIELD_NUMBER = 1;
  // name_ holds either a String or a ByteString; accessors lazily convert and cache the other
  // representation in place (standard generated-code optimization, safe because both forms are
  // immutable and represent the same UTF-8 data).
  @SuppressWarnings("serial")
  private volatile java.lang.Object name_ = "";
  /**
   *
   *
   * <pre>
   * The name of the metric. See the [API
   * Metrics](https://developers.google.com/analytics/devguides/reporting/data/v1/api-schema#metrics)
   * for the list of metric names supported by core reporting methods such
   * as `runReport` and `batchRunReports`. See
   * [Realtime
   * Metrics](https://developers.google.com/analytics/devguides/reporting/data/v1/realtime-api-schema#metrics)
   * for the list of metric names supported by the `runRealtimeReport`
   * method. See
   * [Funnel
   * Metrics](https://developers.google.com/analytics/devguides/reporting/data/v1/exploration-api-schema#metrics)
   * for the list of metric names supported by the `runFunnelReport`
   * method.
   *
   * If `expression` is specified, `name` can be any string that you would like
   * within the allowed character set. For example if `expression` is
   * `screenPageViews/sessions`, you could call that metric's name =
   * `viewsPerSession`. Metric names that you choose must match the regular
   * expression `^[a-zA-Z0-9_]$`.
   *
   * Metrics are referenced by `name` in `metricFilter`, `orderBys`, and metric
   * `expression`.
   * </pre>
   *
   * <code>string name = 1;</code>
   *
   * @return The name.
   */
  @java.lang.Override
  public java.lang.String getName() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      name_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * The name of the metric. See the [API
   * Metrics](https://developers.google.com/analytics/devguides/reporting/data/v1/api-schema#metrics)
   * for the list of metric names supported by core reporting methods such
   * as `runReport` and `batchRunReports`. See
   * [Realtime
   * Metrics](https://developers.google.com/analytics/devguides/reporting/data/v1/realtime-api-schema#metrics)
   * for the list of metric names supported by the `runRealtimeReport`
   * method. See
   * [Funnel
   * Metrics](https://developers.google.com/analytics/devguides/reporting/data/v1/exploration-api-schema#metrics)
   * for the list of metric names supported by the `runFunnelReport`
   * method.
   *
   * If `expression` is specified, `name` can be any string that you would like
   * within the allowed character set. For example if `expression` is
   * `screenPageViews/sessions`, you could call that metric's name =
   * `viewsPerSession`. Metric names that you choose must match the regular
   * expression `^[a-zA-Z0-9_]$`.
   *
   * Metrics are referenced by `name` in `metricFilter`, `orderBys`, and metric
   * `expression`.
   * </pre>
   *
   * <code>string name = 1;</code>
   *
   * @return The bytes for name.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNameBytes() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      name_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int EXPRESSION_FIELD_NUMBER = 2;
  // Same lazy String/ByteString dual-representation caching as name_ above.
  @SuppressWarnings("serial")
  private volatile java.lang.Object expression_ = "";
  /**
   *
   *
   * <pre>
   * A mathematical expression for derived metrics. For example, the metric
   * Event count per user is `eventCount/totalUsers`.
   * </pre>
   *
   * <code>string expression = 2;</code>
   *
   * @return The expression.
   */
  @java.lang.Override
  public java.lang.String getExpression() {
    java.lang.Object ref = expression_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      expression_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * A mathematical expression for derived metrics. For example, the metric
   * Event count per user is `eventCount/totalUsers`.
   * </pre>
   *
   * <code>string expression = 2;</code>
   *
   * @return The bytes for expression.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getExpressionBytes() {
    java.lang.Object ref = expression_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      expression_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int INVISIBLE_FIELD_NUMBER = 3;
  private boolean invisible_ = false;
  /**
   *
   *
   * <pre>
   * Indicates if a metric is invisible in the report response. If a metric is
   * invisible, the metric will not produce a column in the response, but can be
   * used in `metricFilter`, `orderBys`, or a metric `expression`.
   * </pre>
   *
   * <code>bool invisible = 3;</code>
   *
   * @return The invisible.
   */
  @java.lang.Override
  public boolean getInvisible() {
    return invisible_;
  }
  // Memoized initialization state: -1 = unknown, 1 = initialized, 0 = not initialized.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes only non-default fields (proto3 semantics), then any unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(expression_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, expression_);
    }
    if (invisible_ != false) {
      output.writeBool(3, invisible_);
    }
    getUnknownFields().writeTo(output);
  }
  // Computes the wire size once and caches it in memoizedSize (-1 means "not yet computed");
  // must mirror writeTo exactly so the two agree byte-for-byte.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(expression_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, expression_);
    }
    if (invisible_ != false) {
      size += com.google.protobuf.CodedOutputStream.computeBoolSize(3, invisible_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality over all declared fields plus unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.analytics.data.v1beta.Metric)) {
      return super.equals(obj);
    }
    com.google.analytics.data.v1beta.Metric other = (com.google.analytics.data.v1beta.Metric) obj;
    if (!getName().equals(other.getName())) return false;
    if (!getExpression().equals(other.getExpression())) return false;
    if (getInvisible() != other.getInvisible()) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash over descriptor + each field (generator's fixed 19/37/53/29 multiplier scheme),
  // memoized; consistent with equals above.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + NAME_FIELD_NUMBER;
    hash = (53 * hash) + getName().hashCode();
    hash = (37 * hash) + EXPRESSION_FIELD_NUMBER;
    hash = (53 * hash) + getExpression().hashCode();
    hash = (37 * hash) + INVISIBLE_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getInvisible());
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points: every input form (ByteBuffer, ByteString, byte[],
  // InputStream, CodedInputStream; delimited or not; with or without an extension registry)
  // funnels into the shared PARSER.
  public static com.google.analytics.data.v1beta.Metric parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.analytics.data.v1beta.Metric parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.analytics.data.v1beta.Metric parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.analytics.data.v1beta.Metric parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.analytics.data.v1beta.Metric parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.analytics.data.v1beta.Metric parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.analytics.data.v1beta.Metric parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.analytics.data.v1beta.Metric parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.analytics.data.v1beta.Metric parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.analytics.data.v1beta.Metric parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.analytics.data.v1beta.Metric parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.analytics.data.v1beta.Metric parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factories: toBuilder() on the default instance avoids carrying builder state;
  // newBuilder(prototype) pre-populates from an existing message.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.analytics.data.v1beta.Metric prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* The quantitative measurements of a report. For example, the metric
* `eventCount` is the total number of events. Requests are allowed up to 10
* metrics.
* </pre>
*
* Protobuf type {@code google.analytics.data.v1beta.Metric}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.analytics.data.v1beta.Metric)
com.google.analytics.data.v1beta.MetricOrBuilder {
    // Builder-side descriptor plumbing; mirrors the message-side accessors.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.analytics.data.v1beta.ReportingApiProto
          .internal_static_google_analytics_data_v1beta_Metric_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.analytics.data.v1beta.ReportingApiProto
          .internal_static_google_analytics_data_v1beta_Metric_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.analytics.data.v1beta.Metric.class,
              com.google.analytics.data.v1beta.Metric.Builder.class);
    }
    // Construct using com.google.analytics.data.v1beta.Metric.newBuilder()
    private Builder() {}
    // Parent-aware constructor used when this builder is nested inside another builder.
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    // Resets all fields to proto3 defaults and clears the presence bitfield.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      name_ = "";
      expression_ = "";
      invisible_ = false;
      return this;
    }
    // Message contract accessors: the descriptor and the shared default instance.
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.analytics.data.v1beta.ReportingApiProto
          .internal_static_google_analytics_data_v1beta_Metric_descriptor;
    }
    @java.lang.Override
    public com.google.analytics.data.v1beta.Metric getDefaultInstanceForType() {
      return com.google.analytics.data.v1beta.Metric.getDefaultInstance();
    }
    // build() enforces the initialized contract; buildPartial() copies only fields whose
    // presence bit is set in bitField0_ into a fresh immutable Metric.
    @java.lang.Override
    public com.google.analytics.data.v1beta.Metric build() {
      com.google.analytics.data.v1beta.Metric result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.analytics.data.v1beta.Metric buildPartial() {
      com.google.analytics.data.v1beta.Metric result =
          new com.google.analytics.data.v1beta.Metric(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    private void buildPartial0(com.google.analytics.data.v1beta.Metric result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.name_ = name_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.expression_ = expression_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.invisible_ = invisible_;
      }
    }
    // Generated reflective pass-throughs: all delegate directly to GeneratedMessageV3.Builder.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    // Dynamic merge entry point: dispatches to the type-safe overload when possible.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.analytics.data.v1beta.Metric) {
        return mergeFrom((com.google.analytics.data.v1beta.Metric) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Type-safe merge: proto3 scalar semantics — only non-default values from {@code other}
    // overwrite this builder's state; unknown fields are merged last.
    public Builder mergeFrom(com.google.analytics.data.v1beta.Metric other) {
      if (other == com.google.analytics.data.v1beta.Metric.getDefaultInstance()) return this;
      if (!other.getName().isEmpty()) {
        name_ = other.name_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getExpression().isEmpty()) {
        expression_ = other.expression_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.getInvisible() != false) {
        setInvisible(other.getInvisible());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    // Always true: proto3 messages have no required fields.
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Wire-format parse loop. Tag 10 = field 1 (name), tag 18 = field 2 (expression),
    // tag 24 = field 3 (invisible, varint). Unknown tags are preserved; onChanged() always
    // fires in the finally block even on parse failure.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                name_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                expression_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 24:
              {
                invisible_ = input.readBool();
                bitField0_ |= 0x00000004;
                break;
              } // case 24
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // bitField0_ presence bits: 0x1 = name, 0x2 = expression, 0x4 = invisible.
    private int bitField0_;
    // name_ holds String or ByteString; conversions are cached in place (see message class).
    private java.lang.Object name_ = "";
    /**
     *
     *
     * <pre>
     * The name of the metric. See the [API
     * Metrics](https://developers.google.com/analytics/devguides/reporting/data/v1/api-schema#metrics)
     * for the list of metric names supported by core reporting methods such
     * as `runReport` and `batchRunReports`. See
     * [Realtime
     * Metrics](https://developers.google.com/analytics/devguides/reporting/data/v1/realtime-api-schema#metrics)
     * for the list of metric names supported by the `runRealtimeReport`
     * method. See
     * [Funnel
     * Metrics](https://developers.google.com/analytics/devguides/reporting/data/v1/exploration-api-schema#metrics)
     * for the list of metric names supported by the `runFunnelReport`
     * method.
     *
     * If `expression` is specified, `name` can be any string that you would like
     * within the allowed character set. For example if `expression` is
     * `screenPageViews/sessions`, you could call that metric's name =
     * `viewsPerSession`. Metric names that you choose must match the regular
     * expression `^[a-zA-Z0-9_]$`.
     *
     * Metrics are referenced by `name` in `metricFilter`, `orderBys`, and metric
     * `expression`.
     * </pre>
     *
     * <code>string name = 1;</code>
     *
     * @return The name.
     */
    public java.lang.String getName() {
      java.lang.Object ref = name_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        name_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
/**
*
*
* <pre>
* The name of the metric. See the [API
* Metrics](https://developers.google.com/analytics/devguides/reporting/data/v1/api-schema#metrics)
* for the list of metric names supported by core reporting methods such
* as `runReport` and `batchRunReports`. See
* [Realtime
* Metrics](https://developers.google.com/analytics/devguides/reporting/data/v1/realtime-api-schema#metrics)
* for the list of metric names supported by the `runRealtimeReport`
* method. See
* [Funnel
* Metrics](https://developers.google.com/analytics/devguides/reporting/data/v1/exploration-api-schema#metrics)
* for the list of metric names supported by the `runFunnelReport`
* method.
*
* If `expression` is specified, `name` can be any string that you would like
* within the allowed character set. For example if `expression` is
* `screenPageViews/sessions`, you could call that metric's name =
* `viewsPerSession`. Metric names that you choose must match the regular
* expression `^[a-zA-Z0-9_]$`.
*
* Metrics are referenced by `name` in `metricFilter`, `orderBys`, and metric
* `expression`.
* </pre>
*
* <code>string name = 1;</code>
*
* @return The bytes for name.
*/
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The name of the metric. See the [API
* Metrics](https://developers.google.com/analytics/devguides/reporting/data/v1/api-schema#metrics)
* for the list of metric names supported by core reporting methods such
* as `runReport` and `batchRunReports`. See
* [Realtime
* Metrics](https://developers.google.com/analytics/devguides/reporting/data/v1/realtime-api-schema#metrics)
* for the list of metric names supported by the `runRealtimeReport`
* method. See
* [Funnel
* Metrics](https://developers.google.com/analytics/devguides/reporting/data/v1/exploration-api-schema#metrics)
* for the list of metric names supported by the `runFunnelReport`
* method.
*
* If `expression` is specified, `name` can be any string that you would like
* within the allowed character set. For example if `expression` is
* `screenPageViews/sessions`, you could call that metric's name =
* `viewsPerSession`. Metric names that you choose must match the regular
* expression `^[a-zA-Z0-9_]$`.
*
* Metrics are referenced by `name` in `metricFilter`, `orderBys`, and metric
* `expression`.
* </pre>
*
* <code>string name = 1;</code>
*
* @param value The name to set.
* @return This builder for chaining.
*/
public Builder setName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
name_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* The name of the metric. See the [API
* Metrics](https://developers.google.com/analytics/devguides/reporting/data/v1/api-schema#metrics)
* for the list of metric names supported by core reporting methods such
* as `runReport` and `batchRunReports`. See
* [Realtime
* Metrics](https://developers.google.com/analytics/devguides/reporting/data/v1/realtime-api-schema#metrics)
* for the list of metric names supported by the `runRealtimeReport`
* method. See
* [Funnel
* Metrics](https://developers.google.com/analytics/devguides/reporting/data/v1/exploration-api-schema#metrics)
* for the list of metric names supported by the `runFunnelReport`
* method.
*
* If `expression` is specified, `name` can be any string that you would like
* within the allowed character set. For example if `expression` is
* `screenPageViews/sessions`, you could call that metric's name =
* `viewsPerSession`. Metric names that you choose must match the regular
* expression `^[a-zA-Z0-9_]$`.
*
* Metrics are referenced by `name` in `metricFilter`, `orderBys`, and metric
* `expression`.
* </pre>
*
* <code>string name = 1;</code>
*
* @return This builder for chaining.
*/
public Builder clearName() {
name_ = getDefaultInstance().getName();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* The name of the metric. See the [API
* Metrics](https://developers.google.com/analytics/devguides/reporting/data/v1/api-schema#metrics)
* for the list of metric names supported by core reporting methods such
* as `runReport` and `batchRunReports`. See
* [Realtime
* Metrics](https://developers.google.com/analytics/devguides/reporting/data/v1/realtime-api-schema#metrics)
* for the list of metric names supported by the `runRealtimeReport`
* method. See
* [Funnel
* Metrics](https://developers.google.com/analytics/devguides/reporting/data/v1/exploration-api-schema#metrics)
* for the list of metric names supported by the `runFunnelReport`
* method.
*
* If `expression` is specified, `name` can be any string that you would like
* within the allowed character set. For example if `expression` is
* `screenPageViews/sessions`, you could call that metric's name =
* `viewsPerSession`. Metric names that you choose must match the regular
* expression `^[a-zA-Z0-9_]$`.
*
* Metrics are referenced by `name` in `metricFilter`, `orderBys`, and metric
* `expression`.
* </pre>
*
* <code>string name = 1;</code>
*
* @param value The bytes for name to set.
* @return This builder for chaining.
*/
public Builder setNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
name_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object expression_ = "";
/**
 * A mathematical expression for derived metrics. For example, the metric
 * Event count per user is `eventCount/totalUsers`.
 *
 * <code>string expression = 2;</code>
 *
 * @return The expression.
 */
public java.lang.String getExpression() {
  java.lang.Object ref = expression_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  }
  // First access after parsing: decode the ByteString once and cache the String.
  java.lang.String decoded = ((com.google.protobuf.ByteString) ref).toStringUtf8();
  expression_ = decoded;
  return decoded;
}
/**
 * The UTF-8 bytes of {@code expression}; see {@link #getExpression()}.
 *
 * <code>string expression = 2;</code>
 *
 * @return The bytes for expression.
 */
public com.google.protobuf.ByteString getExpressionBytes() {
  java.lang.Object ref = expression_;
  if (!(ref instanceof java.lang.String)) {
    return (com.google.protobuf.ByteString) ref;
  }
  // Encode the cached String once and memoize the ByteString form.
  com.google.protobuf.ByteString encoded =
      com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
  expression_ = encoded;
  return encoded;
}
/**
 * Sets {@code expression}; see {@link #getExpression()} for field semantics.
 *
 * <code>string expression = 2;</code>
 *
 * @param value The expression to set.
 * @return This builder for chaining.
 */
public Builder setExpression(java.lang.String value) {
  expression_ = java.util.Objects.requireNonNull(value);
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
/**
 * Resets {@code expression} to its default (empty) value.
 *
 * <code>string expression = 2;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearExpression() {
  expression_ = getDefaultInstance().getExpression();
  bitField0_ &= ~0x00000002;
  onChanged();
  return this;
}
/**
 * Sets {@code expression} from UTF-8 bytes; see {@link #getExpression()}.
 *
 * <code>string expression = 2;</code>
 *
 * @param value The bytes for expression to set.
 * @return This builder for chaining.
 */
public Builder setExpressionBytes(com.google.protobuf.ByteString value) {
  java.util.Objects.requireNonNull(value);
  checkByteStringIsUtf8(value);
  expression_ = value;
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
private boolean invisible_;
/**
 * Indicates if a metric is invisible in the report response. An invisible
 * metric produces no column in the response but may still be used in
 * `metricFilter`, `orderBys`, or a metric `expression`.
 *
 * <code>bool invisible = 3;</code>
 *
 * @return The invisible.
 */
@java.lang.Override
public boolean getInvisible() {
  return invisible_;
}
/**
 * Sets {@code invisible}; see {@link #getInvisible()} for field semantics.
 *
 * <code>bool invisible = 3;</code>
 *
 * @param value The invisible to set.
 * @return This builder for chaining.
 */
public Builder setInvisible(boolean value) {
  bitField0_ |= 0x00000004;
  invisible_ = value;
  onChanged();
  return this;
}
/**
 * Resets {@code invisible} to its default (false) value.
 *
 * <code>bool invisible = 3;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearInvisible() {
  invisible_ = false;
  bitField0_ &= ~0x00000004;
  onChanged();
  return this;
}
// Replaces this builder's unknown fields wholesale; delegates to the generated base.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
// Merges additional unknown fields into those already held; delegates to the base.
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.analytics.data.v1beta.Metric)
}
// @@protoc_insertion_point(class_scope:google.analytics.data.v1beta.Metric)
// Shared immutable default instance, returned by getDefaultInstance() and
// getDefaultInstanceForType().
private static final com.google.analytics.data.v1beta.Metric DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.analytics.data.v1beta.Metric();
}
public static com.google.analytics.data.v1beta.Metric getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Wire-format parser. On failure the partially built message is attached to the
// thrown InvalidProtocolBufferException so callers can still inspect what parsed.
private static final com.google.protobuf.Parser<Metric> PARSER =
new com.google.protobuf.AbstractParser<Metric>() {
@java.lang.Override
public Metric parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
// Any other I/O failure is wrapped as a parse error carrying the partial message.
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<Metric> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<Metric> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.analytics.data.v1beta.Metric getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 35,991 | java-analytics-admin/proto-google-analytics-admin-v1beta/src/main/java/com/google/analytics/admin/v1beta/UpdateAccountRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/analytics/admin/v1beta/analytics_admin.proto
// Protobuf Java Version: 3.25.8
package com.google.analytics.admin.v1beta;
/**
*
*
* <pre>
* Request message for UpdateAccount RPC.
* </pre>
*
* Protobuf type {@code google.analytics.admin.v1beta.UpdateAccountRequest}
*/
public final class UpdateAccountRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.analytics.admin.v1beta.UpdateAccountRequest)
UpdateAccountRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateAccountRequest.newBuilder() to construct.
private UpdateAccountRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateAccountRequest() {}
// Reflection hook used by the protobuf runtime to allocate fresh instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UpdateAccountRequest();
}
// Message descriptor as registered in AnalyticsAdminProto.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.analytics.admin.v1beta.AnalyticsAdminProto
.internal_static_google_analytics_admin_v1beta_UpdateAccountRequest_descriptor;
}
// Binds the descriptor's fields to this class's generated accessors.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.analytics.admin.v1beta.AnalyticsAdminProto
.internal_static_google_analytics_admin_v1beta_UpdateAccountRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.analytics.admin.v1beta.UpdateAccountRequest.class,
com.google.analytics.admin.v1beta.UpdateAccountRequest.Builder.class);
}
private int bitField0_;
public static final int ACCOUNT_FIELD_NUMBER = 1;
private com.google.analytics.admin.v1beta.Account account_;
/**
 * Required. The account to update. The account's `name` field is used to
 * identify the account.
 *
 * <code>.google.analytics.admin.v1beta.Account account = 1 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return Whether the account field is set.
 */
@java.lang.Override
public boolean hasAccount() {
  return (bitField0_ & 0x00000001) != 0;
}
/**
 * Required. The account to update. The account's `name` field is used to
 * identify the account.
 *
 * <code>.google.analytics.admin.v1beta.Account account = 1 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return The account, or the default instance when unset.
 */
@java.lang.Override
public com.google.analytics.admin.v1beta.Account getAccount() {
  if (account_ != null) {
    return account_;
  }
  return com.google.analytics.admin.v1beta.Account.getDefaultInstance();
}
/**
 * Required. The account to update. The account's `name` field is used to
 * identify the account.
 *
 * <code>.google.analytics.admin.v1beta.Account account = 1 [(.google.api.field_behavior) = REQUIRED];</code>
 */
@java.lang.Override
public com.google.analytics.admin.v1beta.AccountOrBuilder getAccountOrBuilder() {
  if (account_ != null) {
    return account_;
  }
  return com.google.analytics.admin.v1beta.Account.getDefaultInstance();
}
public static final int UPDATE_MASK_FIELD_NUMBER = 2;
private com.google.protobuf.FieldMask updateMask_;
/**
 * Required. The list of fields to be updated. Field names must be in snake
 * case (for example, "field_to_update"). Omitted fields will not be updated.
 * To replace the entire entity, use one path with the string "*" to match
 * all fields.
 *
 * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return Whether the updateMask field is set.
 */
@java.lang.Override
public boolean hasUpdateMask() {
  return (bitField0_ & 0x00000002) != 0;
}
/**
 * Required. The list of fields to be updated, in snake case. Omitted fields
 * are not updated; a single "*" path replaces the entire entity.
 *
 * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return The updateMask, or the default instance when unset.
 */
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
  if (updateMask_ != null) {
    return updateMask_;
  }
  return com.google.protobuf.FieldMask.getDefaultInstance();
}
/**
 * Required. The list of fields to be updated, in snake case. Omitted fields
 * are not updated; a single "*" path replaces the entire entity.
 *
 * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];</code>
 */
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
  if (updateMask_ != null) {
    return updateMask_;
  }
  return com.google.protobuf.FieldMask.getDefaultInstance();
}
// Tri-state memo for isInitialized(): -1 unknown, 0 false, 1 true.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// This message declares no proto2-style required fields, so it is always initialized.
memoizedIsInitialized = 1;
return true;
}
// Serializes only fields whose presence bits are set, then any unknown fields.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getAccount());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getUpdateMask());
}
getUnknownFields().writeTo(output);
}
// Computes the wire size and memoizes it in memoizedSize (-1 means not yet computed).
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getAccount());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
// Value equality: field presence, field contents, and unknown fields must all match.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.analytics.admin.v1beta.UpdateAccountRequest)) {
return super.equals(obj);
}
com.google.analytics.admin.v1beta.UpdateAccountRequest other =
(com.google.analytics.admin.v1beta.UpdateAccountRequest) obj;
if (hasAccount() != other.hasAccount()) return false;
if (hasAccount()) {
if (!getAccount().equals(other.getAccount())) return false;
}
if (hasUpdateMask() != other.hasUpdateMask()) return false;
if (hasUpdateMask()) {
if (!getUpdateMask().equals(other.getUpdateMask())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
// Hash consistent with equals(); mixes field numbers with set-field hashes.
// Memoized in memoizedHashCode (0 means not yet computed).
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasAccount()) {
hash = (37 * hash) + ACCOUNT_FIELD_NUMBER;
hash = (53 * hash) + getAccount().hashCode();
}
if (hasUpdateMask()) {
hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
hash = (53 * hash) + getUpdateMask().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parse entry points: one overload per input source
// (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with
// and without an ExtensionRegistryLite. All delegate to PARSER or to the
// GeneratedMessageV3 I/O helpers.
public static com.google.analytics.admin.v1beta.UpdateAccountRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.analytics.admin.v1beta.UpdateAccountRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.analytics.admin.v1beta.UpdateAccountRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.analytics.admin.v1beta.UpdateAccountRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.analytics.admin.v1beta.UpdateAccountRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.analytics.admin.v1beta.UpdateAccountRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.analytics.admin.v1beta.UpdateAccountRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.analytics.admin.v1beta.UpdateAccountRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Delimited variants read a varint length prefix before the message payload.
public static com.google.analytics.admin.v1beta.UpdateAccountRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.analytics.admin.v1beta.UpdateAccountRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.analytics.admin.v1beta.UpdateAccountRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.analytics.admin.v1beta.UpdateAccountRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
// Fresh builder derived from the (empty) default instance.
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
// Builder pre-populated with the fields of `prototype`.
public static Builder newBuilder(
com.google.analytics.admin.v1beta.UpdateAccountRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
// The default instance yields an empty builder; any other instance seeds it.
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for UpdateAccount RPC.
* </pre>
*
* Protobuf type {@code google.analytics.admin.v1beta.UpdateAccountRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.analytics.admin.v1beta.UpdateAccountRequest)
com.google.analytics.admin.v1beta.UpdateAccountRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.analytics.admin.v1beta.AnalyticsAdminProto
.internal_static_google_analytics_admin_v1beta_UpdateAccountRequest_descriptor;
}
// Binds the descriptor's fields to the generated accessors of this builder.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.analytics.admin.v1beta.AnalyticsAdminProto
.internal_static_google_analytics_admin_v1beta_UpdateAccountRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.analytics.admin.v1beta.UpdateAccountRequest.class,
com.google.analytics.admin.v1beta.UpdateAccountRequest.Builder.class);
}
// Construct using com.google.analytics.admin.v1beta.UpdateAccountRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates the nested message-field builders when the runtime is
// configured to always use field builders.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getAccountFieldBuilder();
getUpdateMaskFieldBuilder();
}
}
// Resets all fields to defaults and disposes any live nested field builders.
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
account_ = null;
if (accountBuilder_ != null) {
accountBuilder_.dispose();
accountBuilder_ = null;
}
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.analytics.admin.v1beta.AnalyticsAdminProto
.internal_static_google_analytics_admin_v1beta_UpdateAccountRequest_descriptor;
}
@java.lang.Override
public com.google.analytics.admin.v1beta.UpdateAccountRequest getDefaultInstanceForType() {
return com.google.analytics.admin.v1beta.UpdateAccountRequest.getDefaultInstance();
}
// Like buildPartial(), but rejects uninitialized results (never the case here,
// as this message has no required fields).
@java.lang.Override
public com.google.analytics.admin.v1beta.UpdateAccountRequest build() {
com.google.analytics.admin.v1beta.UpdateAccountRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.analytics.admin.v1beta.UpdateAccountRequest buildPartial() {
com.google.analytics.admin.v1beta.UpdateAccountRequest result =
new com.google.analytics.admin.v1beta.UpdateAccountRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Copies set fields into `result`, preferring live nested builders over the
// cached message fields, and transfers the corresponding presence bits.
private void buildPartial0(com.google.analytics.admin.v1beta.UpdateAccountRequest result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.account_ = accountBuilder_ == null ? account_ : accountBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
// Reflective mutators below simply delegate to the generated base builder.
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
// Type-dispatching merge: same-type messages take the fast field-wise path.
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.analytics.admin.v1beta.UpdateAccountRequest) {
return mergeFrom((com.google.analytics.admin.v1beta.UpdateAccountRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-wise merge: only fields set on `other` are merged in; unknown fields
// are always carried over.
public Builder mergeFrom(com.google.analytics.admin.v1beta.UpdateAccountRequest other) {
if (other == com.google.analytics.admin.v1beta.UpdateAccountRequest.getDefaultInstance())
return this;
if (other.hasAccount()) {
mergeAccount(other.getAccount());
}
if (other.hasUpdateMask()) {
mergeUpdateMask(other.getUpdateMask());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
// No required fields exist, so a builder is always initialized.
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Streaming merge: reads tag/value pairs off the wire until EOF (tag 0) or an
// end-group tag, routing known fields into their builders and everything else
// into the unknown-field set. onChanged() always fires, even on error, because
// fields read before a failure have already mutated this builder.
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
// tag 10 = field 1 (account), length-delimited.
case 10:
{
input.readMessage(getAccountFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
// tag 18 = field 2 (update_mask), length-delimited.
case 18:
{
input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.analytics.admin.v1beta.Account account_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.analytics.admin.v1beta.Account,
com.google.analytics.admin.v1beta.Account.Builder,
com.google.analytics.admin.v1beta.AccountOrBuilder>
accountBuilder_;
/**
 * Required. The account to update. The account's `name` field is used to
 * identify the account.
 *
 * <code>.google.analytics.admin.v1beta.Account account = 1 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return Whether the account field is set.
 */
public boolean hasAccount() {
  return (bitField0_ & 0x00000001) != 0;
}
/**
 * Required. The account to update. The account's `name` field is used to
 * identify the account.
 *
 * <code>.google.analytics.admin.v1beta.Account account = 1 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return The account, or the default instance when unset.
 */
public com.google.analytics.admin.v1beta.Account getAccount() {
  // A live nested builder, when present, is the source of truth.
  if (accountBuilder_ != null) {
    return accountBuilder_.getMessage();
  }
  return account_ == null
      ? com.google.analytics.admin.v1beta.Account.getDefaultInstance()
      : account_;
}
/**
 * Sets `account` from a complete message.
 *
 * <code>.google.analytics.admin.v1beta.Account account = 1 [(.google.api.field_behavior) = REQUIRED];</code>
 */
public Builder setAccount(com.google.analytics.admin.v1beta.Account value) {
  if (accountBuilder_ != null) {
    accountBuilder_.setMessage(value);
  } else {
    if (value == null) {
      throw new NullPointerException();
    }
    account_ = value;
  }
  bitField0_ |= 0x00000001;
  onChanged();
  return this;
}
/**
 * Sets `account` from a nested builder, which is built immediately.
 *
 * <code>.google.analytics.admin.v1beta.Account account = 1 [(.google.api.field_behavior) = REQUIRED];</code>
 */
public Builder setAccount(com.google.analytics.admin.v1beta.Account.Builder builderForValue) {
  com.google.analytics.admin.v1beta.Account built = builderForValue.build();
  if (accountBuilder_ == null) {
    account_ = built;
  } else {
    accountBuilder_.setMessage(built);
  }
  bitField0_ |= 0x00000001;
  onChanged();
  return this;
}
/**
 * Merges `value` into `account`: if the field is already set to a non-default
 * message, the two are merged field-wise; otherwise `value` replaces it.
 *
 * <code>.google.analytics.admin.v1beta.Account account = 1 [(.google.api.field_behavior) = REQUIRED];</code>
 */
public Builder mergeAccount(com.google.analytics.admin.v1beta.Account value) {
if (accountBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& account_ != null
&& account_ != com.google.analytics.admin.v1beta.Account.getDefaultInstance()) {
getAccountBuilder().mergeFrom(value);
} else {
account_ = value;
}
} else {
accountBuilder_.mergeFrom(value);
}
// Mark presence only when a message actually remains after the merge.
if (account_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
 * Clears `account`, dropping the presence bit and disposing any live builder.
 *
 * <code>.google.analytics.admin.v1beta.Account account = 1 [(.google.api.field_behavior) = REQUIRED];</code>
 */
public Builder clearAccount() {
bitField0_ = (bitField0_ & ~0x00000001);
account_ = null;
if (accountBuilder_ != null) {
accountBuilder_.dispose();
accountBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The account to update.
* The account's `name` field is used to identify the account.
* </pre>
*
* <code>
* .google.analytics.admin.v1beta.Account account = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.analytics.admin.v1beta.Account.Builder getAccountBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getAccountFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The account to update.
* The account's `name` field is used to identify the account.
* </pre>
*
* <code>
* .google.analytics.admin.v1beta.Account account = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.analytics.admin.v1beta.AccountOrBuilder getAccountOrBuilder() {
if (accountBuilder_ != null) {
return accountBuilder_.getMessageOrBuilder();
} else {
return account_ == null
? com.google.analytics.admin.v1beta.Account.getDefaultInstance()
: account_;
}
}
/**
*
*
* <pre>
* Required. The account to update.
* The account's `name` field is used to identify the account.
* </pre>
*
* <code>
* .google.analytics.admin.v1beta.Account account = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.analytics.admin.v1beta.Account,
com.google.analytics.admin.v1beta.Account.Builder,
com.google.analytics.admin.v1beta.AccountOrBuilder>
getAccountFieldBuilder() {
if (accountBuilder_ == null) {
accountBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.analytics.admin.v1beta.Account,
com.google.analytics.admin.v1beta.Account.Builder,
com.google.analytics.admin.v1beta.AccountOrBuilder>(
getAccount(), getParentForChildren(), isClean());
account_ = null;
}
return accountBuilder_;
}
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.protobuf.FieldMask,
        com.google.protobuf.FieldMask.Builder,
        com.google.protobuf.FieldMaskOrBuilder>
    updateMaskBuilder_;

/**
 * Required. The list of fields to be updated, in snake case (for example,
 * "field_to_update"). Omitted fields are not updated; a single "*" path
 * replaces the entire entity.
 *
 * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return Whether the updateMask field is set.
 */
public boolean hasUpdateMask() {
  return ((bitField0_ & 0x00000002) != 0);
}

/**
 * Returns the update mask, or the FieldMask default instance when unset.
 *
 * @return The updateMask.
 */
public com.google.protobuf.FieldMask getUpdateMask() {
  if (updateMaskBuilder_ != null) {
    return updateMaskBuilder_.getMessage();
  }
  return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}

/** Sets update_mask from a message; rejects null. */
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
  if (updateMaskBuilder_ != null) {
    updateMaskBuilder_.setMessage(value);
  } else {
    if (value == null) {
      throw new NullPointerException();
    }
    updateMask_ = value;
  }
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}

/** Sets update_mask from a builder (built immediately). */
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
  com.google.protobuf.FieldMask built = builderForValue.build();
  if (updateMaskBuilder_ == null) {
    updateMask_ = built;
  } else {
    updateMaskBuilder_.setMessage(built);
  }
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}

/** Merges {@code value} into any previously set update_mask (proto merge semantics). */
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
  if (updateMaskBuilder_ != null) {
    updateMaskBuilder_.mergeFrom(value);
  } else {
    boolean hasExisting =
        ((bitField0_ & 0x00000002) != 0)
            && updateMask_ != null
            && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance();
    if (hasExisting) {
      // Merging hands ownership of updateMask_ to the lazily created field builder.
      getUpdateMaskBuilder().mergeFrom(value);
    } else {
      updateMask_ = value;
    }
  }
  if (updateMask_ != null) {
    bitField0_ |= 0x00000002;
    onChanged();
  }
  return this;
}

/** Clears update_mask back to unset. */
public Builder clearUpdateMask() {
  bitField0_ = (bitField0_ & ~0x00000002);
  updateMask_ = null;
  if (updateMaskBuilder_ != null) {
    updateMaskBuilder_.dispose();
    updateMaskBuilder_ = null;
  }
  onChanged();
  return this;
}

/** Returns a mutable builder for update_mask, marking the field set. */
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
  bitField0_ |= 0x00000002;
  onChanged();
  return getUpdateMaskFieldBuilder().getBuilder();
}

/** Read-only view of update_mask without forcing builder creation. */
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
  if (updateMaskBuilder_ != null) {
    return updateMaskBuilder_.getMessageOrBuilder();
  }
  return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}

// Lazily creates the nested field builder; once it exists, updateMask_ is
// owned by the builder and the plain field is nulled out.
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.protobuf.FieldMask,
        com.google.protobuf.FieldMask.Builder,
        com.google.protobuf.FieldMaskOrBuilder>
    getUpdateMaskFieldBuilder() {
  if (updateMaskBuilder_ == null) {
    updateMaskBuilder_ =
        new com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>(
            getUpdateMask(), getParentForChildren(), isClean());
    updateMask_ = null;
  }
  return updateMaskBuilder_;
}
// Unknown-field handling defers entirely to the generated superclass.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.analytics.admin.v1beta.UpdateAccountRequest)
}
// @@protoc_insertion_point(class_scope:google.analytics.admin.v1beta.UpdateAccountRequest)
// Shared immutable default instance; proto3 messages expose one singleton.
private static final com.google.analytics.admin.v1beta.UpdateAccountRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.analytics.admin.v1beta.UpdateAccountRequest();
}
public static com.google.analytics.admin.v1beta.UpdateAccountRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Parser that reads partial messages; any partially built message is attached
// to thrown InvalidProtocolBufferExceptions for diagnostics.
private static final com.google.protobuf.Parser<UpdateAccountRequest> PARSER =
new com.google.protobuf.AbstractParser<UpdateAccountRequest>() {
@java.lang.Override
public UpdateAccountRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
// Wrap plain I/O failures so callers see a protobuf-specific exception.
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<UpdateAccountRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<UpdateAccountRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.analytics.admin.v1beta.UpdateAccountRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,054 | java-managedkafka/proto-google-cloud-managedkafka-v1/src/main/java/com/google/cloud/managedkafka/v1/UpdateTopicRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/managedkafka/v1/managed_kafka.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.managedkafka.v1;
/**
*
*
* <pre>
* Request for UpdateTopic.
* </pre>
*
* Protobuf type {@code google.cloud.managedkafka.v1.UpdateTopicRequest}
*/
public final class UpdateTopicRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.managedkafka.v1.UpdateTopicRequest)
UpdateTopicRequestOrBuilder {
// Serialization version pinned by the protobuf code generator.
private static final long serialVersionUID = 0L;
// Use UpdateTopicRequest.newBuilder() to construct.
private UpdateTopicRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// No-arg constructor used only for the default instance and reflection.
private UpdateTopicRequest() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
// Hook used by the protobuf runtime to create fresh instances.
return new UpdateTopicRequest();
}
// Descriptor accessors wire this message type to the compiled proto file.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.managedkafka.v1.ManagedKafkaProto
.internal_static_google_cloud_managedkafka_v1_UpdateTopicRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.managedkafka.v1.ManagedKafkaProto
.internal_static_google_cloud_managedkafka_v1_UpdateTopicRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.managedkafka.v1.UpdateTopicRequest.class,
com.google.cloud.managedkafka.v1.UpdateTopicRequest.Builder.class);
}
private int bitField0_;

public static final int UPDATE_MASK_FIELD_NUMBER = 1;
private com.google.protobuf.FieldMask updateMask_;

/**
 * Required. Field mask selecting which Topic fields the update overwrites.
 * Paths are relative to the resource, not the full request; only masked
 * fields change, and a mask of "*" updates all fields.
 *
 * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return Whether the updateMask field is set.
 */
@java.lang.Override
public boolean hasUpdateMask() {
  return ((bitField0_ & 0x00000001) != 0);
}

/**
 * Returns the update mask, or the FieldMask default instance when unset.
 *
 * @return The updateMask.
 */
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
  if (updateMask_ != null) {
    return updateMask_;
  }
  return com.google.protobuf.FieldMask.getDefaultInstance();
}

/** Read-only view of update_mask; same default fallback as {@link #getUpdateMask()}. */
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
  if (updateMask_ != null) {
    return updateMask_;
  }
  return com.google.protobuf.FieldMask.getDefaultInstance();
}
public static final int TOPIC_FIELD_NUMBER = 2;
private com.google.cloud.managedkafka.v1.Topic topic_;

/**
 * Required. The topic to update. Its `name` field must be populated.
 *
 * <code>.google.cloud.managedkafka.v1.Topic topic = 2 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return Whether the topic field is set.
 */
@java.lang.Override
public boolean hasTopic() {
  return ((bitField0_ & 0x00000002) != 0);
}

/**
 * Returns the topic, or the Topic default instance when unset.
 *
 * @return The topic.
 */
@java.lang.Override
public com.google.cloud.managedkafka.v1.Topic getTopic() {
  if (topic_ != null) {
    return topic_;
  }
  return com.google.cloud.managedkafka.v1.Topic.getDefaultInstance();
}

/** Read-only view of topic; same default fallback as {@link #getTopic()}. */
@java.lang.Override
public com.google.cloud.managedkafka.v1.TopicOrBuilder getTopicOrBuilder() {
  if (topic_ != null) {
    return topic_;
  }
  return com.google.cloud.managedkafka.v1.Topic.getDefaultInstance();
}
// Memoized isInitialized result: -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// No required proto2 fields, so every instance is initialized; cache that.
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
// Present fields are emitted in field-number order, then any unknown
// fields captured at parse time.
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getUpdateMask());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getTopic());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
// memoizedSize caches the wire size; -1 marks it as not yet computed.
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getUpdateMask());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getTopic());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.managedkafka.v1.UpdateTopicRequest)) {
return super.equals(obj);
}
com.google.cloud.managedkafka.v1.UpdateTopicRequest other =
(com.google.cloud.managedkafka.v1.UpdateTopicRequest) obj;
// Presence must agree before values are compared, for each field.
if (hasUpdateMask() != other.hasUpdateMask()) return false;
if (hasUpdateMask()) {
if (!getUpdateMask().equals(other.getUpdateMask())) return false;
}
if (hasTopic() != other.hasTopic()) return false;
if (hasTopic()) {
if (!getTopic().equals(other.getTopic())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
// memoizedHashCode caches the result; 0 doubles as "not yet computed".
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
// Each set field mixes in its field number and value with prime multipliers.
if (hasUpdateMask()) {
hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
hash = (53 * hash) + getUpdateMask().hashCode();
}
if (hasTopic()) {
hash = (37 * hash) + TOPIC_FIELD_NUMBER;
hash = (53 * hash) + getTopic().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Static parse helpers; all delegate to PARSER or to the GeneratedMessageV3
// I/O wrappers, varying only in the input source type.
public static com.google.cloud.managedkafka.v1.UpdateTopicRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.managedkafka.v1.UpdateTopicRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.managedkafka.v1.UpdateTopicRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.managedkafka.v1.UpdateTopicRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.managedkafka.v1.UpdateTopicRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.managedkafka.v1.UpdateTopicRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.managedkafka.v1.UpdateTopicRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.managedkafka.v1.UpdateTopicRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Delimited variants expect a varint length prefix before the message bytes.
public static com.google.cloud.managedkafka.v1.UpdateTopicRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.managedkafka.v1.UpdateTopicRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.managedkafka.v1.UpdateTopicRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.managedkafka.v1.UpdateTopicRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
// New empty builder seeded from the shared default instance.
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
// New builder pre-populated with the given prototype's fields.
public static Builder newBuilder(com.google.cloud.managedkafka.v1.UpdateTopicRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
// The default instance yields an empty builder; otherwise copy this message.
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request for UpdateTopic.
* </pre>
*
* Protobuf type {@code google.cloud.managedkafka.v1.UpdateTopicRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.managedkafka.v1.UpdateTopicRequest)
com.google.cloud.managedkafka.v1.UpdateTopicRequestOrBuilder {
// Builder-side descriptor accessors mirror those on the message class.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.managedkafka.v1.ManagedKafkaProto
.internal_static_google_cloud_managedkafka_v1_UpdateTopicRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.managedkafka.v1.ManagedKafkaProto
.internal_static_google_cloud_managedkafka_v1_UpdateTopicRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.managedkafka.v1.UpdateTopicRequest.class,
com.google.cloud.managedkafka.v1.UpdateTopicRequest.Builder.class);
}
// Construct using com.google.cloud.managedkafka.v1.UpdateTopicRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
// Eagerly create nested field builders only when the runtime requests it.
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getUpdateMaskFieldBuilder();
getTopicFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
// Reset presence bits and drop both message fields and their nested builders.
bitField0_ = 0;
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
topic_ = null;
if (topicBuilder_ != null) {
topicBuilder_.dispose();
topicBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.managedkafka.v1.ManagedKafkaProto
.internal_static_google_cloud_managedkafka_v1_UpdateTopicRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.managedkafka.v1.UpdateTopicRequest getDefaultInstanceForType() {
return com.google.cloud.managedkafka.v1.UpdateTopicRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.managedkafka.v1.UpdateTopicRequest build() {
// Unlike buildPartial(), build() rejects messages missing required fields.
com.google.cloud.managedkafka.v1.UpdateTopicRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.managedkafka.v1.UpdateTopicRequest buildPartial() {
com.google.cloud.managedkafka.v1.UpdateTopicRequest result =
new com.google.cloud.managedkafka.v1.UpdateTopicRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Copies each set field into result, preferring the nested builder's value
// when one exists, and mirrors the presence bits onto the built message.
private void buildPartial0(com.google.cloud.managedkafka.v1.UpdateTopicRequest result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.topic_ = topicBuilder_ == null ? topic_ : topicBuilder_.build();
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
// The reflective field mutators below simply delegate to the superclass.
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
// Use the typed merge when possible; otherwise fall back to reflection.
if (other instanceof com.google.cloud.managedkafka.v1.UpdateTopicRequest) {
return mergeFrom((com.google.cloud.managedkafka.v1.UpdateTopicRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-wise merge: only fields present on `other` overwrite or merge here.
public Builder mergeFrom(com.google.cloud.managedkafka.v1.UpdateTopicRequest other) {
if (other == com.google.cloud.managedkafka.v1.UpdateTopicRequest.getDefaultInstance())
return this;
if (other.hasUpdateMask()) {
mergeUpdateMask(other.getUpdateMask());
}
if (other.hasTopic()) {
mergeTopic(other.getTopic());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
// No required proto2 fields, so a builder is always buildable.
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
// Dispatch on wire tags: 10 = update_mask (field 1), 18 = topic (field 2);
// tag 0 marks end of input, anything else is kept as an unknown field.
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getTopicFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
// Notify parents even on failure so partially merged state is observed.
onChanged();
} // finally
return this;
}
private int bitField0_;

private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.protobuf.FieldMask,
        com.google.protobuf.FieldMask.Builder,
        com.google.protobuf.FieldMaskOrBuilder>
    updateMaskBuilder_;

/**
 * Required. Field mask selecting which Topic fields the update overwrites.
 * Paths are relative to the resource, not the full request; only masked
 * fields change, and a mask of "*" updates all fields.
 *
 * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return Whether the updateMask field is set.
 */
public boolean hasUpdateMask() {
  return ((bitField0_ & 0x00000001) != 0);
}

/**
 * Returns the update mask, or the FieldMask default instance when unset.
 *
 * @return The updateMask.
 */
public com.google.protobuf.FieldMask getUpdateMask() {
  if (updateMaskBuilder_ != null) {
    return updateMaskBuilder_.getMessage();
  }
  return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}

/** Sets update_mask from a message; rejects null. */
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
  if (updateMaskBuilder_ != null) {
    updateMaskBuilder_.setMessage(value);
  } else {
    if (value == null) {
      throw new NullPointerException();
    }
    updateMask_ = value;
  }
  bitField0_ |= 0x00000001;
  onChanged();
  return this;
}

/** Sets update_mask from a builder (built immediately). */
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
  com.google.protobuf.FieldMask built = builderForValue.build();
  if (updateMaskBuilder_ == null) {
    updateMask_ = built;
  } else {
    updateMaskBuilder_.setMessage(built);
  }
  bitField0_ |= 0x00000001;
  onChanged();
  return this;
}

/** Merges {@code value} into any previously set update_mask (proto merge semantics). */
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
  if (updateMaskBuilder_ != null) {
    updateMaskBuilder_.mergeFrom(value);
  } else {
    boolean hasExisting =
        ((bitField0_ & 0x00000001) != 0)
            && updateMask_ != null
            && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance();
    if (hasExisting) {
      // Merging hands ownership of updateMask_ to the lazily created field builder.
      getUpdateMaskBuilder().mergeFrom(value);
    } else {
      updateMask_ = value;
    }
  }
  if (updateMask_ != null) {
    bitField0_ |= 0x00000001;
    onChanged();
  }
  return this;
}

/** Clears update_mask back to unset. */
public Builder clearUpdateMask() {
  bitField0_ = (bitField0_ & ~0x00000001);
  updateMask_ = null;
  if (updateMaskBuilder_ != null) {
    updateMaskBuilder_.dispose();
    updateMaskBuilder_ = null;
  }
  onChanged();
  return this;
}

/** Returns a mutable builder for update_mask, marking the field set. */
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
  bitField0_ |= 0x00000001;
  onChanged();
  return getUpdateMaskFieldBuilder().getBuilder();
}

/** Read-only view of update_mask without forcing builder creation. */
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
  if (updateMaskBuilder_ != null) {
    return updateMaskBuilder_.getMessageOrBuilder();
  }
  return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* Topic resource by the update. The fields specified in the update_mask are
* relative to the resource, not the full request. A field will be overwritten
* if it is in the mask. The mask is required and a value of * will update all
* fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
getUpdateMaskFieldBuilder() {
if (updateMaskBuilder_ == null) {
updateMaskBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>(
getUpdateMask(), getParentForChildren(), isClean());
updateMask_ = null;
}
return updateMaskBuilder_;
}
private com.google.cloud.managedkafka.v1.Topic topic_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.managedkafka.v1.Topic,
com.google.cloud.managedkafka.v1.Topic.Builder,
com.google.cloud.managedkafka.v1.TopicOrBuilder>
topicBuilder_;
/**
*
*
* <pre>
* Required. The topic to update. Its `name` field must be populated.
* </pre>
*
* <code>
* .google.cloud.managedkafka.v1.Topic topic = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the topic field is set.
*/
public boolean hasTopic() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The topic to update. Its `name` field must be populated.
* </pre>
*
* <code>
* .google.cloud.managedkafka.v1.Topic topic = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The topic.
*/
public com.google.cloud.managedkafka.v1.Topic getTopic() {
if (topicBuilder_ == null) {
return topic_ == null
? com.google.cloud.managedkafka.v1.Topic.getDefaultInstance()
: topic_;
} else {
return topicBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The topic to update. Its `name` field must be populated.
* </pre>
*
* <code>
* .google.cloud.managedkafka.v1.Topic topic = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setTopic(com.google.cloud.managedkafka.v1.Topic value) {
if (topicBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
topic_ = value;
} else {
topicBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The topic to update. Its `name` field must be populated.
* </pre>
*
* <code>
* .google.cloud.managedkafka.v1.Topic topic = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setTopic(com.google.cloud.managedkafka.v1.Topic.Builder builderForValue) {
if (topicBuilder_ == null) {
topic_ = builderForValue.build();
} else {
topicBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The topic to update. Its `name` field must be populated.
* </pre>
*
* <code>
* .google.cloud.managedkafka.v1.Topic topic = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeTopic(com.google.cloud.managedkafka.v1.Topic value) {
if (topicBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& topic_ != null
&& topic_ != com.google.cloud.managedkafka.v1.Topic.getDefaultInstance()) {
getTopicBuilder().mergeFrom(value);
} else {
topic_ = value;
}
} else {
topicBuilder_.mergeFrom(value);
}
if (topic_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The topic to update. Its `name` field must be populated.
* </pre>
*
* <code>
* .google.cloud.managedkafka.v1.Topic topic = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearTopic() {
bitField0_ = (bitField0_ & ~0x00000002);
topic_ = null;
if (topicBuilder_ != null) {
topicBuilder_.dispose();
topicBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The topic to update. Its `name` field must be populated.
* </pre>
*
* <code>
* .google.cloud.managedkafka.v1.Topic topic = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.managedkafka.v1.Topic.Builder getTopicBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getTopicFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The topic to update. Its `name` field must be populated.
* </pre>
*
* <code>
* .google.cloud.managedkafka.v1.Topic topic = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.managedkafka.v1.TopicOrBuilder getTopicOrBuilder() {
if (topicBuilder_ != null) {
return topicBuilder_.getMessageOrBuilder();
} else {
return topic_ == null
? com.google.cloud.managedkafka.v1.Topic.getDefaultInstance()
: topic_;
}
}
/**
*
*
* <pre>
* Required. The topic to update. Its `name` field must be populated.
* </pre>
*
* <code>
* .google.cloud.managedkafka.v1.Topic topic = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.managedkafka.v1.Topic,
com.google.cloud.managedkafka.v1.Topic.Builder,
com.google.cloud.managedkafka.v1.TopicOrBuilder>
getTopicFieldBuilder() {
if (topicBuilder_ == null) {
topicBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.managedkafka.v1.Topic,
com.google.cloud.managedkafka.v1.Topic.Builder,
com.google.cloud.managedkafka.v1.TopicOrBuilder>(
getTopic(), getParentForChildren(), isClean());
topic_ = null;
}
return topicBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.managedkafka.v1.UpdateTopicRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.managedkafka.v1.UpdateTopicRequest)
private static final com.google.cloud.managedkafka.v1.UpdateTopicRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.managedkafka.v1.UpdateTopicRequest();
}
public static com.google.cloud.managedkafka.v1.UpdateTopicRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<UpdateTopicRequest> PARSER =
new com.google.protobuf.AbstractParser<UpdateTopicRequest>() {
@java.lang.Override
public UpdateTopicRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<UpdateTopicRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<UpdateTopicRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.managedkafka.v1.UpdateTopicRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
apache/lucene | 35,735 | lucene/analysis/common/src/test/org/apache/lucene/analysis/core/TestFlattenGraphFilter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.analysis.core;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.LinkedList;
import java.util.List;
import java.util.Random;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.AutomatonToTokenStream;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.StopFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.TokenStreamToAutomaton;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.synonym.SynonymGraphFilter;
import org.apache.lucene.analysis.synonym.SynonymMap;
import org.apache.lucene.tests.analysis.BaseTokenStreamTestCase;
import org.apache.lucene.tests.analysis.CannedTokenStream;
import org.apache.lucene.tests.analysis.MockTokenizer;
import org.apache.lucene.tests.analysis.Token;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.CharsRef;
import org.apache.lucene.util.CharsRefBuilder;
import org.apache.lucene.util.automaton.Automata;
import org.apache.lucene.util.automaton.Automaton;
import org.apache.lucene.util.automaton.Operations;
import org.apache.lucene.util.automaton.Transition;
public class TestFlattenGraphFilter extends BaseTokenStreamTestCase {
private static Token token(
String term, int posInc, int posLength, int startOffset, int endOffset) {
final Token t = new Token(term, startOffset, endOffset);
t.setPositionIncrement(posInc);
t.setPositionLength(posLength);
return t;
}
public void testSimpleMock() throws Exception {
Analyzer a =
new Analyzer() {
@Override
protected TokenStreamComponents createComponents(String fieldName) {
Tokenizer tokenizer = new MockTokenizer(MockTokenizer.SIMPLE, true);
TokenStream ts = new FlattenGraphFilter(tokenizer);
return new TokenStreamComponents(tokenizer, ts);
}
};
assertAnalyzesTo(
a,
"wtf happened",
new String[] {"wtf", "happened"},
new int[] {0, 4},
new int[] {3, 12},
null,
new int[] {1, 1},
new int[] {1, 1},
true);
}
// Make sure graph is unchanged if it's already flat
public void testAlreadyFlatten() throws Exception {
TokenStream in =
new CannedTokenStream(
0,
12,
token("wtf", 1, 1, 0, 3),
token("what", 0, 1, 0, 3),
token("wow", 0, 1, 0, 3),
token("the", 1, 1, 0, 3),
token("that's", 0, 1, 0, 3),
token("fudge", 1, 1, 0, 3),
token("funny", 0, 1, 0, 3),
token("happened", 1, 1, 4, 12));
TokenStream out = new FlattenGraphFilter(in);
// ... but on output, it's flattened to wtf/what/wow that's/the fudge/funny happened:
assertTokenStreamContents(
out,
new String[] {"wtf", "what", "wow", "the", "that's", "fudge", "funny", "happened"},
new int[] {0, 0, 0, 0, 0, 0, 0, 4},
new int[] {3, 3, 3, 3, 3, 3, 3, 12},
new int[] {1, 0, 0, 1, 0, 1, 0, 1},
new int[] {1, 1, 1, 1, 1, 1, 1, 1},
12);
}
public void testWTF1() throws Exception {
// "wow that's funny" and "what the fudge" are separate side paths, in parallel with "wtf", on
// input:
TokenStream in =
new CannedTokenStream(
0,
12,
token("wtf", 1, 5, 0, 3),
token("what", 0, 1, 0, 3),
token("wow", 0, 3, 0, 3),
token("the", 1, 1, 0, 3),
token("fudge", 1, 3, 0, 3),
token("that's", 1, 1, 0, 3),
token("funny", 1, 1, 0, 3),
token("happened", 1, 1, 4, 12));
TokenStream out = new FlattenGraphFilter(in);
// ... but on output, it's flattened to wtf/what/wow that's/the fudge/funny happened:
assertTokenStreamContents(
out,
new String[] {"wtf", "what", "wow", "the", "that's", "fudge", "funny", "happened"},
new int[] {0, 0, 0, 0, 0, 0, 0, 4},
new int[] {3, 3, 3, 3, 3, 3, 3, 12},
new int[] {1, 0, 0, 1, 0, 1, 0, 1},
new int[] {3, 1, 1, 1, 1, 1, 1, 1},
12);
}
/** Same as testWTF1 except the "wtf" token comes out later */
public void testWTF2() throws Exception {
// "wow that's funny" and "what the fudge" are separate side paths, in parallel with "wtf", on
// input:
TokenStream in =
new CannedTokenStream(
0,
12,
token("what", 1, 1, 0, 3),
token("wow", 0, 3, 0, 3),
token("wtf", 0, 5, 0, 3),
token("the", 1, 1, 0, 3),
token("fudge", 1, 3, 0, 3),
token("that's", 1, 1, 0, 3),
token("funny", 1, 1, 0, 3),
token("happened", 1, 1, 4, 12));
TokenStream out = new FlattenGraphFilter(in);
// ... but on output, it's flattened to wtf/what/wow that's/the fudge/funny happened:
assertTokenStreamContents(
out,
new String[] {"what", "wow", "wtf", "the", "that's", "fudge", "funny", "happened"},
new int[] {0, 0, 0, 0, 0, 0, 0, 4},
new int[] {3, 3, 3, 3, 3, 3, 3, 12},
new int[] {1, 0, 0, 1, 0, 1, 0, 1},
new int[] {1, 1, 3, 1, 1, 1, 1, 1},
12);
}
public void testNonGreedySynonyms() throws Exception {
// This is just "hypothetical" for Lucene today, because SynFilter is
// greedy: when two syn rules match on overlapping tokens, only one
// (greedily) wins. This test pretends all syn matches could match:
TokenStream in =
new CannedTokenStream(
0,
20,
token("wizard", 1, 1, 0, 6),
token("wizard_of_oz", 0, 3, 0, 12),
token("of", 1, 1, 7, 9),
token("oz", 1, 1, 10, 12),
token("oz_screams", 0, 2, 10, 20),
token("screams", 1, 1, 13, 20));
TokenStream out = new FlattenGraphFilter(in);
// ... but on output, it's flattened to wtf/what/wow that's/the fudge/funny happened:
assertTokenStreamContents(
out,
new String[] {"wizard", "wizard_of_oz", "of", "oz", "oz_screams", "screams"},
new int[] {0, 0, 7, 10, 10, 13},
new int[] {6, 12, 9, 12, 20, 20},
new int[] {1, 0, 1, 1, 0, 1},
new int[] {1, 3, 1, 1, 2, 1},
20);
}
public void testNonGraph() throws Exception {
TokenStream in =
new CannedTokenStream(
0,
22,
token("hello", 1, 1, 0, 5),
token("pseudo", 1, 1, 6, 12),
token("world", 1, 1, 13, 18),
token("fun", 1, 1, 19, 22));
TokenStream out = new FlattenGraphFilter(in);
// ... but on output, it's flattened to wtf/what/wow that's/the fudge/funny happened:
assertTokenStreamContents(
out,
new String[] {"hello", "pseudo", "world", "fun"},
new int[] {0, 6, 13, 19},
new int[] {5, 12, 18, 22},
new int[] {1, 1, 1, 1},
new int[] {1, 1, 1, 1},
22);
}
public void testSimpleHole() throws Exception {
TokenStream in =
new CannedTokenStream(
0,
13,
token("hello", 1, 1, 0, 5),
token("hole", 2, 1, 6, 10),
token("fun", 1, 1, 11, 13));
TokenStream out = new FlattenGraphFilter(in);
assertTokenStreamContents(
out,
new String[] {"hello", "hole", "fun"},
new int[] {0, 6, 11},
new int[] {5, 10, 13},
new int[] {1, 2, 1},
new int[] {1, 1, 1},
13);
}
public void testHoleUnderSyn() throws Exception {
// Tests a StopFilter after SynFilter where a stopword in a syn is removed
//
// wizard of oz -> woz syn, but then "of" becomes a hole
TokenStream in =
new CannedTokenStream(
0,
12,
token("wizard", 1, 1, 0, 6),
token("woz", 0, 3, 0, 12),
token("oz", 2, 1, 10, 12));
TokenStream out = new FlattenGraphFilter(in);
assertTokenStreamContents(
out,
new String[] {"wizard", "woz", "oz"},
new int[] {0, 0, 10},
new int[] {6, 12, 12},
new int[] {1, 0, 2},
new int[] {1, 3, 1},
12);
}
public void testStrangelyNumberedNodes() throws Exception {
// Uses only nodes 0, 2, 3, i.e. 1 is just never used (it is not a hole!!)
TokenStream in =
new CannedTokenStream(
0,
27,
token("dog", 1, 3, 0, 5),
token("puppy", 0, 3, 0, 5),
token("flies", 3, 1, 6, 11));
TokenStream out = new FlattenGraphFilter(in);
assertTokenStreamContents(
out,
new String[] {"dog", "puppy", "flies"},
new int[] {0, 0, 6},
new int[] {5, 5, 11},
new int[] {1, 0, 1},
new int[] {1, 1, 1},
27);
}
public void testTwoLongParallelPaths() throws Exception {
// "a a a a a a" in parallel with "b b b b b b"
TokenStream in =
new CannedTokenStream(
0,
11,
token("a", 1, 1, 0, 1),
token("b", 0, 2, 0, 1),
token("a", 1, 2, 2, 3),
token("b", 1, 2, 2, 3),
token("a", 1, 2, 4, 5),
token("b", 1, 2, 4, 5),
token("a", 1, 2, 6, 7),
token("b", 1, 2, 6, 7),
token("a", 1, 2, 8, 9),
token("b", 1, 2, 8, 9),
token("a", 1, 2, 10, 11),
token("b", 1, 2, 10, 11));
TokenStream out = new FlattenGraphFilter(in);
// ... becomes flattened to a single path with overlapping a/b token between each node:
assertTokenStreamContents(
out,
new String[] {"a", "b", "a", "b", "a", "b", "a", "b", "a", "b", "a", "b"},
new int[] {0, 0, 2, 2, 4, 4, 6, 6, 8, 8, 10, 10},
new int[] {1, 1, 3, 3, 5, 5, 7, 7, 9, 9, 11, 11},
new int[] {1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0},
new int[] {1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1},
11);
}
// b has a posInc of 1, which is correct, but no edge ever visited that node.
// After hole recovery 'b' and 'c' should still be under 'abc'
// assert disabled = pos length of abc = 4
// assert enabled = AssertionError: outputEndNode=3 vs inputTo=2
public void testAltPathFirstStepHole() throws Exception {
TokenStream in =
new CannedTokenStream(
0, 3, token("abc", 1, 3, 0, 3), token("b", 1, 1, 1, 2), token("c", 1, 1, 2, 3));
TokenStream out = new FlattenGraphFilter(in);
assertTokenStreamContents(
out,
new String[] {"abc", "b", "c"},
new int[] {0, 1, 2},
new int[] {3, 2, 3},
new int[] {1, 1, 1},
new int[] {3, 1, 1},
3);
}
// Last node in an alt path fixes output node of long path. In this graph the follow-up node fixes
// that.
// incorrect pos length of abc = 1
public void testAltPathLastStepHole() throws Exception {
TokenStream in =
new CannedTokenStream(
0,
4,
token("abc", 1, 3, 0, 3),
token("a", 0, 1, 0, 1),
token("b", 1, 1, 1, 2),
token("d", 2, 1, 3, 4));
TokenStream out = new FlattenGraphFilter(in);
assertTokenStreamContents(
out,
new String[] {"abc", "a", "b", "d"},
new int[] {0, 0, 1, 3},
new int[] {1, 1, 2, 4},
new int[] {1, 0, 1, 2},
new int[] {3, 1, 1, 1},
4);
}
// Check to see how multiple holes in a row are preserved.
public void testLongHole() throws Exception {
TokenStream in =
new CannedTokenStream(
0,
28,
token("hello", 1, 1, 0, 5),
token("hole", 5, 1, 20, 24),
token("fun", 1, 1, 25, 28));
TokenStream out = new FlattenGraphFilter(in);
assertTokenStreamContents(
out,
new String[] {"hello", "hole", "fun"},
new int[] {0, 20, 25},
new int[] {5, 24, 28},
new int[] {1, 2, 1},
new int[] {1, 1, 1},
28);
}
// multiple nodes missing in the alt path.
// assert disabled = nothing
// assert enabled = AssertionError
public void testAltPathLastStepLongHole() throws Exception {
TokenStream in =
new CannedTokenStream(
0, 4, token("abc", 1, 3, 0, 3), token("a", 0, 1, 0, 1), token("d", 3, 1, 3, 4));
TokenStream out = new FlattenGraphFilter(in);
assertTokenStreamContents(
out,
new String[] {"abc", "a", "d"},
new int[] {0, 0, 3},
new int[] {1, 1, 4},
new int[] {1, 0, 2},
new int[] {2, 1, 1},
4);
}
// LUCENE-8723
// Token stream ends without any edge to fix the long edge's output node
// assert disabled = dropped token
// assert enabled = AssertionError: 2
public void testAltPathLastStepHoleWithoutEndToken() throws Exception {
TokenStream in =
new CannedTokenStream(
0, 2, token("abc", 1, 3, 0, 3), token("a", 0, 1, 0, 1), token("b", 1, 1, 1, 2));
TokenStream out = new FlattenGraphFilter(in);
assertTokenStreamContents(
out,
new String[] {"abc", "a", "b"},
new int[] {0, 0, 1},
new int[] {1, 1, 2},
new int[] {1, 0, 1},
new int[] {1, 1, 1},
2);
}
// similar to AltPathLastStepHoleWithoutEndToken, but instead of no token to trigger long path
// resolution,
// the next token has no way to reference to the long path so we have to resolve as if that last
// token wasn't present.
public void testAltPathLastStepHoleFollowedByHole() throws Exception {
TokenStream in =
new CannedTokenStream(
0, 5, token("abc", 1, 3, 0, 3), token("b", 1, 1, 1, 2), token("e", 3, 1, 4, 5));
TokenStream out = new FlattenGraphFilter(in);
assertTokenStreamContents(
out,
new String[] {"abc", "b", "e"},
new int[] {0, 1, 4},
new int[] {3, 2, 5},
new int[] {1, 1, 2},
new int[] {1, 1, 1},
5);
}
// Two Shingled long paths pass each other which gives a flattened graph with tokens backing up a
// lot.
public void testShingledGap() throws Exception {
TokenStream in =
new CannedTokenStream(
0,
5,
token("abc", 1, 3, 0, 3),
token("a", 0, 1, 0, 1),
token("b", 1, 1, 1, 2),
token("cde", 1, 3, 2, 5),
token("d", 1, 1, 3, 4),
token("e", 1, 1, 4, 5));
TokenStream out = new FlattenGraphFilter(in);
assertTokenStreamContents(
out,
new String[] {"abc", "a", "d", "b", "cde", "e"},
new int[] {0, 0, 3, 3, 4, 4},
new int[] {1, 1, 3, 3, 5, 5},
new int[] {1, 0, 1, 0, 1, 0},
new int[] {1, 1, 1, 1, 1, 1},
5);
}
// With shingles, token order may change during flattening.
// We need to be careful not to free input nodes if they still have unreleased edges.
// with/without exceptions ArrayIndexOutOfBoundsException
public void testShingledGapWithHoles() throws Exception {
TokenStream in =
new CannedTokenStream(
0,
5,
token("abc", 1, 3, 0, 3),
token("b", 1, 1, 1, 2),
token("cde", 1, 3, 2, 5),
token("d", 1, 1, 3, 4),
token("e", 1, 1, 4, 5));
TokenStream out = new FlattenGraphFilter(in);
assertTokenStreamContents(
out,
new String[] {"abc", "d", "b", "cde", "e"},
new int[] {0, 3, 3, 4, 4},
new int[] {3, 3, 3, 5, 5},
new int[] {1, 1, 0, 1, 0},
new int[] {1, 1, 1, 1, 1},
5);
}
// When the first token is a hole there is no original token to offset from.
public void testFirstTokenHole() throws Exception {
TokenStream in = new CannedTokenStream(0, 9, token("start", 2, 1, 0, 5));
TokenStream out = new FlattenGraphFilter(in);
assertTokenStreamContents(
out, new String[] {"start"}, new int[] {0}, new int[] {5}, new int[] {2}, new int[] {1}, 9);
}
// The singled token starts from a hole.
// Hole recovery will cause the shingled token to start later in the output than its alternate
// paths.
// This will result in it being released too early.
public void testShingleFromGap() throws Exception {
TokenStream in =
new CannedTokenStream(
0,
9,
token("a", 1, 1, 4, 8),
token("abc", 0, 3, 4, 7),
token("cd", 2, 2, 6, 8),
token("d", 1, 1, 7, 8),
token("e", 1, 1, 8, 9));
TokenStream out = new FlattenGraphFilter(in);
assertTokenStreamContents(
out,
new String[] {"a", "abc", "d", "cd", "e"},
new int[] {4, 4, 7, 7, 8},
new int[] {7, 7, 8, 8, 9},
new int[] {1, 0, 1, 1, 1},
new int[] {1, 1, 2, 1, 1},
9);
}
public void testShingledGapAltPath() throws Exception {
TokenStream in =
new CannedTokenStream(
0, 4, token("abc", 1, 3, 0, 3), token("abcd", 0, 4, 0, 4), token("cd", 2, 2, 2, 4));
TokenStream out = new FlattenGraphFilter(in);
assertTokenStreamContents(
out,
new String[] {"abc", "abcd", "cd"},
new int[] {0, 0, 2},
new int[] {3, 4, 4},
new int[] {1, 0, 1},
new int[] {1, 2, 1},
4);
}
// Lots of shingles and alternate paths connecting to each other. One edge 'c' missing between
// 'ab' and 'def'
public void testHeavilyConnectedGraphWithGap() throws IOException {
TokenStream in =
new CannedTokenStream(
0,
7,
token("a", 1, 1, 0, 1),
token("ab", 0, 2, 0, 2),
token("abcdef", 0, 6, 0, 6),
token("abcd", 0, 4, 0, 4),
token("bcdef", 1, 5, 1, 7),
token("def", 2, 3, 4, 7),
token("e", 1, 1, 5, 6),
token("f", 1, 1, 6, 7));
TokenStream out = new FlattenGraphFilter(in);
assertTokenStreamContents(
out,
new String[] {"a", "ab", "abcdef", "abcd", "bcdef", "e", "def", "f"},
new int[] {0, 0, 0, 0, 5, 5, 6, 6},
new int[] {1, 1, 7, 1, 7, 6, 7, 7},
new int[] {1, 0, 0, 0, 1, 0, 1, 0},
new int[] {1, 1, 3, 1, 2, 1, 1, 1},
7);
}
// This graph can create a disconnected input node that is farther ahead in the output than its
// subsequent input node.
// Exceptions: Free too early or dropped tokens.
public void testShingleWithLargeLeadingGap() throws IOException {
TokenStream in =
new CannedTokenStream(
0, 6, token("abcde", 1, 5, 0, 5), token("ef", 4, 2, 4, 6), token("f", 1, 1, 5, 6));
TokenStream out = new FlattenGraphFilter(in);
assertTokenStreamContents(
out,
new String[] {"abcde", "f", "ef"},
new int[] {0, 5, 5},
new int[] {5, 6, 6},
new int[] {1, 1, 0},
new int[] {1, 1, 1},
6);
}
/**
* build CharsRef containing 2-4 tokens
*
* @param tokens vocabulary of tokens
* @param charsRefBuilder CharsRefBuilder
* @param random Random for selecting tokens
* @return Charsref containing 2-4 tokens.
*/
private CharsRef buildMultiTokenCharsRef(
String[] tokens, CharsRefBuilder charsRefBuilder, Random random) {
int srcLen = random.nextInt(2) + 2;
String[] srcTokens = new String[srcLen];
for (int pos = 0; pos < srcLen; pos++) {
srcTokens[pos] = tokens[random().nextInt(tokens.length)];
}
SynonymMap.Builder.join(srcTokens, charsRefBuilder);
return charsRefBuilder.toCharsRef();
}
// Create a random graph then delete some edges to see if we can trip up FlattenGraphFilter
public void testRandomGraphs() throws Exception {
String[] baseTokens = new String[] {"t1", "t2", "t3", "t4"};
String[] synTokens = new String[] {"s1", "s2", "s3", "s4"};
SynonymMap.Builder mapBuilder = new SynonymMap.Builder();
CharsRefBuilder charRefBuilder = new CharsRefBuilder();
Random random = random();
// between 10 and 20 synonym entries
int synCount = random.nextInt(10) + 10;
for (int i = 0; i < synCount; i++) {
int type = random.nextInt(4);
CharsRef src;
CharsRef dest;
switch (type) {
case 0:
// 1:1
src = charRefBuilder.append(baseTokens[random.nextInt(baseTokens.length)]).toCharsRef();
charRefBuilder.clear();
dest = charRefBuilder.append(synTokens[random.nextInt(synTokens.length)]).toCharsRef();
charRefBuilder.clear();
break;
case 1:
// many:1
src = buildMultiTokenCharsRef(baseTokens, charRefBuilder, random);
charRefBuilder.clear();
dest = charRefBuilder.append(synTokens[random.nextInt(synTokens.length)]).toCharsRef();
charRefBuilder.clear();
break;
case 2:
// 1:many
src = charRefBuilder.append(baseTokens[random.nextInt(baseTokens.length)]).toCharsRef();
charRefBuilder.clear();
dest = buildMultiTokenCharsRef(synTokens, charRefBuilder, random);
charRefBuilder.clear();
break;
default:
// many:many
src = buildMultiTokenCharsRef(baseTokens, charRefBuilder, random);
charRefBuilder.clear();
dest = buildMultiTokenCharsRef(synTokens, charRefBuilder, random);
charRefBuilder.clear();
}
mapBuilder.add(src, dest, true);
}
SynonymMap synMap = mapBuilder.build();
int stopWordCount = random.nextInt(4) + 1;
CharArraySet stopWords = new CharArraySet(stopWordCount, true);
while (stopWords.size() < stopWordCount) {
int index = random.nextInt(baseTokens.length + synTokens.length);
String[] tokenArray = baseTokens;
if (index >= baseTokens.length) {
index -= baseTokens.length;
tokenArray = synTokens;
}
stopWords.add(tokenArray[index]);
}
Analyzer withFlattenGraph =
new Analyzer() {
@Override
protected TokenStreamComponents createComponents(String fieldName) {
Tokenizer in = new WhitespaceTokenizer();
TokenStream result = new SynonymGraphFilter(in, synMap, true);
result = new StopFilter(result, stopWords);
result = new FlattenGraphFilter(result);
return new TokenStreamComponents(in, result);
}
};
int tokenCount = random.nextInt(20) + 20;
List<String> stringTokens = new ArrayList<>();
while (stringTokens.size() < tokenCount) {
stringTokens.add(baseTokens[random.nextInt(baseTokens.length)]);
}
String text = String.join(" ", stringTokens);
// FlattenGraphFilter can create inconsistent offsets.
// If that is resolved we can check offsets
// Until then converting to automaton will pull text through and check if we hit asserts.
// checkAnalysisConsistency(random, withFlattenGraph, false, text);
TokenStreamToAutomaton tsta = new TokenStreamToAutomaton();
TokenStream flattenedTokenStream = withFlattenGraph.tokenStream("field", text);
assertFalse(Operations.hasDeadStates(tsta.toAutomaton(flattenedTokenStream)));
flattenedTokenStream.close();
/*
* CheckGeneralization can get VERY slow as matching holes to tokens or other holes generates a lot of potentially valid paths.
* Analyzer withoutFlattenGraph =
* new Analyzer() {
* @Override
* protected TokenStreamComponents createComponents(String fieldName) {
* Tokenizer in = new WhitespaceTokenizer();
* TokenStream result = new SynonymGraphFilter(in, synMap, true);
* result = new StopFilter(result, stopWords);
* return new TokenStreamComponents(in, result);
* }
* };
* checkGeneralization(
* withFlattenGraph.tokenStream("field", text),
* withoutFlattenGraph.tokenStream("field", text));
*
*/
}
/*
* Make some strings, make an automaton that accepts those strings, convert that automaton into a TokenStream,
* flatten it, back to an automaton, and see if the original strings are still accepted.
*/
public void testPathsNotLost() throws IOException {
int wordCount = random().nextInt(5) + 5;
List<BytesRef> acceptStrings = new LinkedList<>();
for (int i = 0; i < wordCount; i++) {
int wordLen = random().nextInt(5) + 5;
BytesRef ref = new BytesRef(wordLen);
ref.length = wordLen;
ref.offset = 0;
for (int j = 0; j < wordLen; j++) {
ref.bytes[j] = (byte) (random().nextInt(5) + 65);
}
acceptStrings.add(ref);
}
acceptStrings.sort(Comparator.naturalOrder());
acceptStrings = acceptStrings.stream().limit(wordCount).toList();
Automaton nonFlattenedAutomaton = Automata.makeStringUnion(acceptStrings);
TokenStream ts = AutomatonToTokenStream.toTokenStream(nonFlattenedAutomaton);
TokenStream flattenedTokenStream = new FlattenGraphFilter(ts);
TokenStreamToAutomaton tsta = new TokenStreamToAutomaton();
Automaton flattenedAutomaton = tsta.toAutomaton(flattenedTokenStream);
// TokenStreamToAutomaton adds position increment transitions into the automaton.
List<BytesRef> acceptStringsWithPosSep = createAcceptStringsWithPosSep(acceptStrings);
for (BytesRef acceptString : acceptStringsWithPosSep) {
assertTrue(
"string not accepted " + acceptString.utf8ToString(),
recursivelyValidate(acceptString, 0, 0, flattenedAutomaton));
}
}
/**
* adds POS_SEP bytes between bytes to match TokenStreamToAutomaton format.
*
* @param acceptStrings Byte refs of accepted strings. Each byte is a transition
* @return List of ByteRefs where each byte is separated by a POS_SEP byte.
*/
private List<BytesRef> createAcceptStringsWithPosSep(List<BytesRef> acceptStrings) {
List<BytesRef> acceptStringsWithPosSep = new ArrayList<>();
for (BytesRef acceptString : acceptStrings) {
BytesRef withPosSep = new BytesRef(acceptString.length * 2 - 1);
withPosSep.length = acceptString.length * 2 - 1;
withPosSep.offset = 0;
for (int i = 0; i < acceptString.length; i++) {
withPosSep.bytes[i * 2] = acceptString.bytes[i];
if (i * 2 + 1 < withPosSep.length) {
withPosSep.bytes[i * 2 + 1] = TokenStreamToAutomaton.POS_SEP;
}
}
acceptStringsWithPosSep.add(withPosSep);
}
return acceptStringsWithPosSep;
}
/**
* Checks if acceptString is accepted by the automaton. Automaton may be an NFA.
*
* @param acceptString String to test
* @param acceptStringIndex current index into acceptString, initial value should be 0
* @param state state to transition from. initial value should be 0
* @param automaton Automaton to test
* @return true if acceptString is accepted by the automaton. otherwise false.
*/
public boolean recursivelyValidate(
BytesRef acceptString, int acceptStringIndex, int state, Automaton automaton) {
if (acceptStringIndex == acceptString.length) {
return automaton.isAccept(state);
}
Transition transition = new Transition();
automaton.initTransition(state, transition);
int numTransitions = automaton.getNumTransitions(state);
boolean accept = false;
// Automaton can be NFA, so we need to check all matching transitions
for (int i = 0; i < numTransitions; i++) {
automaton.getTransition(state, i, transition);
if (transition.min <= acceptString.bytes[acceptStringIndex]
&& transition.max >= acceptString.bytes[acceptStringIndex]) {
accept =
recursivelyValidate(acceptString, acceptStringIndex + 1, transition.dest, automaton);
}
if (accept == true) {
break;
}
}
return accept;
}
/*
* This method checks if strings that lead to the accept state of the not flattened TokenStream
* also lead to the accept state in the flattened TokenStream. This gets complicated when you
* factor in holes. The FlattenGraphFilter will remove alternate paths that are made entirely of
* holes. An alternate path of Holes is indistinguishable from a path that just has long
* lengths(ex: testStrangelyNumberedNodes). Also alternate paths that end in multiple holes could
* be interpreted as sequential holes after the branching has converged during flattening. This
* leads to a lot of weird logic about navigating around holes that may compromise the accuracy of
* this test.
*
* @param flattened flattened TokenStream
* @param notFlattened not flattened TokenStream
* @throws IOException on error creating Automata
*/
/* private void checkGeneralization(TokenStream flattened, TokenStream notFlattened)
throws IOException {
TokenStreamToAutomaton tsta = new TokenStreamToAutomaton();
List<LinkedList<Integer>> acceptStrings = getAcceptStrings(tsta.toAutomaton(notFlattened));
checkAcceptStrings(acceptStrings, tsta.toAutomaton(flattened));
flattened.close();
notFlattened.close();
}*/
/*
* gets up to 10000 strings that lead to accept state in the given automaton.
*
* @param automaton automaton
* @return list of accept sequences
*/
/* private List<LinkedList<Integer>> getAcceptStrings(Automaton automaton) {
List<LinkedList<Integer>> acceptedSequences = new LinkedList<>();
LinkedList<Integer> prefix = new LinkedList<>();
// state 0 is always the start node
// Particularly branching automatons can create lots of possible acceptable strings. limit to
// the first 10K
buildAcceptStringRecursive(automaton, 0, prefix, acceptedSequences, 10000);
return acceptedSequences;
}*/
/*
* @param automaton automaton to generate strings from
* @param state state to start at
* @param prefix string prefix
* @param acceptedSequences List of strings build so far.
* @param limit maximum number of acceptedSequences.
*/
/*private void buildAcceptStringRecursive(
Automaton automaton,
int state,
LinkedList<Integer> prefix,
List<LinkedList<Integer>> acceptedSequences,
int limit) {
if (acceptedSequences.size() == limit) {
return;
}
if (automaton.isAccept(state)) {
acceptedSequences.add(new LinkedList<>(prefix));
return;
}
int numTransitions = automaton.getNumTransitions(state);
Transition transition = new Transition();
for (int i = 0; i < numTransitions; i++) {
automaton.getTransition(state, i, transition);
// min and max are the same transitions made from TokenStreamToAutomaton
prefix.addLast(transition.min);
buildAcceptStringRecursive(automaton, transition.dest, prefix, acceptedSequences, limit);
prefix.removeLast();
}
}
private void checkAcceptStrings(List<LinkedList<Integer>> acceptSequence, Automaton automaton) {
for (LinkedList<Integer> acceptString : acceptSequence) {
assertTrue(
"String did not lead to accept state " + acceptString,
recursivelyValidateWithHoles(acceptString, 0, automaton));
}
}
private boolean recursivelyValidateWithHoles(
LinkedList<Integer> acceptSequence, int state, Automaton automaton) {
if (acceptSequence.isEmpty()) {
return automaton.isAccept(state);
}
Integer curr = acceptSequence.pop();
int numTransitions = automaton.getNumTransitions(state);
Transition transition = new Transition();
boolean accept = false;
// Automaton can be NFA, so we need to check all matching transitions
for (int i = 0; i < numTransitions; i++) {
automaton.getTransition(state, i, transition);
if (transition.min <= curr && transition.max >= curr) {
accept = recursivelyValidateWithHoles(acceptSequence, transition.dest, automaton);
// Factoring in flattened graphs the space covered by a hole may be bigger in the flattened
// graph.
// Try consuming more steps with holes.
if (accept == false
&& transition.min == TokenStreamToAutomaton.HOLE
&& transition.max == TokenStreamToAutomaton.HOLE) {
acceptSequence.push(TokenStreamToAutomaton.HOLE);
acceptSequence.push(TokenStreamToAutomaton.POS_SEP);
accept = recursivelyValidateWithHoles(acceptSequence, transition.dest, automaton);
acceptSequence.pop();
acceptSequence.pop();
}
} else if (transition.min == TokenStreamToAutomaton.HOLE
&& transition.max == TokenStreamToAutomaton.HOLE
&& automaton.getNumTransitions(transition.dest) > 0) {
//consume multiple holes in the automaton
// clear POS_INC
automaton.getTransition(transition.dest, 0, transition);
acceptSequence.push(curr);
accept = recursivelyValidateWithHoles(acceptSequence, transition.dest, automaton);
acceptSequence.pop();
} else if(curr == TokenStreamToAutomaton.HOLE) {
//consume non-holes in the automaton with holes
while (transition.min != TokenStreamToAutomaton.POS_SEP
&& automaton.getNumTransitions(transition.dest) > 0) {
automaton.getTransition(transition.dest, 0, transition);
}
acceptSequence.push(curr);
accept = recursivelyValidateWithHoles(acceptSequence, transition.dest, automaton);
acceptSequence.pop();
}
if (accept) {
break;
}
}
// Flatten graph filter will remove side paths that are only Holes. Gaps may also change size as
// graph is flattened.
// Traverse over them if curr is a hole to make sure the gap is kept
if (accept == false && curr == TokenStreamToAutomaton.HOLE && acceptSequence.size() > 0) {
// get rid of the separator
acceptSequence.pop();
for (int i = 0; i < numTransitions; i++) {
automaton.getTransition(state, i, transition);
//advance to the next POS_SEP in automaton
while (transition.min != TokenStreamToAutomaton.POS_SEP
&& automaton.getNumTransitions(transition.dest) > 0) {
automaton.getTransition(transition.dest, 0, transition);
}
accept = recursivelyValidateWithHoles(acceptSequence, transition.dest, automaton);
if (accept) {
break;
}
}
// might be multiple holes squashed under a one step path. Try burning remaining holes
if (accept == false) {
accept = recursivelyValidateWithHoles(acceptSequence, state, automaton);
}
acceptSequence.push(TokenStreamToAutomaton.POS_SEP);
}
acceptSequence.push(curr);
return accept;
} */
// NOTE: TestSynonymGraphFilter's testRandomSyns also tests FlattenGraphFilter
}
|
apache/sis | 34,588 | endorsed/src/org.apache.sis.referencing/test/org/apache/sis/geometry/GeneralEnvelopeTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sis.geometry;
import java.time.Instant;
import static java.lang.Double.NaN;
import org.opengis.geometry.Envelope;
import org.opengis.geometry.DirectPosition;
import org.apache.sis.measure.Range;
import org.apache.sis.metadata.internal.shared.AxisNames;
import org.apache.sis.math.MathFunctions;
import org.apache.sis.io.wkt.Convention;
// Test dependencies
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.*;
import static org.opengis.test.Validators.validate;
import org.apache.sis.referencing.EPSGDependentTestCase;
import org.apache.sis.referencing.crs.HardCodedCRS;
import static org.apache.sis.referencing.crs.HardCodedCRS.WGS84;
import static org.apache.sis.referencing.crs.HardCodedCRS.WGS84_LATITUDE_FIRST;
import static org.apache.sis.test.Assertions.assertSerializedEquals;
import static org.apache.sis.test.Assertions.assertMessageContains;
import static org.apache.sis.referencing.Assertions.assertWktEquals;
// Specific to the main branch:
import static org.apache.sis.test.GeoapiAssert.PENDING_NEXT_GEOAPI_RELEASE;
/**
* Tests the {@link GeneralEnvelope} class. The {@link Envelope2D} class will also be tested as a
* side effect, because it is used for comparison purpose. Note that {@link AbstractEnvelopeTest}
* already tested {@code contains} and {@code intersects} methods, so this test file will focus on
* other methods.
*
* @author Martin Desruisseaux (IRD, Geomatys)
* @author Johann Sorel (Geomatys)
*/
public class GeneralEnvelopeTest extends EPSGDependentTestCase {
    /**
     * Tolerance threshold for floating point comparisons used by this test class.
     */
    private static final double EPS = 1E-4;
    /**
     * {@code false} if {@link #create(double, double, double, double)} can validate the envelope.
     * This is set to {@code true} only when we intentionally want to create an invalid envelope,
     * for example in order to test normalization.
     */
    boolean skipValidation = !PENDING_NEXT_GEOAPI_RELEASE;
    /**
     * Creates a new test case.
     */
    public GeneralEnvelopeTest() {
    }
/**
* Creates a new geographic envelope for the given coordinate values.
* The {@literal [xmin … xmax]} may span the anti-meridian.
* This method is overridden by {@link SubEnvelopeTest}.
*/
GeneralEnvelope create(final double xmin, final double ymin, final double xmax, final double ymax) {
final GeneralEnvelope envelope = new GeneralEnvelope(2);
envelope.setCoordinateReferenceSystem(WGS84);
envelope.setEnvelope(xmin, ymin, xmax, ymax);
if (!skipValidation) {
validate(envelope);
}
return envelope;
}
    /**
     * Verifies invariants for the given envelope after each test.
     * This method is overridden by {@link SubEnvelopeTest}.
     */
    void verifyInvariants(final GeneralEnvelope envelope) {
        // Identity check: the CRS instance itself shall never have been replaced by a copy.
        assertSame(WGS84, envelope.getCoordinateReferenceSystem());
    }
    /**
     * Asserts that the given two-dimensional envelope is equal to the given rectangle.
     * The {@code xLower} and {@code xUpper} arguments are the <var>x</var> coordinate values
     * for the lower and upper corners respectively. The actual {@code xmin} and {@code ymin}
     * values will be inferred from those corners.
     *
     * <p>This method assumes that only the <var>x</var> axis may be a wraparound axis.</p>
     *
     * @param test the actual envelope to verify.
     * @param xLower the expected first <var>x</var> coordinate value. May be greater than {@code xUpper}.
     * @param xUpper the expected last <var>x</var> coordinate value. May be less than {@code xLower}.
     * @param ymin the expected minimal <var>y</var> coordinate value. Must be less than {@code ymax}.
     * @param ymax the expected maximal <var>y</var> coordinate value. Must be greater than {@code ymin}.
     */
    private static void assertEnvelopeEquals(final Envelope test,
            final double xLower, final double ymin, final double xUpper, final double ymax)
    {
        final double xmin, xmax;
        if (MathFunctions.isNegative(xUpper - xLower)) { // Check for anti-meridian crossing.
            xmin = -180;
            xmax = +180;
        } else {
            xmin = xLower;
            xmax = xUpper;
        }
        final DirectPosition lower = test.getLowerCorner();
        final DirectPosition upper = test.getUpperCorner();
        assertEquals(xLower, lower.getOrdinate (0), "lower");
        assertEquals(xUpper, upper.getOrdinate (0), "upper");
        assertEquals(xmin, test .getMinimum (0), "xmin");
        assertEquals(xmax, test .getMaximum (0), "xmax");
        assertEquals(ymin, test .getMinimum (1), "ymin");
        assertEquals(ymax, test .getMaximum (1), "ymax");
        assertEquals(ymin, lower.getOrdinate (1), "ymin");
        assertEquals(ymax, upper.getOrdinate (1), "ymax");
        // Envelope2D exposes the same information through the Rectangle2D API; verify consistency.
        if (test instanceof Envelope2D ri) {
            assertEquals(xmin, ri.getMinX(), "xmin");
            assertEquals(xmax, ri.getMaxX(), "xmax");
            assertEquals(ymin, ri.getMinY(), "ymin");
            assertEquals(ymax, ri.getMaxY(), "ymax");
        }
    }
    /**
     * Asserts that the intersection of the two following envelopes is equal to the given rectangle.
     * First, this method tests using the {@link Envelope2D} implementation. Then, it tests using the
     * {@link GeneralEnvelope} implementation. In both cases the operation is also applied with the
     * arguments interchanged, since intersection shall be commutative.
     */
    private static void assertIntersectEquals(final GeneralEnvelope e1, final GeneralEnvelope e2,
            final double xmin, final double ymin, final double xmax, final double ymax)
    {
        final Envelope2D r1 = new Envelope2D(e1);
        final Envelope2D r2 = new Envelope2D(e2);
        final Envelope2D ri = r1.createIntersection(r2);
        assertFalse(r1.isEmpty(), "isEmpty");
        assertEnvelopeEquals(ri, xmin, ymin, xmax, ymax);
        assertEquals(ri, r2.createIntersection(r1), "Interchanged arguments.");
        // Compares with GeneralEnvelope.
        final GeneralEnvelope ei = new GeneralEnvelope(e1);
        ei.intersect(e2);
        assertFalse(e1.isEmpty(), "isEmpty");
        assertEnvelopeEquals(ei, xmin, ymin, xmax, ymax);
        assertTrue(ei.equals(ri, STRICT, false), "Using GeneralEnvelope.");
        // Interchanges arguments.
        ei.setEnvelope(e2);
        ei.intersect(e1);
        assertFalse(e1.isEmpty(), "isEmpty");
        assertEnvelopeEquals(ei, xmin, ymin, xmax, ymax);
        assertTrue(ei.equals(ri, STRICT, false), "Using GeneralEnvelope.");
    }
    /**
     * Asserts that the union of the two following envelopes is equal to the given rectangle.
     * First, this method tests using the {@link Envelope2D} implementation.
     * Then, it tests using the {@link GeneralEnvelope} implementation.
     *
     * @param inf {@code true} if the range after union is infinite. The handling of such case is different for
     *        {@link GeneralEnvelope} than for {@link Envelope2D} because we cannot store infinite values in a
     *        reliable way in a {@link java.awt.geom.Rectangle2D} object, so we use NaN instead.
     * @param exactlyOneAntiMeridianSpan {@code true} if one envelope spans the anti-meridian and the other does not.
     */
    private static void assertUnionEquals(final GeneralEnvelope e1, final GeneralEnvelope e2,
            final double xmin, final double ymin, final double xmax, final double ymax,
            final boolean inf, final boolean exactlyOneAntiMeridianSpan)
    {
        final Envelope2D r1 = new Envelope2D(e1);
        final Envelope2D r2 = new Envelope2D(e2);
        final Envelope2D ri = r1.createUnion(r2);
        assertEnvelopeEquals(ri, inf ? NaN : xmin, ymin, inf ? NaN : xmax, ymax);
        assertEquals(ri, r2.createUnion(r1), "Interchanged arguments.");
        // Compares with GeneralEnvelope.
        final GeneralEnvelope ei = new GeneralEnvelope(e1);
        ei.add(e2);
        assertEnvelopeEquals(ei, xmin, ymin, xmax, ymax);
        if (!inf) {
            assertTrue(ei.equals(ri, STRICT, false), "Using GeneralEnvelope.");
        }
        // Interchanges arguments. Argument order matters when exactly one envelope crosses the anti-meridian.
        ei.setEnvelope(e2);
        ei.add(e1);
        if (inf && exactlyOneAntiMeridianSpan) {
            assertEnvelopeEquals(ei, Double.NEGATIVE_INFINITY, ymin, Double.POSITIVE_INFINITY, ymax);
        } else {
            assertEnvelopeEquals(ei, xmin, ymin, xmax, ymax);
        }
        if (!inf) {
            assertTrue(ei.equals(ri, STRICT, false), "Using GeneralEnvelope.");
        }
    }
/**
* Asserts that adding the given point to the given envelope produces the given result.
* First, this method tests using the {@link Envelope2D} implementation. Then, it tests
* using the {@link GeneralEnvelope} implementation.
*/
private static void assertAddEquals(final GeneralEnvelope e, final DirectPosition2D p,
final double xmin, final double ymin, final double xmax, final double ymax)
{
final Envelope2D r = new Envelope2D(e);
r.add(p);
assertEnvelopeEquals(r, xmin, ymin, xmax, ymax);
// Compares with GeneralEnvelope.
final GeneralEnvelope ec = new GeneralEnvelope(e);
ec.add(p);
assertEnvelopeEquals(ec, xmin, ymin, xmax, ymax);
assertTrue(ec.equals(r, STRICT, false), "Using GeneralEnvelope.");
}
    /**
     * Tests the {@link GeneralEnvelope#intersect(Envelope)} and
     * {@link Envelope2D#createIntersection(Rectangle2D)} methods.
     * Each diagram below sketches the relative position of the two envelopes;
     * cases with broken borders are envelopes crossing the anti-meridian.
     */
    @Test
    public void testIntersection() {
        //  ┌─────────────┐
        //  │  ┌───────┐  │
        //  │  └───────┘  │
        //  └─────────────┘
        final GeneralEnvelope e1 = create(20, -20, 80, 10);
        final GeneralEnvelope e2 = create(40, -10, 62, 8);
        assertIntersectEquals(e1, e2, 40, -10, 62, 8);
        //  ┌──────────┐
        //  │  ┌───────┼──┐
        //  │  └───────┼──┘
        //  └──────────┘
        e1.setEnvelope(20, -20, 80, 12);
        e2.setEnvelope(40, -10, 100, 30);
        final double ymin=-10, ymax=12; // Will not change anymore
        assertIntersectEquals(e1, e2, 40, ymin, 80, ymax);
        //  ────┐  ┌────
        //  ──┐ │  │ ┌──
        //  ──┘ │  │ └──
        //  ────┘  └────
        e1.setRange(0, 80, 20);
        e2.setRange(0, 100, 18);
        assertIntersectEquals(e1, e2, 100, ymin, 18, ymax);
        //  ────┐  ┌────
        //  ────┼──┼─┐┌─
        //  ────┼──┼─┘└─
        //  ────┘  └────
        e2.setRange(0, 100, 90);
        assertIntersectEquals(e1, e2, 100, ymin, 20, ymax);
        //  ─────┐      ┌─────
        //     ┌─┼────┐ │
        //     └─┼────┘ │
        //  ─────┘      └─────
        e2.setRange(0, 10, 30);
        assertIntersectEquals(e1, e2, 10, ymin, 20, ymax);
        //  ──────────┐  ┌─────
        //    ┌────┐  │  │
        //    └────┘  │  │
        //  ──────────┘  └─────
        e2.setRange(0, 10, 16);
        assertIntersectEquals(e1, e2, 10, ymin, 16, ymax);
        //  ─────┐     ┌─────
        //       │ ┌─┐ │
        //       │ └─┘ │
        //  ─────┘     └─────
        e2.setRange(0, 40, 60);
        assertIntersectEquals(e1, e2, NaN, ymin, NaN, ymax);
        //  ─────┐     ┌─────
        //     ┌─┼─────┼─┐
        //     └─┼─────┼─┘
        //  ─────┘     └─────
        e2.setRange(0, 10, 90);
        assertIntersectEquals(e1, e2, NaN, ymin, NaN, ymax);
        //  ────────┬────────
        //        ┌─┼────┐
        //        └─┼────┘
        //  ────────┴────────
        e1.setRange(0, 0.0, -0.0);  // [+0 … −0] is the whole world.
        e2.setRange(0, -10, 30);
        assertIntersectEquals(e1, e2, -10, ymin, 30, ymax);
        //  ┌───────────────┐
        //  │               │
        //  │               │
        //  └───────────────┘
        e1.setRange(0, 0.0, -0.0);
        e2.setRange(0, 0.0, -0.0);
        assertIntersectEquals(e1, e2, 0.0, ymin, -0.0, ymax);
        // Post-test verification, mostly for SubEnvelope.
        verifyInvariants(e1);
        verifyInvariants(e2);
    }
    /**
     * Tests the {@link GeneralEnvelope#add(Envelope)} and
     * {@link Envelope2D#createUnion(Rectangle2D)} methods.
     * Each diagram below sketches the relative position of the two envelopes;
     * cases with broken borders are envelopes crossing the anti-meridian.
     */
    @Test
    public void testUnion() {
        //  ┌─────────────┐
        //  │  ┌───────┐  │
        //  │  └───────┘  │
        //  └─────────────┘
        final GeneralEnvelope e1 = create(20, -20, 80, 10);
        final GeneralEnvelope e2 = create(40, -10, 62, 8);
        assertUnionEquals(e1, e2, 20, -20, 80, 10, false, false);
        //  ┌──────────┐
        //  │  ┌───────┼──┐
        //  │  └───────┼──┘
        //  └──────────┘
        e1.setEnvelope(20, -20, 80, 12);
        e2.setEnvelope(40, -10, 100, 30);
        final double ymin=-20, ymax=30; // Will not change anymore.
        assertUnionEquals(e1, e2, 20, ymin, 100, ymax, false, false);
        //  ────┐  ┌────
        //  ──┐ │  │ ┌──
        //  ──┘ │  │ └──
        //  ────┘  └────
        e1.setRange(0, 80, 20);
        e2.setRange(0, 100, 18);
        assertUnionEquals(e1, e2, 80, ymin, 20, ymax, false, false);
        //  ────┐  ┌────
        //  ────┼──┼─┐┌─
        //  ────┼──┼─┘└─
        //  ────┘  └────
        e2.setRange(0, 100, 90);
        assertUnionEquals(e1, e2, +0.0, ymin, -0.0, ymax, true, false);
        //  ─────┐      ┌─────
        //     ┌─┼────┐ │
        //     └─┼────┘ │
        //  ─────┘      └─────
        e2.setRange(0, 10, 30);
        assertUnionEquals(e1, e2, 80, ymin, 30, ymax, false, true);
        //  ──────────┐  ┌─────
        //    ┌────┐  │  │
        //    └────┘  │  │
        //  ──────────┘  └─────
        e2.setRange(0, 10, 16);
        assertUnionEquals(e1, e2, 80, ymin, 20, ymax, false, true);
        //  ─────┐     ┌─────
        //       │ ┌─┐ │
        //       │ └─┘ │
        //  ─────┘     └─────
        e2.setRange(0, 41, 60);
        assertUnionEquals(e1, e2, 41, ymin, 20, ymax, false, true);
        //  ─────┐     ┌─────
        //     ┌─┼─────┼─┐
        //     └─┼─────┼─┘
        //  ─────┘     └─────
        e2.setRange(0, 10, 90);
        assertUnionEquals(e1, e2, +0.0, ymin, -0.0, ymax, true, true);
        // Post-test verification, mostly for SubEnvelope.
        verifyInvariants(e1);
        verifyInvariants(e2);
    }
    /**
     * Tests {@link GeneralEnvelope#intersect(Envelope)} with NaN values.
     * A NaN coordinate shall be left unchanged while the other coordinates are still intersected.
     */
    @Test
    public void testIntersectionWithNaN() {
        GeneralEnvelope e1 = create(20, -20, 80, 10);
        GeneralEnvelope e2 = create(10, -30, 62, NaN);
        e1.intersect(e2); assertEnvelopeEquals(e1, 20, -20, 62, 10);    // ymin: unchanged
        e2.intersect(e1); assertEnvelopeEquals(e2, 20, -20, 62, NaN);   // ymin: -30 → -20
        // Same test but NaN on the lower value.
        e1 = create(20, -20, 80, 10);
        e2 = create(10, NaN, 62, 8);
        e1.intersect(e2); assertEnvelopeEquals(e1, 20, -20, 62, 8);     // ymax: 10 → 8
        e2.intersect(e1); assertEnvelopeEquals(e2, 20, NaN, 62, 8);     // ymax: unchanged
        // Similar test, but span anti-meridian.
        e1 = create(80, -20, 20, 10);
        e2 = create(30, -30, NaN, 15);
        e1.intersect(e2); assertEnvelopeEquals(e1, 80, -20, 20, 10);    // [x0 … x1] range unchanged.
        e2.intersect(e1); assertEnvelopeEquals(e2, 30, -20, NaN, 10);   // Idem.
        // Same test, but NaN on the lower value.
        e1 = create( 80, -20, 20, 10);
        e2 = create(NaN, -30, 62, 15);
        e1.intersect(e2); assertEnvelopeEquals(e1, 80, -20, 20, 10);    // [x0 … x1] range unchanged.
        e2.intersect(e1); assertEnvelopeEquals(e2, NaN, -20, 62, 10);   // Idem.
    }
    /**
     * Tests {@link GeneralEnvelope#add(Envelope)} with NaN values.
     * A NaN coordinate shall be left unchanged while the other coordinates still contribute to the union.
     */
    @Test
    public void testUnionWithNaN() {
        GeneralEnvelope e1 = create(20, -20, 80, 10);
        GeneralEnvelope e2 = create(10, -30, 62, NaN);
        // Expect ymin to be updated even if ymax is NaN.
        e1.add(e2); assertEnvelopeEquals(e1, 10, -30, 80, 10);      // ymin: -20 → -30
        e2.add(e1); assertEnvelopeEquals(e2, 10, -30, 80, NaN);     // ymin: unchanged
        // Same test but NaN on the lower value.
        e1 = create(20, -20, 80, 10);
        e2 = create(10, NaN, 62, 25);
        e1.add(e2); assertEnvelopeEquals(e1, 10, -20, 80, 25);      // ymax: 10 → 25
        e2.add(e1); assertEnvelopeEquals(e2, 10, NaN, 80, 25);      // ymax: unchanged
        // Similar test, but span anti-meridian.
        e1 = create(80, -20, 20, 10);
        e2 = create(30, -30, NaN, 15);
        e1.add(e2); assertEnvelopeEquals(e1, 80, -30, 20, 15);      // [x0 … x1] range unchanged.
        e2.add(e1); assertEnvelopeEquals(e2, 30, -30, NaN, 15);     // Idem.
        // Same test, but NaN on the lower value.
        e1 = create( 80, -20, 20, 10);
        e2 = create(NaN, -30, 62, 15);
        e1.add(e2); assertEnvelopeEquals(e1, 80, -30, 20, 15);      // [x0 … x1] range unchanged.
        e2.add(e1); assertEnvelopeEquals(e2, NaN, -30, 62, 15);     // Idem.
    }
    /**
     * Tests the {@link GeneralEnvelope#add(DirectPosition)} and
     * {@link Envelope2D#add(Point2D)} methods.
     * The second half of this test uses an envelope crossing the anti-meridian.
     */
    @Test
    public void testAddPoint() {
        final double ymin=-20, ymax=30; // Will not change anymore
        final GeneralEnvelope e = create(20, ymin, 80, ymax);
        final DirectPosition2D p = new DirectPosition2D(40, 15);
        assertAddEquals(e, p, 20, ymin, 80, ymax);
        p.x = 100; // Add on the right side.
        assertAddEquals(e, p, 20, ymin, 100, ymax);
        p.x = -10; // Add on the left side.
        assertAddEquals(e, p, -10, ymin, 80, ymax);
        e.setRange(0, 80, 20);  // Crosses the anti-meridian.
        p.x = 100; // No change expected.
        assertAddEquals(e, p, 80, ymin, 20, ymax);
        p.x = 70; // Add on the right side.
        assertAddEquals(e, p, 70, ymin, 20, ymax);
        p.x = 30; // Add on the left side.
        assertAddEquals(e, p, 80, ymin, 30, ymax);
        verifyInvariants(e);
    }
    /**
     * Tests the {@link GeneralEnvelope#normalize()} method.
     */
    @Test
    public void testNormalize() {
        skipValidation = true;  // Because this test intentionally creates invalid envelopes.
        GeneralEnvelope e = create(-100, -100, +100, +100);
        assertTrue(e.normalize());
        assertEnvelopeEquals(e, -100, -90, +100, +90);  // Latitudes clamped to [-90 … +90].
        e = create(185, 10, 190, 20);
        assertTrue(e.normalize());
        assertEnvelopeEquals(e, -175, 10, -170, 20);    // Longitudes shifted in [-180 … +180].
        e = create(175, 10, 185, 20);
        assertTrue(e.normalize());
        assertEnvelopeEquals(e, 175, 10, -175, 20);     // Result crosses the anti-meridian.
        e = create(0, 10, 360, 20);
        assertTrue(e.normalize());
        assertTrue(MathFunctions.isPositiveZero(e.getLower(0)), "Expect positive zero");
        assertTrue(MathFunctions.isNegativeZero(e.getUpper(0)), "Expect negative zero");
        verifyInvariants(e);
    }
    /**
     * Tests the {@link GeneralEnvelope#normalize()} method
     * with an envelope having more then 360° of longitude.
     * The result shall be expanded to the whole world.
     */
    @Test
    public void testNormalizeWorld() {
        GeneralEnvelope e = create(-195, -90, +170, +90);   // -195° is equivalent to 165°
        assertTrue(e.normalize());
        assertEnvelopeEquals(e, -180, -90, +180, +90);
        verifyInvariants(e);
    }
    /**
     * Tests the {@link GeneralEnvelope#simplify()} method.
     */
    @Test
    public void testSimplify() {
        // Normal envelope: no change expected.
        GeneralEnvelope e = create(-100, -10, +100, +10);
        assertFalse(e.simplify());
        assertEnvelopeEquals(e, -100, -10, +100, +10);
        // Anti-meridian crossing: should substitute [-180 … 180]°
        e = create(30, -10, -60, 10);
        assertTrue(e.simplify());
        assertEnvelopeEquals(e, -180, -10, 180, 10);
        // Anti-meridian crossing using positive and negative zero.
        e = create(0.0, -10, -0.0, 10);
        assertTrue(e.simplify());
        assertEnvelopeEquals(e, -180, -10, 180, 10);
        verifyInvariants(e);
    }
/**
* Tests the {@link GeneralEnvelope#wraparound(WraparoundMethod)} method.
*/
@Test
public void tesWraparound() {
GeneralEnvelope e = create(30, -10, -60, 10);
assertTrue(e.wraparound(WraparoundMethod.CONTIGUOUS));
assertEnvelopeEquals(e, 30, -10, 300, 10);
e = create(30, -10, -15, 10);
assertTrue(e.wraparound(WraparoundMethod.CONTIGUOUS));
assertEnvelopeEquals(e, -330, -10, -15, 10);
}
    /**
     * Tests {@link GeneralEnvelope#setEnvelope(Envelope)},
     * including a copy from a different {@link Envelope} implementation.
     */
    @Test
    public void testCopy() {
        final GeneralEnvelope e = create(2, -4, 3, -3);
        e.setEnvelope(create(3, -5, -8, 2));
        assertEnvelopeEquals(e, 3, -5, -8, 2);
        verifyInvariants(e);
        /*
         * Tests with a different implementation, for testing another code path.
         * Constructor arguments are (x, y, width, height).
         */
        e.setEnvelope(new Envelope2D(null, -2, 3, 8, 5));
        assertEnvelopeEquals(e, -2, 3, 6, 8);
        verifyInvariants(e);
    }
    /**
     * Tests {@link GeneralEnvelope#setEnvelope(double...)} with valid ranges,
     * then with a range which is known to be invalid (latitude minimum greater than maximum).
     */
    @Test
    public void testSetEnvelope() {
        final GeneralEnvelope e = create(2, -4, 3, -3);
        e.setEnvelope(3, -5, -8, 2);
        var ex = assertThrows(IllegalArgumentException.class,
                () -> e.setEnvelope(1, -10, 2, -20),
                "Invalid range shall not be allowed.");
        assertMessageContains(ex, AxisNames.GEODETIC_LATITUDE);
        // Verify that the envelope still have the old values.
        assertEnvelopeEquals(e, 3, -5, -8, 2);
        verifyInvariants(e);
    }
    /**
     * Tests {@link GeneralEnvelope#setRange(int, double, double)} with a valid range,
     * then with a range which is known to be invalid (latitude minimum greater than maximum).
     */
    @Test
    public void testSetRange() {
        final GeneralEnvelope e = create(2, -4, 3, -3);
        e.setRange(1, -5, 2);
        var ex = assertThrows(IllegalArgumentException.class,
                () -> e.setRange(1, -10, -20),
                "Invalid range shall not be allowed.");
        assertMessageContains(ex, AxisNames.GEODETIC_LATITUDE);
        // Verify that the envelope still have the old values.
        assertEnvelopeEquals(e, 2, -5, 3, 2);
        verifyInvariants(e);
    }
    /**
     * Tests {@link GeneralEnvelope#setCoordinateReferenceSystem(CoordinateReferenceSystem)}.
     * Setting a CRS shall validate the existing coordinate values against the CRS axis ranges.
     */
    @Test
    public void testSetCoordinateReferenceSystem() {
        final GeneralEnvelope e = create(2, -4, 3, -3);
        e.setCoordinateReferenceSystem(null);
        /*
         * Set an invalid latitude range, but the Envelope cannot know that fact without CRS.
         * Only when we will specify the CRS, the envelope will realize that it contains an
         * invalid range.
         */
        e.setRange(1, -10, -20);
        var ex = assertThrows(IllegalStateException.class,
                () -> e.setCoordinateReferenceSystem(WGS84),
                "Invalid range shall not be allowed.");
        assertMessageContains(ex, AxisNames.GEODETIC_LATITUDE);
        /*
         * Verify that the envelope values are unchanged.
         * Then fix the range and try again to set the CRS.
         */
        assertEquals(  2, e.getLower(0));
        assertEquals(-10, e.getLower(1));
        assertEquals(  3, e.getUpper(0));
        assertEquals(-20, e.getUpper(1));
        e.setRange(1, -20, -10);
        e.setCoordinateReferenceSystem(WGS84);
        assertEnvelopeEquals(e, 2, -20, 3, -10);
        verifyInvariants(e);
    }
    /**
     * Tests modifying the corner of an envelope.
     * The corners returned by the getters shall be views writing through to the envelope.
     */
    @Test
    public void testCornerModifications() {
        final GeneralEnvelope e = create(2, -4, 3, -3);
        e.getLowerCorner().setOrdinate(0,  1);
        e.getUpperCorner().setOrdinate(1, -1);
        assertEquals( 1, e.getLower(0));
        assertEquals(-4, e.getLower(1));
        assertEquals( 3, e.getUpper(0));
        assertEquals(-1, e.getUpper(1));
        verifyInvariants(e);
    }
    /**
     * Tests {@link GeneralEnvelope#translate(double...)}.
     * Each coordinate of both corners shall be shifted by the corresponding vector component.
     */
    @Test
    public void testTranslate() {
        final GeneralEnvelope envelope = new GeneralEnvelope(new double[] {4, 5}, new double[] {8, 7});
        envelope.translate(2, -4);
        assertEnvelopeEquals(envelope, 6, 1, 10, 3);
    }
    /**
     * Tests {@link GeneralEnvelope#horizontal()}.
     * The horizontal component shall keep only the two geographic dimensions and their CRS.
     */
    @Test
    public void testHorizontal() {
        GeneralEnvelope envelope = new GeneralEnvelope(new double[] {4, 12, 5, -8}, new double[] {8, 19, 7, -3});
        envelope.setCoordinateReferenceSystem(HardCodedCRS.GEOID_4D_MIXED_ORDER);
        envelope = envelope.horizontal();
        assertEnvelopeEquals(envelope, 5, -8, 7, -3);
        assertSame(WGS84_LATITUDE_FIRST, envelope.getCoordinateReferenceSystem());
    }
    /**
     * Tests {@link GeneralEnvelope#getTimeRange()} and {@link GeneralEnvelope#setTimeRange(Instant, Instant)}.
     * The temporal coordinates in this test are days elapsed since November 17, 1858 at 00:00 UTC.
     */
    @Test
    public void testTimeRange() {
        final GeneralEnvelope envelope = new GeneralEnvelope(HardCodedCRS.WGS84_WITH_TIME);
        envelope.setRange(0, -20, 25);
        envelope.setRange(1, -30, 12);
        envelope.setRange(2, 58840, 59000.75);      // Days since the 1858-11-17 epoch.
        final Range<Instant> range = envelope.getTimeRange().get();
        assertEquals(Instant.parse("2019-12-23T00:00:00Z"), range.getMinValue());
        assertEquals(Instant.parse("2020-05-31T18:00:00Z"), range.getMaxValue());
        envelope.setTimeRange(Instant.parse("2015-04-10T06:00:00Z"),
                              Instant.parse("2018-12-29T12:00:00Z"));
        assertArrayEquals(new double[] {-20, -30, 57122.25}, envelope.getLowerCorner().getCoordinate());
        assertArrayEquals(new double[] { 25,  12, 58481.50}, envelope.getUpperCorner().getCoordinate());
    }
    /**
     * Tests the {@link GeneralEnvelope#toString()} method.
     * Two-dimensional envelopes use the {@code BOX} keyword, others use {@code BOX3D} (etc).
     */
    @Test
    public void testToString() {
        GeneralEnvelope envelope = new GeneralEnvelope(new double[] {-180, -90}, new double[] {180, 90});
        assertEquals("BOX(-180 -90, 180 90)", envelope.toString());
        envelope = new GeneralEnvelope(3);
        envelope.setRange(0, -180, +180);
        envelope.setRange(1,  -90,  +90);
        envelope.setRange(2,   10,   30);
        assertEquals("BOX3D(-180 -90 10, 180 90 30)", envelope.toString());
    }
/**
* Tests the {@link GeneralEnvelope#GeneralEnvelope(CharSequence)} constructor.
*/
@Test
@SuppressWarnings("ResultOfObjectAllocationIgnored")
public void testWktParsing() {
GeneralEnvelope envelope = new GeneralEnvelope("BOX(-180 -90,180 90)");
assertEquals(2, envelope.getDimension());
assertEquals(-180, envelope.getLower(0));
assertEquals( 180, envelope.getUpper(0));
assertEquals( -90, envelope.getLower(1));
assertEquals( 90, envelope.getUpper(1));
validate(envelope);
envelope = new GeneralEnvelope("BOX3D(-180 -90 10, 180 90 30)");
assertEquals(3, envelope.getDimension());
assertEquals(-180, envelope.getLower(0));
assertEquals( 180, envelope.getUpper(0));
assertEquals( -90, envelope.getLower(1));
assertEquals( 90, envelope.getUpper(1));
assertEquals( 10, envelope.getLower(2));
assertEquals( 30, envelope.getUpper(2));
validate(envelope);
envelope = new GeneralEnvelope("POLYGON((-80 -30,-100 40,80 40,100 -40,-80 -30))");
assertEquals(-100, envelope.getLower(0));
assertEquals( 100, envelope.getUpper(0));
assertEquals( -40, envelope.getLower(1));
assertEquals( 40, envelope.getUpper(1));
validate(envelope);
assertEquals("BOX(6 10, 6 10)", new GeneralEnvelope("POINT(6 10)").toString());
assertEquals("BOX3D(6 10 3, 6 10 3)", new GeneralEnvelope("POINT M [ 6 10 3 ] ").toString());
assertEquals("BOX(3 4, 20 50)", new GeneralEnvelope("LINESTRING(3 4,10 50,20 25)").toString());
assertEquals("BOX(1 1, 6 5)", new GeneralEnvelope(
"MULTIPOLYGON(((1 1,5 1,5 5,1 5,1 1),(2 2, 3 2, 3 3, 2 3,2 2)),((3 3,6 2,6 4,3 3)))").toString());
assertEquals("BOX(3 6, 7 10)", new GeneralEnvelope("GEOMETRYCOLLECTION(POINT(4 6),LINESTRING(3 8,7 10))").toString());
assertEquals(0, new GeneralEnvelope("BOX()").getDimension());
try {
new GeneralEnvelope("BOX(3 4");
fail("Parsing should fails because of missing parenthesis.");
} catch (IllegalArgumentException e) {
// This is the expected exception.
assertTrue(e.getMessage().contains("BOX"));
}
try {
new GeneralEnvelope("LINESTRING(3 4,10 50),20 25)");
fail("Parsing should fails because of missing parenthesis.");
} catch (IllegalArgumentException e) {
// This is the expected exception.
assertTrue(e.getMessage().contains("LINESTRING"));
}
}
    /**
     * Tests {@link GeneralEnvelope#toWKT()} on a {@link GeneralEnvelope}.
     * The expected multi-line indentation is part of the verified output.
     */
    @Test
    public void testWktFormatting() {
        final GeneralEnvelope envelope = new GeneralEnvelope(3);
        envelope.setRange(0,  6, 10);
        envelope.setRange(1, 16, 20);
        envelope.setRange(2, 23, 50);
        assertWktEquals(Convention.WKT2,    // Actually not in WKT2 standard.
                "BOX3D[ 6 16 23,\n" +
                "      10 20 50]", envelope);
    }
/**
 * Tests the {@link GeneralEnvelope#equals(Object)} and
 * {@link GeneralEnvelope#equals(Envelope, double, boolean)} methods.
 * The latter allows an {@code EPS} tolerance when comparing coordinates.
 */
@Test
public void testEquals() {
    /*
     * Initializes an empty envelope. The new envelope is empty
     * but not null because initialized to 0, not NaN.
     */
    final GeneralEnvelope e1 = new GeneralEnvelope(4);
    assertTrue  (e1.isEmpty());
    assertFalse (e1.isAllNaN());
    assertEquals(e1.getLowerCorner(), e1.getUpperCorner());
    /*
     * Initializes with arbitrary coordinate values.
     * Should not be empty anymore.
     */
    for (int i=e1.getDimension(); --i>=0;) {
        e1.setRange(i, i*5 + 2, i*6 + 5);
    }
    assertFalse(e1.isAllNaN ());
    assertFalse(e1.isEmpty());
    assertNotEquals(e1.getLowerCorner(), e1.getUpperCorner());
    /*
     * Creates a new envelope initialized with the same
     * coordinate values. The two envelope shall be equal.
     */
    final GeneralEnvelope e2 = new GeneralEnvelope(e1);
    assertPositionEquals(e1.getLowerCorner(), e2.getLowerCorner());
    assertPositionEquals(e1.getUpperCorner(), e2.getUpperCorner());
    assertTrue  (e1.contains(e2, true ));       // contains itself when edges are allowed to touch
    assertFalse (e1.contains(e2, false));       // but not strictly (boundaries coincide)
    assertNotSame(e1, e2);
    assertEquals (e1, e2);
    assertTrue  (e1.equals(e2, EPS, true ));
    assertTrue  (e1.equals(e2, EPS, false));
    assertEquals (e1.hashCode(), e2.hashCode());
    /*
     * Offset slightly some coordinate value. Should not be equal anymore,
     * except when comparing with a tolerance value.
     */
    e2.setRange(2, e2.getLower(2) + 3E-5, e2.getUpper(2) - 3E-5);   // offset well below EPS
    assertTrue (e1.contains(e2, true ));
    assertFalse(e1.contains(e2, false));
    assertFalse(e1.equals  (e2));
    assertTrue (e1.equals  (e2, EPS, true ));
    assertTrue (e1.equals  (e2, EPS, false));
    assertNotEquals(e1.hashCode(), e2.hashCode());
    /*
     * Applies a greater offset. Should not be equal,
     * even when comparing with a tolerance value.
     */
    e2.setRange(1, e2.getLower(1) + 1.5, e2.getUpper(1) - 1.5);     // offset far above EPS
    assertTrue (e1.contains(e2, true ));
    assertFalse(e1.contains(e2, false));
    assertFalse(e1.equals  (e2));
    assertFalse(e1.equals  (e2, EPS, true ));
    assertFalse(e1.equals  (e2, EPS, false));
    assertNotEquals(e1.hashCode(), e2.hashCode());
}
/**
 * Compares the specified corners: they shall be distinct instances
 * that are nevertheless equal in value, with consistent hash codes.
 */
private static void assertPositionEquals(final DirectPosition p1, final DirectPosition p2) {
    assertNotSame(p1, p2);
    assertEquals (p1, p2);
    assertEquals (p1.hashCode(), p2.hashCode());
}
/**
 * Tests the {@link GeneralEnvelope#clone()} method.
 */
@Test
public void testClone() {
    final GeneralEnvelope original = new GeneralEnvelope(2);
    original.setRange(0, -40, +60);
    original.setRange(1, -20, +30);
    final GeneralEnvelope copy = original.clone();
    validate(copy);
    assertNotSame(original, copy);
    assertEquals (original, copy);
    // Mutating the original shall not affect the clone (coordinates are deeply copied).
    original.setRange(0, -40, +61);
    assertNotEquals(original, copy, "Coordinates array should have been cloned.");
    // Applying the same mutation to the clone restores equality.
    copy.setRange(0, -40, +61);
    assertEquals(original, copy);
}
/**
 * Tests {@code GeneralEnvelope} serialization: a round-trip shall
 * produce a distinct but equal instance.
 */
@Test
public final void testSerialization() {
    final GeneralEnvelope before = create(-20, -10, 20, 10);
    final GeneralEnvelope after  = assertSerializedEquals(before);
    assertNotSame(before, after);
    validate(after);
}
}
|
google/j2objc | 36,166 | jre_emul/android/platform/libcore/jsr166-tests/src/test/java/jsr166/RecursiveTaskTest.java | /*
* Written by Doug Lea with assistance from members of JCP JSR-166
* Expert Group and released to the public domain, as explained at
* http://creativecommons.org/publicdomain/zero/1.0/
*/
package jsr166;
import static java.util.concurrent.TimeUnit.SECONDS;
import java.util.HashSet;
import java.util.concurrent.CancellationException;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.ForkJoinTask;
import java.util.concurrent.RecursiveTask;
import java.util.concurrent.TimeoutException;
import junit.framework.Test;
import junit.framework.TestSuite;
public class RecursiveTaskTest extends JSR166TestCase {
// android-note: Removed because the CTS runner does a bad job of
// retrying tests that have suite() declarations.
//
// public static void main(String[] args) {
// main(suite(), args);
// }
// public static Test suite() {
// return new TestSuite(RecursiveTaskTest.class);
// }
// Returns a fresh pool with default parallelism, one per test invocation.
private static ForkJoinPool mainPool() {
    return new ForkJoinPool();
}
// Returns a pool with a single worker thread, making fork/queue order deterministic.
private static ForkJoinPool singletonPool() {
    return new ForkJoinPool(1);
}
// Returns a single-threaded pool in async (FIFO) mode: forked tasks are
// executed least-recently-forked first, unlike the default LIFO mode.
private static ForkJoinPool asyncSingletonPool() {
    return new ForkJoinPool(1,
        ForkJoinPool.defaultForkJoinWorkerThreadFactory,
        null, true);
}
/**
 * Invokes the given task on the given pool, checking that the task is
 * not done beforehand and completes normally. The pool is shut down
 * afterwards by the {@code PoolCleaner} (try-with-resources).
 *
 * @return the task's result
 */
private <T> T testInvokeOnPool(ForkJoinPool pool, RecursiveTask<T> a) {
    try (PoolCleaner cleaner = cleaner(pool)) {
        checkNotDone(a);
        T result = pool.invoke(a);
        checkCompletedNormally(a, result);
        return result;
    }
}
/**
 * Checks that the task is in the "not yet done" state: no result, no
 * exception, not cancelled; {@code get} responds to interruption when
 * called from outside a ForkJoin pool, and a zero-timeout {@code get}
 * times out.
 *
 * @param a the task expected to be incomplete
 */
void checkNotDone(RecursiveTask<?> a) {    // wildcard instead of raw type
    assertFalse(a.isDone());
    assertFalse(a.isCompletedNormally());
    assertFalse(a.isCompletedAbnormally());
    assertFalse(a.isCancelled());
    assertNull(a.getException());
    assertNull(a.getRawResult());

    // Interruptible get() is only meaningful outside a ForkJoin worker.
    if (! ForkJoinTask.inForkJoinPool()) {
        Thread.currentThread().interrupt();
        try {
            a.get();
            shouldThrow();
        } catch (InterruptedException success) {
        } catch (Throwable fail) { threadUnexpectedException(fail); }

        Thread.currentThread().interrupt();
        try {
            a.get(5L, SECONDS);
            shouldThrow();
        } catch (InterruptedException success) {
        } catch (Throwable fail) { threadUnexpectedException(fail); }
    }

    // Zero-timeout get() on an incomplete task must time out immediately.
    try {
        a.get(0L, SECONDS);
        shouldThrow();
    } catch (TimeoutException success) {
    } catch (Throwable fail) { threadUnexpectedException(fail); }
}
/**
 * Checks that the task completed normally with the given result: done,
 * not cancelled, no exception, and every result accessor
 * ({@code getRawResult}, {@code join}, timed and untimed {@code get})
 * returns the same (identical) expected value. Also verifies that
 * cancelling an already-completed task is refused.
 */
<T> void checkCompletedNormally(RecursiveTask<T> a, T expected) {
    assertTrue(a.isDone());
    assertFalse(a.isCancelled());
    assertTrue(a.isCompletedNormally());
    assertFalse(a.isCompletedAbnormally());
    assertNull(a.getException());
    assertSame(expected, a.getRawResult());
    assertSame(expected, a.join());
    // cancel() must be a no-op on a completed task.
    assertFalse(a.cancel(false));
    assertFalse(a.cancel(true));
    try {
        assertSame(expected, a.get());
    } catch (Throwable fail) { threadUnexpectedException(fail); }
    try {
        assertSame(expected, a.get(5L, SECONDS));
    } catch (Throwable fail) { threadUnexpectedException(fail); }
}
/**
 * Waits for the task to complete, and checks that when it does,
 * it will have an Integer result equals to the given int.
 */
void checkCompletesNormally(RecursiveTask<Integer> a, int expected) {
    final Integer result = a.join();    // blocks until the task finishes
    assertEquals(expected, (int) result);
    checkCompletedNormally(a, result);
}
/**
 * Like checkCompletesNormally, but verifies that the task has
 * already completed (reads the raw result instead of joining).
 */
void checkCompletedNormally(RecursiveTask<Integer> a, int expected) {
    Integer r = a.getRawResult();
    assertEquals(expected, (int) r);
    checkCompletedNormally(a, r);
}
/**
 * Checks that the task has been cancelled: done, cancelled, completed
 * abnormally with a {@link CancellationException}, and that
 * {@code join}, {@code get} and timed {@code get} all throw
 * {@link CancellationException}.
 *
 * @param a the task expected to be cancelled
 */
void checkCancelled(RecursiveTask<?> a) {    // wildcard instead of raw type
    assertTrue(a.isDone());
    assertTrue(a.isCancelled());
    assertFalse(a.isCompletedNormally());
    assertTrue(a.isCompletedAbnormally());
    assertTrue(a.getException() instanceof CancellationException);
    assertNull(a.getRawResult());

    try {
        a.join();
        shouldThrow();
    } catch (CancellationException success) {
    } catch (Throwable fail) { threadUnexpectedException(fail); }

    try {
        a.get();
        shouldThrow();
    } catch (CancellationException success) {
    } catch (Throwable fail) { threadUnexpectedException(fail); }

    try {
        a.get(5L, SECONDS);
        shouldThrow();
    } catch (CancellationException success) {
    } catch (Throwable fail) { threadUnexpectedException(fail); }
}
/**
 * Checks that the task completed abnormally with (an instance of the
 * same class as) the given throwable: done, not cancelled, completed
 * abnormally; {@code join} rethrows it directly while {@code get}
 * wraps it in an {@link ExecutionException}.
 *
 * @param a the task expected to have failed
 * @param t the expected cause of failure
 */
void checkCompletedAbnormally(RecursiveTask<?> a, Throwable t) {    // wildcard instead of raw type
    assertTrue(a.isDone());
    assertFalse(a.isCancelled());
    assertFalse(a.isCompletedNormally());
    assertTrue(a.isCompletedAbnormally());
    assertSame(t.getClass(), a.getException().getClass());
    assertNull(a.getRawResult());
    // cancel() must be refused on an already-failed task.
    assertFalse(a.cancel(false));
    assertFalse(a.cancel(true));

    try {
        a.join();
        shouldThrow();
    } catch (Throwable expected) {
        assertSame(t.getClass(), expected.getClass());
    }

    try {
        a.get();
        shouldThrow();
    } catch (ExecutionException success) {
        assertSame(t.getClass(), success.getCause().getClass());
    } catch (Throwable fail) { threadUnexpectedException(fail); }

    try {
        a.get(5L, SECONDS);
        shouldThrow();
    } catch (ExecutionException success) {
        assertSame(t.getClass(), success.getCause().getClass());
    } catch (Throwable fail) { threadUnexpectedException(fail); }
}
// Exception type deliberately thrown by failing test tasks, so tests can
// distinguish the expected failure from an unexpected one.
public static final class FJException extends RuntimeException {
    public FJException() { super(); }
}
// An invalid return value for Fib
static final Integer NoResult = Integer.valueOf(-17);
// A simple recursive task for testing: computes fib(n) by forking the
// (n-1) subtask and computing the (n-2) subtask in the current thread.
final class FibTask extends CheckedRecursiveTask<Integer> {
    final int number;   // the argument n of fib(n)
    FibTask(int n) { number = n; }
    public Integer realCompute() {
        int n = number;
        if (n <= 1)
            return n;   // base cases: fib(0)=0, fib(1)=1
        FibTask f1 = new FibTask(n - 1);
        f1.fork();      // compute fib(n-1) asynchronously
        return (new FibTask(n - 2)).compute() + f1.join();
    }
    // Exposes the protected setRawResult for tests (e.g. testReinitialize).
    public void publicSetRawResult(Integer result) {
        setRawResult(result);
    }
}
// A recursive action failing in base case: structured like FibTask but
// throws FJException once recursion reaches n <= 1, so the failure
// propagates up through fork/join.
final class FailingFibTask extends RecursiveTask<Integer> {
    final int number;   // the argument n
    int result;         // unused; kept for parity with upstream jsr166 source
    FailingFibTask(int n) { number = n; }
    public Integer compute() {
        int n = number;
        if (n <= 1)
            throw new FJException();    // deliberate failure at the base case
        FailingFibTask f1 = new FailingFibTask(n - 1);
        f1.fork();
        // join() of the failing subtask rethrows FJException.
        return (new FibTask(n - 2)).compute() + f1.join();
    }
}
/**
 * invoke returns value when task completes normally.
 * isCompletedAbnormally and isCancelled return false for normally
 * completed tasks. getRawResult of a completed non-null task
 * returns value;
 */
public void testInvoke() {
    RecursiveTask<Integer> a = new CheckedRecursiveTask<Integer>() {
        public Integer realCompute() {
            FibTask task = new FibTask(8);
            Integer result = task.invoke();
            assertEquals(21, (int) result);     // fib(8) == 21
            checkCompletedNormally(task, result);
            return result;
        }};
    assertEquals(21, (int) testInvokeOnPool(mainPool(), a));
}
/**
* quietlyInvoke task returns when task completes normally.
* isCompletedAbnormally and isCancelled return false for normally
* completed tasks
*/
public void testQuietlyInvoke() {
RecursiveTask<Integer> a = new CheckedRecursiveTask<Integer>() {
public Integer realCompute() {
FibTask f = new FibTask(8);
f.quietlyInvoke();
checkCompletedNormally(f, 21);
return NoResult;
}};
assertSame(NoResult, testInvokeOnPool(mainPool(), a));
}
/**
* join of a forked task returns when task completes
*/
public void testForkJoin() {
RecursiveTask<Integer> a = new CheckedRecursiveTask<Integer>() {
public Integer realCompute() {
FibTask f = new FibTask(8);
assertSame(f, f.fork());
Integer r = f.join();
assertEquals(21, (int) r);
checkCompletedNormally(f, r);
return r;
}};
assertEquals(21, (int) testInvokeOnPool(mainPool(), a));
}
/**
* get of a forked task returns when task completes
*/
public void testForkGet() {
RecursiveTask<Integer> a = new CheckedRecursiveTask<Integer>() {
public Integer realCompute() throws Exception {
FibTask f = new FibTask(8);
assertSame(f, f.fork());
Integer r = f.get();
assertEquals(21, (int) r);
checkCompletedNormally(f, r);
return r;
}};
assertEquals(21, (int) testInvokeOnPool(mainPool(), a));
}
/**
* timed get of a forked task returns when task completes
*/
public void testForkTimedGet() {
RecursiveTask<Integer> a = new CheckedRecursiveTask<Integer>() {
public Integer realCompute() throws Exception {
FibTask f = new FibTask(8);
assertSame(f, f.fork());
Integer r = f.get(5L, SECONDS);
assertEquals(21, (int) r);
checkCompletedNormally(f, r);
return r;
}};
assertEquals(21, (int) testInvokeOnPool(mainPool(), a));
}
/**
* quietlyJoin of a forked task returns when task completes
*/
public void testForkQuietlyJoin() {
RecursiveTask<Integer> a = new CheckedRecursiveTask<Integer>() {
public Integer realCompute() {
FibTask f = new FibTask(8);
assertSame(f, f.fork());
f.quietlyJoin();
Integer r = f.getRawResult();
assertEquals(21, (int) r);
checkCompletedNormally(f, r);
return r;
}};
assertEquals(21, (int) testInvokeOnPool(mainPool(), a));
}
/**
* helpQuiesce returns when tasks are complete.
* getQueuedTaskCount returns 0 when quiescent
*/
public void testForkHelpQuiesce() {
RecursiveTask<Integer> a = new CheckedRecursiveTask<Integer>() {
public Integer realCompute() {
FibTask f = new FibTask(8);
assertSame(f, f.fork());
helpQuiesce();
while (!f.isDone()) // wait out race
;
assertEquals(0, getQueuedTaskCount());
checkCompletedNormally(f, 21);
return NoResult;
}};
assertSame(NoResult, testInvokeOnPool(mainPool(), a));
}
/**
* invoke task throws exception when task completes abnormally
*/
public void testAbnormalInvoke() {
RecursiveTask<Integer> a = new CheckedRecursiveTask<Integer>() {
public Integer realCompute() {
FailingFibTask f = new FailingFibTask(8);
try {
f.invoke();
shouldThrow();
} catch (FJException success) {
checkCompletedAbnormally(f, success);
}
return NoResult;
}};
assertSame(NoResult, testInvokeOnPool(mainPool(), a));
}
/**
* quietlyInvoke task returns when task completes abnormally
*/
public void testAbnormalQuietlyInvoke() {
RecursiveTask<Integer> a = new CheckedRecursiveTask<Integer>() {
public Integer realCompute() {
FailingFibTask f = new FailingFibTask(8);
f.quietlyInvoke();
assertTrue(f.getException() instanceof FJException);
checkCompletedAbnormally(f, f.getException());
return NoResult;
}};
assertSame(NoResult, testInvokeOnPool(mainPool(), a));
}
/**
* join of a forked task throws exception when task completes abnormally
*/
public void testAbnormalForkJoin() {
RecursiveTask<Integer> a = new CheckedRecursiveTask<Integer>() {
public Integer realCompute() {
FailingFibTask f = new FailingFibTask(8);
assertSame(f, f.fork());
try {
Integer r = f.join();
shouldThrow();
} catch (FJException success) {
checkCompletedAbnormally(f, success);
}
return NoResult;
}};
assertSame(NoResult, testInvokeOnPool(mainPool(), a));
}
/**
* get of a forked task throws exception when task completes abnormally
*/
public void testAbnormalForkGet() {
RecursiveTask<Integer> a = new CheckedRecursiveTask<Integer>() {
public Integer realCompute() throws Exception {
FailingFibTask f = new FailingFibTask(8);
assertSame(f, f.fork());
try {
Integer r = f.get();
shouldThrow();
} catch (ExecutionException success) {
Throwable cause = success.getCause();
assertTrue(cause instanceof FJException);
checkCompletedAbnormally(f, cause);
}
return NoResult;
}};
assertSame(NoResult, testInvokeOnPool(mainPool(), a));
}
/**
* timed get of a forked task throws exception when task completes abnormally
*/
public void testAbnormalForkTimedGet() {
RecursiveTask<Integer> a = new CheckedRecursiveTask<Integer>() {
public Integer realCompute() throws Exception {
FailingFibTask f = new FailingFibTask(8);
assertSame(f, f.fork());
try {
Integer r = f.get(5L, SECONDS);
shouldThrow();
} catch (ExecutionException success) {
Throwable cause = success.getCause();
assertTrue(cause instanceof FJException);
checkCompletedAbnormally(f, cause);
}
return NoResult;
}};
assertSame(NoResult, testInvokeOnPool(mainPool(), a));
}
/**
* quietlyJoin of a forked task returns when task completes abnormally
*/
public void testAbnormalForkQuietlyJoin() {
RecursiveTask<Integer> a = new CheckedRecursiveTask<Integer>() {
public Integer realCompute() {
FailingFibTask f = new FailingFibTask(8);
assertSame(f, f.fork());
f.quietlyJoin();
assertTrue(f.getException() instanceof FJException);
checkCompletedAbnormally(f, f.getException());
return NoResult;
}};
assertSame(NoResult, testInvokeOnPool(mainPool(), a));
}
/**
* invoke task throws exception when task cancelled
*/
public void testCancelledInvoke() {
RecursiveTask<Integer> a = new CheckedRecursiveTask<Integer>() {
public Integer realCompute() {
FibTask f = new FibTask(8);
assertTrue(f.cancel(true));
try {
Integer r = f.invoke();
shouldThrow();
} catch (CancellationException success) {
checkCancelled(f);
}
return NoResult;
}};
assertSame(NoResult, testInvokeOnPool(mainPool(), a));
}
/**
* join of a forked task throws exception when task cancelled
*/
public void testCancelledForkJoin() {
RecursiveTask<Integer> a = new CheckedRecursiveTask<Integer>() {
public Integer realCompute() {
FibTask f = new FibTask(8);
assertTrue(f.cancel(true));
assertSame(f, f.fork());
try {
Integer r = f.join();
shouldThrow();
} catch (CancellationException success) {
checkCancelled(f);
}
return NoResult;
}};
assertSame(NoResult, testInvokeOnPool(mainPool(), a));
}
/**
* get of a forked task throws exception when task cancelled
*/
public void testCancelledForkGet() {
RecursiveTask<Integer> a = new CheckedRecursiveTask<Integer>() {
public Integer realCompute() throws Exception {
FibTask f = new FibTask(8);
assertTrue(f.cancel(true));
assertSame(f, f.fork());
try {
Integer r = f.get();
shouldThrow();
} catch (CancellationException success) {
checkCancelled(f);
}
return NoResult;
}};
assertSame(NoResult, testInvokeOnPool(mainPool(), a));
}
/**
* timed get of a forked task throws exception when task cancelled
*/
public void testCancelledForkTimedGet() {
RecursiveTask<Integer> a = new CheckedRecursiveTask<Integer>() {
public Integer realCompute() throws Exception {
FibTask f = new FibTask(8);
assertTrue(f.cancel(true));
assertSame(f, f.fork());
try {
Integer r = f.get(5L, SECONDS);
shouldThrow();
} catch (CancellationException success) {
checkCancelled(f);
}
return NoResult;
}};
assertSame(NoResult, testInvokeOnPool(mainPool(), a));
}
/**
* quietlyJoin of a forked task returns when task cancelled
*/
public void testCancelledForkQuietlyJoin() {
RecursiveTask<Integer> a = new CheckedRecursiveTask<Integer>() {
public Integer realCompute() {
FibTask f = new FibTask(8);
assertTrue(f.cancel(true));
assertSame(f, f.fork());
f.quietlyJoin();
checkCancelled(f);
return NoResult;
}};
assertSame(NoResult, testInvokeOnPool(mainPool(), a));
}
/**
* getPool of executing task returns its pool
*/
public void testGetPool() {
final ForkJoinPool mainPool = mainPool();
RecursiveTask<Integer> a = new CheckedRecursiveTask<Integer>() {
public Integer realCompute() {
assertSame(mainPool, getPool());
return NoResult;
}};
assertSame(NoResult, testInvokeOnPool(mainPool, a));
}
/**
* getPool of non-FJ task returns null
*/
public void testGetPool2() {
RecursiveTask<Integer> a = new CheckedRecursiveTask<Integer>() {
public Integer realCompute() {
assertNull(getPool());
return NoResult;
}};
assertSame(NoResult, a.invoke());
}
/**
* inForkJoinPool of executing task returns true
*/
public void testInForkJoinPool() {
RecursiveTask<Integer> a = new CheckedRecursiveTask<Integer>() {
public Integer realCompute() {
assertTrue(inForkJoinPool());
return NoResult;
}};
assertSame(NoResult, testInvokeOnPool(mainPool(), a));
}
/**
* inForkJoinPool of non-FJ task returns false
*/
public void testInForkJoinPool2() {
RecursiveTask<Integer> a = new CheckedRecursiveTask<Integer>() {
public Integer realCompute() {
assertFalse(inForkJoinPool());
return NoResult;
}};
assertSame(NoResult, a.invoke());
}
/**
* The value set by setRawResult is returned by getRawResult
*/
public void testSetRawResult() {
RecursiveTask<Integer> a = new CheckedRecursiveTask<Integer>() {
public Integer realCompute() {
setRawResult(NoResult);
assertSame(NoResult, getRawResult());
return NoResult;
}
};
assertSame(NoResult, a.invoke());
}
/**
* A reinitialized normally completed task may be re-invoked
*/
public void testReinitialize() {
RecursiveTask<Integer> a = new CheckedRecursiveTask<Integer>() {
public Integer realCompute() {
FibTask f = new FibTask(8);
checkNotDone(f);
for (int i = 0; i < 3; i++) {
Integer r = f.invoke();
assertEquals(21, (int) r);
checkCompletedNormally(f, r);
f.reinitialize();
f.publicSetRawResult(null);
checkNotDone(f);
}
return NoResult;
}};
assertSame(NoResult, testInvokeOnPool(mainPool(), a));
}
/**
* A reinitialized abnormally completed task may be re-invoked
*/
public void testReinitializeAbnormal() {
RecursiveTask<Integer> a = new CheckedRecursiveTask<Integer>() {
public Integer realCompute() {
FailingFibTask f = new FailingFibTask(8);
checkNotDone(f);
for (int i = 0; i < 3; i++) {
try {
f.invoke();
shouldThrow();
} catch (FJException success) {
checkCompletedAbnormally(f, success);
}
f.reinitialize();
checkNotDone(f);
}
return NoResult;
}};
assertSame(NoResult, testInvokeOnPool(mainPool(), a));
}
/**
* invoke task throws exception after invoking completeExceptionally
*/
public void testCompleteExceptionally() {
RecursiveTask<Integer> a = new CheckedRecursiveTask<Integer>() {
public Integer realCompute() {
FibTask f = new FibTask(8);
f.completeExceptionally(new FJException());
try {
Integer r = f.invoke();
shouldThrow();
} catch (FJException success) {
checkCompletedAbnormally(f, success);
}
return NoResult;
}};
assertSame(NoResult, testInvokeOnPool(mainPool(), a));
}
/**
* invoke task suppresses execution invoking complete
*/
public void testComplete() {
RecursiveTask<Integer> a = new CheckedRecursiveTask<Integer>() {
public Integer realCompute() {
FibTask f = new FibTask(8);
f.complete(NoResult);
Integer r = f.invoke();
assertSame(NoResult, r);
checkCompletedNormally(f, NoResult);
return r;
}};
assertSame(NoResult, testInvokeOnPool(mainPool(), a));
}
/**
* invokeAll(t1, t2) invokes all task arguments
*/
public void testInvokeAll2() {
RecursiveTask<Integer> a = new CheckedRecursiveTask<Integer>() {
public Integer realCompute() {
FibTask f = new FibTask(8);
FibTask g = new FibTask(9);
invokeAll(f, g);
checkCompletedNormally(f, 21);
checkCompletedNormally(g, 34);
return NoResult;
}};
assertSame(NoResult, testInvokeOnPool(mainPool(), a));
}
/**
* invokeAll(tasks) with 1 argument invokes task
*/
public void testInvokeAll1() {
RecursiveTask<Integer> a = new CheckedRecursiveTask<Integer>() {
public Integer realCompute() {
FibTask f = new FibTask(8);
invokeAll(f);
checkCompletedNormally(f, 21);
return NoResult;
}};
assertSame(NoResult, testInvokeOnPool(mainPool(), a));
}
/**
* invokeAll(tasks) with > 2 argument invokes tasks
*/
public void testInvokeAll3() {
RecursiveTask<Integer> a = new CheckedRecursiveTask<Integer>() {
public Integer realCompute() {
FibTask f = new FibTask(8);
FibTask g = new FibTask(9);
FibTask h = new FibTask(7);
invokeAll(f, g, h);
assertTrue(f.isDone());
assertTrue(g.isDone());
assertTrue(h.isDone());
checkCompletedNormally(f, 21);
checkCompletedNormally(g, 34);
checkCompletedNormally(h, 13);
return NoResult;
}};
assertSame(NoResult, testInvokeOnPool(mainPool(), a));
}
/**
 * invokeAll(collection) invokes all tasks in the collection
 */
public void testInvokeAllCollection() {
    RecursiveTask<Integer> a = new CheckedRecursiveTask<Integer>() {
        public Integer realCompute() {
            FibTask f = new FibTask(8);
            FibTask g = new FibTask(9);
            FibTask h = new FibTask(7);
            // Parameterized set instead of a raw HashSet; invokeAll
            // accepts any Collection<T extends ForkJoinTask<?>>.
            HashSet<FibTask> set = new HashSet<>();
            set.add(f);
            set.add(g);
            set.add(h);
            invokeAll(set);
            assertTrue(f.isDone());
            assertTrue(g.isDone());
            assertTrue(h.isDone());
            checkCompletedNormally(f, 21);
            checkCompletedNormally(g, 34);
            checkCompletedNormally(h, 13);
            return NoResult;
        }};
    assertSame(NoResult, testInvokeOnPool(mainPool(), a));
}
/**
* invokeAll(tasks) with any null task throws NPE
*/
public void testInvokeAllNPE() {
RecursiveTask<Integer> a = new CheckedRecursiveTask<Integer>() {
public Integer realCompute() {
FibTask f = new FibTask(8);
FibTask g = new FibTask(9);
FibTask h = null;
try {
invokeAll(f, g, h);
shouldThrow();
} catch (NullPointerException success) {}
return NoResult;
}};
assertSame(NoResult, testInvokeOnPool(mainPool(), a));
}
/**
* invokeAll(t1, t2) throw exception if any task does
*/
public void testAbnormalInvokeAll2() {
RecursiveTask<Integer> a = new CheckedRecursiveTask<Integer>() {
public Integer realCompute() {
FibTask f = new FibTask(8);
FailingFibTask g = new FailingFibTask(9);
try {
invokeAll(f, g);
shouldThrow();
} catch (FJException success) {
checkCompletedAbnormally(g, success);
}
return NoResult;
}};
assertSame(NoResult, testInvokeOnPool(mainPool(), a));
}
/**
* invokeAll(tasks) with 1 argument throws exception if task does
*/
public void testAbnormalInvokeAll1() {
RecursiveTask<Integer> a = new CheckedRecursiveTask<Integer>() {
public Integer realCompute() {
FailingFibTask g = new FailingFibTask(9);
try {
invokeAll(g);
shouldThrow();
} catch (FJException success) {
checkCompletedAbnormally(g, success);
}
return NoResult;
}};
assertSame(NoResult, testInvokeOnPool(mainPool(), a));
}
/**
* invokeAll(tasks) with > 2 argument throws exception if any task does
*/
public void testAbnormalInvokeAll3() {
RecursiveTask<Integer> a = new CheckedRecursiveTask<Integer>() {
public Integer realCompute() {
FibTask f = new FibTask(8);
FailingFibTask g = new FailingFibTask(9);
FibTask h = new FibTask(7);
try {
invokeAll(f, g, h);
shouldThrow();
} catch (FJException success) {
checkCompletedAbnormally(g, success);
}
return NoResult;
}};
assertSame(NoResult, testInvokeOnPool(mainPool(), a));
}
/**
 * invokeAll(collection) throws exception if any task does
 */
public void testAbnormalInvokeAllCollection() {
    RecursiveTask<Integer> a = new CheckedRecursiveTask<Integer>() {
        public Integer realCompute() {
            FailingFibTask f = new FailingFibTask(8);
            FibTask g = new FibTask(9);
            FibTask h = new FibTask(7);
            // Parameterized set instead of a raw HashSet; the common
            // supertype of FailingFibTask and FibTask is ForkJoinTask<Integer>.
            HashSet<ForkJoinTask<Integer>> set = new HashSet<>();
            set.add(f);
            set.add(g);
            set.add(h);
            try {
                invokeAll(set);
                shouldThrow();
            } catch (FJException success) {
                checkCompletedAbnormally(f, success);
            }
            return NoResult;
        }};
    assertSame(NoResult, testInvokeOnPool(mainPool(), a));
}
/**
* tryUnfork returns true for most recent unexecuted task,
* and suppresses execution
*/
public void testTryUnfork() {
RecursiveTask<Integer> a = new CheckedRecursiveTask<Integer>() {
public Integer realCompute() {
FibTask g = new FibTask(9);
assertSame(g, g.fork());
FibTask f = new FibTask(8);
assertSame(f, f.fork());
assertTrue(f.tryUnfork());
helpQuiesce();
checkNotDone(f);
checkCompletedNormally(g, 34);
return NoResult;
}};
assertSame(NoResult, testInvokeOnPool(singletonPool(), a));
}
/**
* getSurplusQueuedTaskCount returns > 0 when
* there are more tasks than threads
*/
public void testGetSurplusQueuedTaskCount() {
RecursiveTask<Integer> a = new CheckedRecursiveTask<Integer>() {
public Integer realCompute() {
FibTask h = new FibTask(7);
assertSame(h, h.fork());
FibTask g = new FibTask(9);
assertSame(g, g.fork());
FibTask f = new FibTask(8);
assertSame(f, f.fork());
assertTrue(getSurplusQueuedTaskCount() > 0);
helpQuiesce();
assertEquals(0, getSurplusQueuedTaskCount());
checkCompletedNormally(f, 21);
checkCompletedNormally(g, 34);
checkCompletedNormally(h, 13);
return NoResult;
}};
assertSame(NoResult, testInvokeOnPool(singletonPool(), a));
}
/**
* peekNextLocalTask returns most recent unexecuted task.
*/
public void testPeekNextLocalTask() {
RecursiveTask<Integer> a = new CheckedRecursiveTask<Integer>() {
public Integer realCompute() {
FibTask g = new FibTask(9);
assertSame(g, g.fork());
FibTask f = new FibTask(8);
assertSame(f, f.fork());
assertSame(f, peekNextLocalTask());
checkCompletesNormally(f, 21);
helpQuiesce();
checkCompletedNormally(g, 34);
return NoResult;
}};
assertSame(NoResult, testInvokeOnPool(singletonPool(), a));
}
/**
* pollNextLocalTask returns most recent unexecuted task
* without executing it
*/
public void testPollNextLocalTask() {
RecursiveTask<Integer> a = new CheckedRecursiveTask<Integer>() {
public Integer realCompute() {
FibTask g = new FibTask(9);
assertSame(g, g.fork());
FibTask f = new FibTask(8);
assertSame(f, f.fork());
assertSame(f, pollNextLocalTask());
helpQuiesce();
checkNotDone(f);
checkCompletedNormally(g, 34);
return NoResult;
}};
assertSame(NoResult, testInvokeOnPool(singletonPool(), a));
}
/**
* pollTask returns an unexecuted task without executing it
*/
public void testPollTask() {
RecursiveTask<Integer> a = new CheckedRecursiveTask<Integer>() {
public Integer realCompute() {
FibTask g = new FibTask(9);
assertSame(g, g.fork());
FibTask f = new FibTask(8);
assertSame(f, f.fork());
assertSame(f, pollTask());
helpQuiesce();
checkNotDone(f);
checkCompletedNormally(g, 34);
return NoResult;
}};
assertSame(NoResult, testInvokeOnPool(singletonPool(), a));
}
/**
* peekNextLocalTask returns least recent unexecuted task in async mode
*/
public void testPeekNextLocalTaskAsync() {
RecursiveTask<Integer> a = new CheckedRecursiveTask<Integer>() {
public Integer realCompute() {
FibTask g = new FibTask(9);
assertSame(g, g.fork());
FibTask f = new FibTask(8);
assertSame(f, f.fork());
assertSame(g, peekNextLocalTask());
assertEquals(21, (int) f.join());
helpQuiesce();
checkCompletedNormally(f, 21);
checkCompletedNormally(g, 34);
return NoResult;
}};
assertSame(NoResult, testInvokeOnPool(asyncSingletonPool(), a));
}
/**
* pollNextLocalTask returns least recent unexecuted task without
* executing it, in async mode
*/
public void testPollNextLocalTaskAsync() {
RecursiveTask<Integer> a = new CheckedRecursiveTask<Integer>() {
public Integer realCompute() {
FibTask g = new FibTask(9);
assertSame(g, g.fork());
FibTask f = new FibTask(8);
assertSame(f, f.fork());
assertSame(g, pollNextLocalTask());
helpQuiesce();
checkCompletedNormally(f, 21);
checkNotDone(g);
return NoResult;
}};
assertSame(NoResult, testInvokeOnPool(asyncSingletonPool(), a));
}
/**
* pollTask returns an unexecuted task without executing it, in
* async mode
*/
public void testPollTaskAsync() {
RecursiveTask<Integer> a = new CheckedRecursiveTask<Integer>() {
public Integer realCompute() {
FibTask g = new FibTask(9);
assertSame(g, g.fork());
FibTask f = new FibTask(8);
assertSame(f, f.fork());
assertSame(g, pollTask());
helpQuiesce();
checkCompletedNormally(f, 21);
checkNotDone(g);
return NoResult;
}};
assertSame(NoResult, testInvokeOnPool(asyncSingletonPool(), a));
}
}
|
googleads/google-ads-java | 36,185 | google-ads-stubs-v19/src/main/java/com/google/ads/googleads/v19/services/DimensionOverlapResult.java | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v19/services/audience_insights_service.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v19.services;
/**
* <pre>
* A list of audience attributes of a single dimension, including their overlap
* with a primary attribute, returned as part of a
* [GenerateAudienceOverlapInsightsResponse][google.ads.googleads.v19.services.GenerateAudienceOverlapInsightsResponse].
* </pre>
*
* Protobuf type {@code google.ads.googleads.v19.services.DimensionOverlapResult}
*/
public final class DimensionOverlapResult extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v19.services.DimensionOverlapResult)
DimensionOverlapResultOrBuilder {
private static final long serialVersionUID = 0L;
// Use DimensionOverlapResult.newBuilder() to construct.
private DimensionOverlapResult(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private DimensionOverlapResult() {
dimension_ = 0;
items_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new DimensionOverlapResult();
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v19.services.AudienceInsightsServiceProto.internal_static_google_ads_googleads_v19_services_DimensionOverlapResult_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v19.services.AudienceInsightsServiceProto.internal_static_google_ads_googleads_v19_services_DimensionOverlapResult_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v19.services.DimensionOverlapResult.class, com.google.ads.googleads.v19.services.DimensionOverlapResult.Builder.class);
}
public static final int DIMENSION_FIELD_NUMBER = 1;
// Stored as the raw wire integer; resolved to the enum on demand in getDimension().
private int dimension_ = 0;
/**
 * <pre>
 * The dimension of all the attributes in this section.
 * </pre>
 *
 * <code>.google.ads.googleads.v19.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension dimension = 1;</code>
 * @return The enum numeric value on the wire for dimension.
 */
@java.lang.Override public int getDimensionValue() {
return dimension_;
}
/**
 * <pre>
 * The dimension of all the attributes in this section.
 * </pre>
 *
 * <code>.google.ads.googleads.v19.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension dimension = 1;</code>
 * @return The dimension. Wire values with no matching enum constant map to UNRECOGNIZED.
 */
@java.lang.Override public com.google.ads.googleads.v19.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension getDimension() {
com.google.ads.googleads.v19.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension result = com.google.ads.googleads.v19.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension.forNumber(dimension_);
return result == null ? com.google.ads.googleads.v19.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension.UNRECOGNIZED : result;
}
public static final int ITEMS_FIELD_NUMBER = 2;
// Immutable once the message is built; exposed directly by the accessors below.
@SuppressWarnings("serial")
private java.util.List<com.google.ads.googleads.v19.services.AudienceOverlapItem> items_;
/**
 * <pre>
 * The attributes and their overlap with the primary attribute.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v19.services.AudienceOverlapItem items = 2;</code>
 */
@java.lang.Override
public java.util.List<com.google.ads.googleads.v19.services.AudienceOverlapItem> getItemsList() {
return items_;
}
/**
 * <pre>
 * The attributes and their overlap with the primary attribute.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v19.services.AudienceOverlapItem items = 2;</code>
 */
@java.lang.Override
public java.util.List<? extends com.google.ads.googleads.v19.services.AudienceOverlapItemOrBuilder>
getItemsOrBuilderList() {
return items_;
}
/**
 * <pre>
 * The attributes and their overlap with the primary attribute.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v19.services.AudienceOverlapItem items = 2;</code>
 */
@java.lang.Override
public int getItemsCount() {
return items_.size();
}
/**
 * <pre>
 * The attributes and their overlap with the primary attribute.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v19.services.AudienceOverlapItem items = 2;</code>
 */
@java.lang.Override
public com.google.ads.googleads.v19.services.AudienceOverlapItem getItems(int index) {
return items_.get(index);
}
/**
 * <pre>
 * The attributes and their overlap with the primary attribute.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v19.services.AudienceOverlapItem items = 2;</code>
 */
@java.lang.Override
public com.google.ads.googleads.v19.services.AudienceOverlapItemOrBuilder getItemsOrBuilder(
int index) {
return items_.get(index);
}
// Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// No required fields in this message, so it is always initialized.
memoizedIsInitialized = 1;
return true;
}
// Serializes only non-default fields, per proto3 semantics:
// the enum is skipped when it equals UNSPECIFIED (wire value 0).
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (dimension_ != com.google.ads.googleads.v19.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension.UNSPECIFIED.getNumber()) {
output.writeEnum(1, dimension_);
}
for (int i = 0; i < items_.size(); i++) {
output.writeMessage(2, items_.get(i));
}
getUnknownFields().writeTo(output);
}
// Computes and memoizes the serialized byte size, mirroring writeTo().
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (dimension_ != com.google.ads.googleads.v19.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension.UNSPECIFIED.getNumber()) {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(1, dimension_);
}
for (int i = 0; i < items_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, items_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
// Field-by-field structural equality, including unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.ads.googleads.v19.services.DimensionOverlapResult)) {
return super.equals(obj);
}
com.google.ads.googleads.v19.services.DimensionOverlapResult other = (com.google.ads.googleads.v19.services.DimensionOverlapResult) obj;
if (dimension_ != other.dimension_) return false;
if (!getItemsList()
.equals(other.getItemsList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
// Memoized hash consistent with equals(); 0 is used as the "not yet computed" sentinel.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + DIMENSION_FIELD_NUMBER;
hash = (53 * hash) + dimension_;
if (getItemsCount() > 0) {
hash = (37 * hash) + ITEMS_FIELD_NUMBER;
hash = (53 * hash) + getItemsList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// --- Standard generated parse entry points. All delegate to PARSER; the
// --- InputStream/CodedInputStream variants wrap protobuf parse errors as IOException
// --- via the GeneratedMessageV3 helpers.
public static com.google.ads.googleads.v19.services.DimensionOverlapResult parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v19.services.DimensionOverlapResult parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v19.services.DimensionOverlapResult parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v19.services.DimensionOverlapResult parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v19.services.DimensionOverlapResult parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v19.services.DimensionOverlapResult parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v19.services.DimensionOverlapResult parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v19.services.DimensionOverlapResult parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Delimited variants read a varint length prefix before the message bytes.
public static com.google.ads.googleads.v19.services.DimensionOverlapResult parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v19.services.DimensionOverlapResult parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v19.services.DimensionOverlapResult parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v19.services.DimensionOverlapResult parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// --- Builder factory methods.
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
// Returns a builder pre-populated with the given prototype's field values.
public static Builder newBuilder(com.google.ads.googleads.v19.services.DimensionOverlapResult prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 * <pre>
 * A list of audience attributes of a single dimension, including their overlap
 * with a primary attribute, returned as part of a
 * [GenerateAudienceOverlapInsightsResponse][google.ads.googleads.v19.services.GenerateAudienceOverlapInsightsResponse].
 * </pre>
 *
 * Protobuf type {@code google.ads.googleads.v19.services.DimensionOverlapResult}
 */
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v19.services.DimensionOverlapResult)
com.google.ads.googleads.v19.services.DimensionOverlapResultOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v19.services.AudienceInsightsServiceProto.internal_static_google_ads_googleads_v19_services_DimensionOverlapResult_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v19.services.AudienceInsightsServiceProto.internal_static_google_ads_googleads_v19_services_DimensionOverlapResult_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v19.services.DimensionOverlapResult.class, com.google.ads.googleads.v19.services.DimensionOverlapResult.Builder.class);
}
// Construct using com.google.ads.googleads.v19.services.DimensionOverlapResult.newBuilder()
private Builder() {
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
// Resets all fields to proto3 defaults. Bit 0x00000001 of bitField0_ tracks
// whether dimension was explicitly set; bit 0x00000002 tracks whether the
// items_ list is a privately-owned mutable copy.
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
dimension_ = 0;
if (itemsBuilder_ == null) {
items_ = java.util.Collections.emptyList();
} else {
items_ = null;
itemsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.ads.googleads.v19.services.AudienceInsightsServiceProto.internal_static_google_ads_googleads_v19_services_DimensionOverlapResult_descriptor;
}
@java.lang.Override
public com.google.ads.googleads.v19.services.DimensionOverlapResult getDefaultInstanceForType() {
return com.google.ads.googleads.v19.services.DimensionOverlapResult.getDefaultInstance();
}
@java.lang.Override
public com.google.ads.googleads.v19.services.DimensionOverlapResult build() {
com.google.ads.googleads.v19.services.DimensionOverlapResult result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.ads.googleads.v19.services.DimensionOverlapResult buildPartial() {
com.google.ads.googleads.v19.services.DimensionOverlapResult result = new com.google.ads.googleads.v19.services.DimensionOverlapResult(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
// Freezes the items list into the result: wraps the builder-owned list as
// unmodifiable (clearing the ownership bit) or builds from the field builder.
private void buildPartialRepeatedFields(com.google.ads.googleads.v19.services.DimensionOverlapResult result) {
if (itemsBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)) {
items_ = java.util.Collections.unmodifiableList(items_);
bitField0_ = (bitField0_ & ~0x00000002);
}
result.items_ = items_;
} else {
result.items_ = itemsBuilder_.build();
}
}
// Copies scalar fields that were explicitly set (per bitField0_) into the result.
private void buildPartial0(com.google.ads.googleads.v19.services.DimensionOverlapResult result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.dimension_ = dimension_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.ads.googleads.v19.services.DimensionOverlapResult) {
return mergeFrom((com.google.ads.googleads.v19.services.DimensionOverlapResult)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Merges another DimensionOverlapResult into this builder: non-default scalar
// fields overwrite; repeated items are appended (sharing the other message's
// immutable list when this builder's list is still empty).
public Builder mergeFrom(com.google.ads.googleads.v19.services.DimensionOverlapResult other) {
if (other == com.google.ads.googleads.v19.services.DimensionOverlapResult.getDefaultInstance()) return this;
if (other.dimension_ != 0) {
setDimensionValue(other.getDimensionValue());
}
if (itemsBuilder_ == null) {
if (!other.items_.isEmpty()) {
if (items_.isEmpty()) {
items_ = other.items_;
bitField0_ = (bitField0_ & ~0x00000002);
} else {
ensureItemsIsMutable();
items_.addAll(other.items_);
}
onChanged();
}
} else {
if (!other.items_.isEmpty()) {
if (itemsBuilder_.isEmpty()) {
itemsBuilder_.dispose();
itemsBuilder_ = null;
items_ = other.items_;
bitField0_ = (bitField0_ & ~0x00000002);
itemsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getItemsFieldBuilder() : null;
} else {
itemsBuilder_.addAllMessages(other.items_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Streaming parse loop: dispatches on wire tags (8 = field 1 enum,
// 18 = field 2 length-delimited message); unknown fields are preserved.
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 8: {
dimension_ = input.readEnum();
bitField0_ |= 0x00000001;
break;
} // case 8
case 18: {
com.google.ads.googleads.v19.services.AudienceOverlapItem m =
input.readMessage(
com.google.ads.googleads.v19.services.AudienceOverlapItem.parser(),
extensionRegistry);
if (itemsBuilder_ == null) {
ensureItemsIsMutable();
items_.add(m);
} else {
itemsBuilder_.addMessage(m);
}
break;
} // case 18
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private int dimension_ = 0;
/**
 * <pre>
 * The dimension of all the attributes in this section.
 * </pre>
 *
 * <code>.google.ads.googleads.v19.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension dimension = 1;</code>
 * @return The enum numeric value on the wire for dimension.
 */
@java.lang.Override public int getDimensionValue() {
return dimension_;
}
/**
 * <pre>
 * The dimension of all the attributes in this section.
 * </pre>
 *
 * <code>.google.ads.googleads.v19.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension dimension = 1;</code>
 * @param value The enum numeric value on the wire for dimension to set.
 * @return This builder for chaining.
 */
public Builder setDimensionValue(int value) {
dimension_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
 * <pre>
 * The dimension of all the attributes in this section.
 * </pre>
 *
 * <code>.google.ads.googleads.v19.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension dimension = 1;</code>
 * @return The dimension.
 */
@java.lang.Override
public com.google.ads.googleads.v19.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension getDimension() {
com.google.ads.googleads.v19.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension result = com.google.ads.googleads.v19.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension.forNumber(dimension_);
return result == null ? com.google.ads.googleads.v19.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension.UNRECOGNIZED : result;
}
/**
 * <pre>
 * The dimension of all the attributes in this section.
 * </pre>
 *
 * <code>.google.ads.googleads.v19.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension dimension = 1;</code>
 * @param value The dimension to set.
 * @return This builder for chaining.
 */
public Builder setDimension(com.google.ads.googleads.v19.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
dimension_ = value.getNumber();
onChanged();
return this;
}
/**
 * <pre>
 * The dimension of all the attributes in this section.
 * </pre>
 *
 * <code>.google.ads.googleads.v19.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension dimension = 1;</code>
 * @return This builder for chaining.
 */
public Builder clearDimension() {
bitField0_ = (bitField0_ & ~0x00000001);
dimension_ = 0;
onChanged();
return this;
}
// Repeated field storage. Either items_ (plain list) or itemsBuilder_
// (RepeatedFieldBuilderV3) is authoritative; the builder takes over once
// getItemsFieldBuilder() is first invoked.
private java.util.List<com.google.ads.googleads.v19.services.AudienceOverlapItem> items_ =
java.util.Collections.emptyList();
// Copy-on-write: replaces a shared/immutable list with a private ArrayList.
private void ensureItemsIsMutable() {
if (!((bitField0_ & 0x00000002) != 0)) {
items_ = new java.util.ArrayList<com.google.ads.googleads.v19.services.AudienceOverlapItem>(items_);
bitField0_ |= 0x00000002;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.ads.googleads.v19.services.AudienceOverlapItem, com.google.ads.googleads.v19.services.AudienceOverlapItem.Builder, com.google.ads.googleads.v19.services.AudienceOverlapItemOrBuilder> itemsBuilder_;
/**
 * <pre>
 * The attributes and their overlap with the primary attribute.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v19.services.AudienceOverlapItem items = 2;</code>
 */
public java.util.List<com.google.ads.googleads.v19.services.AudienceOverlapItem> getItemsList() {
if (itemsBuilder_ == null) {
return java.util.Collections.unmodifiableList(items_);
} else {
return itemsBuilder_.getMessageList();
}
}
/**
 * <pre>
 * The attributes and their overlap with the primary attribute.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v19.services.AudienceOverlapItem items = 2;</code>
 */
public int getItemsCount() {
if (itemsBuilder_ == null) {
return items_.size();
} else {
return itemsBuilder_.getCount();
}
}
/**
 * <pre>
 * The attributes and their overlap with the primary attribute.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v19.services.AudienceOverlapItem items = 2;</code>
 */
public com.google.ads.googleads.v19.services.AudienceOverlapItem getItems(int index) {
if (itemsBuilder_ == null) {
return items_.get(index);
} else {
return itemsBuilder_.getMessage(index);
}
}
/**
 * <pre>
 * The attributes and their overlap with the primary attribute.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v19.services.AudienceOverlapItem items = 2;</code>
 */
public Builder setItems(
int index, com.google.ads.googleads.v19.services.AudienceOverlapItem value) {
if (itemsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureItemsIsMutable();
items_.set(index, value);
onChanged();
} else {
itemsBuilder_.setMessage(index, value);
}
return this;
}
/**
 * <pre>
 * The attributes and their overlap with the primary attribute.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v19.services.AudienceOverlapItem items = 2;</code>
 */
public Builder setItems(
int index, com.google.ads.googleads.v19.services.AudienceOverlapItem.Builder builderForValue) {
if (itemsBuilder_ == null) {
ensureItemsIsMutable();
items_.set(index, builderForValue.build());
onChanged();
} else {
itemsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
 * <pre>
 * The attributes and their overlap with the primary attribute.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v19.services.AudienceOverlapItem items = 2;</code>
 */
public Builder addItems(com.google.ads.googleads.v19.services.AudienceOverlapItem value) {
if (itemsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureItemsIsMutable();
items_.add(value);
onChanged();
} else {
itemsBuilder_.addMessage(value);
}
return this;
}
/**
 * <pre>
 * The attributes and their overlap with the primary attribute.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v19.services.AudienceOverlapItem items = 2;</code>
 */
public Builder addItems(
int index, com.google.ads.googleads.v19.services.AudienceOverlapItem value) {
if (itemsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureItemsIsMutable();
items_.add(index, value);
onChanged();
} else {
itemsBuilder_.addMessage(index, value);
}
return this;
}
/**
 * <pre>
 * The attributes and their overlap with the primary attribute.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v19.services.AudienceOverlapItem items = 2;</code>
 */
public Builder addItems(
com.google.ads.googleads.v19.services.AudienceOverlapItem.Builder builderForValue) {
if (itemsBuilder_ == null) {
ensureItemsIsMutable();
items_.add(builderForValue.build());
onChanged();
} else {
itemsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
 * <pre>
 * The attributes and their overlap with the primary attribute.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v19.services.AudienceOverlapItem items = 2;</code>
 */
public Builder addItems(
int index, com.google.ads.googleads.v19.services.AudienceOverlapItem.Builder builderForValue) {
if (itemsBuilder_ == null) {
ensureItemsIsMutable();
items_.add(index, builderForValue.build());
onChanged();
} else {
itemsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
 * <pre>
 * The attributes and their overlap with the primary attribute.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v19.services.AudienceOverlapItem items = 2;</code>
 */
public Builder addAllItems(
java.lang.Iterable<? extends com.google.ads.googleads.v19.services.AudienceOverlapItem> values) {
if (itemsBuilder_ == null) {
ensureItemsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(
values, items_);
onChanged();
} else {
itemsBuilder_.addAllMessages(values);
}
return this;
}
/**
 * <pre>
 * The attributes and their overlap with the primary attribute.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v19.services.AudienceOverlapItem items = 2;</code>
 */
public Builder clearItems() {
if (itemsBuilder_ == null) {
items_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
} else {
itemsBuilder_.clear();
}
return this;
}
/**
 * <pre>
 * The attributes and their overlap with the primary attribute.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v19.services.AudienceOverlapItem items = 2;</code>
 */
public Builder removeItems(int index) {
if (itemsBuilder_ == null) {
ensureItemsIsMutable();
items_.remove(index);
onChanged();
} else {
itemsBuilder_.remove(index);
}
return this;
}
/**
 * <pre>
 * The attributes and their overlap with the primary attribute.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v19.services.AudienceOverlapItem items = 2;</code>
 */
public com.google.ads.googleads.v19.services.AudienceOverlapItem.Builder getItemsBuilder(
int index) {
return getItemsFieldBuilder().getBuilder(index);
}
/**
 * <pre>
 * The attributes and their overlap with the primary attribute.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v19.services.AudienceOverlapItem items = 2;</code>
 */
public com.google.ads.googleads.v19.services.AudienceOverlapItemOrBuilder getItemsOrBuilder(
int index) {
if (itemsBuilder_ == null) {
return items_.get(index); } else {
return itemsBuilder_.getMessageOrBuilder(index);
}
}
/**
 * <pre>
 * The attributes and their overlap with the primary attribute.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v19.services.AudienceOverlapItem items = 2;</code>
 */
public java.util.List<? extends com.google.ads.googleads.v19.services.AudienceOverlapItemOrBuilder>
getItemsOrBuilderList() {
if (itemsBuilder_ != null) {
return itemsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(items_);
}
}
/**
 * <pre>
 * The attributes and their overlap with the primary attribute.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v19.services.AudienceOverlapItem items = 2;</code>
 */
public com.google.ads.googleads.v19.services.AudienceOverlapItem.Builder addItemsBuilder() {
return getItemsFieldBuilder().addBuilder(
com.google.ads.googleads.v19.services.AudienceOverlapItem.getDefaultInstance());
}
/**
 * <pre>
 * The attributes and their overlap with the primary attribute.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v19.services.AudienceOverlapItem items = 2;</code>
 */
public com.google.ads.googleads.v19.services.AudienceOverlapItem.Builder addItemsBuilder(
int index) {
return getItemsFieldBuilder().addBuilder(
index, com.google.ads.googleads.v19.services.AudienceOverlapItem.getDefaultInstance());
}
/**
 * <pre>
 * The attributes and their overlap with the primary attribute.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v19.services.AudienceOverlapItem items = 2;</code>
 */
public java.util.List<com.google.ads.googleads.v19.services.AudienceOverlapItem.Builder>
getItemsBuilderList() {
return getItemsFieldBuilder().getBuilderList();
}
// Lazily creates the RepeatedFieldBuilderV3, transferring ownership of the
// current items_ list to it (items_ is nulled afterwards).
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.ads.googleads.v19.services.AudienceOverlapItem, com.google.ads.googleads.v19.services.AudienceOverlapItem.Builder, com.google.ads.googleads.v19.services.AudienceOverlapItemOrBuilder>
getItemsFieldBuilder() {
if (itemsBuilder_ == null) {
itemsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.ads.googleads.v19.services.AudienceOverlapItem, com.google.ads.googleads.v19.services.AudienceOverlapItem.Builder, com.google.ads.googleads.v19.services.AudienceOverlapItemOrBuilder>(
items_,
((bitField0_ & 0x00000002) != 0),
getParentForChildren(),
isClean());
items_ = null;
}
return itemsBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v19.services.DimensionOverlapResult)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v19.services.DimensionOverlapResult)
// Shared default instance; represents the all-defaults message.
private static final com.google.ads.googleads.v19.services.DimensionOverlapResult DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.ads.googleads.v19.services.DimensionOverlapResult();
}
public static com.google.ads.googleads.v19.services.DimensionOverlapResult getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Parser delegates to Builder.mergeFrom and normalizes every failure into
// InvalidProtocolBufferException, attaching the partially-parsed message.
private static final com.google.protobuf.Parser<DimensionOverlapResult>
PARSER = new com.google.protobuf.AbstractParser<DimensionOverlapResult>() {
@java.lang.Override
public DimensionOverlapResult parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<DimensionOverlapResult> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<DimensionOverlapResult> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.ads.googleads.v19.services.DimensionOverlapResult getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// ==== Begin file: googleads/google-ads-java — google-ads-stubs-v20/src/main/java/com/google/ads/googleads/v20/services/DimensionOverlapResult.java ====
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v20/services/audience_insights_service.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v20.services;
/**
* <pre>
* A list of audience attributes of a single dimension, including their overlap
* with a primary attribute, returned as part of a
* [GenerateAudienceOverlapInsightsResponse][google.ads.googleads.v20.services.GenerateAudienceOverlapInsightsResponse].
* </pre>
*
* Protobuf type {@code google.ads.googleads.v20.services.DimensionOverlapResult}
*/
public final class DimensionOverlapResult extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v20.services.DimensionOverlapResult)
DimensionOverlapResultOrBuilder {
private static final long serialVersionUID = 0L;
// Use DimensionOverlapResult.newBuilder() to construct.
// Builder-based constructor: all field values come pre-populated from the Builder.
private DimensionOverlapResult(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// Default-instance constructor: proto3 defaults (enum wire value 0, empty list).
private DimensionOverlapResult() {
dimension_ = 0;
items_ = java.util.Collections.emptyList();
}
// Used reflectively by the protobuf runtime to create fresh instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new DimensionOverlapResult();
}
// Message descriptor, sourced from the service proto's generated registry.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v20.services.AudienceInsightsServiceProto.internal_static_google_ads_googleads_v20_services_DimensionOverlapResult_descriptor;
}
// Maps descriptor fields to the generated accessors for reflection support.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v20.services.AudienceInsightsServiceProto.internal_static_google_ads_googleads_v20_services_DimensionOverlapResult_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v20.services.DimensionOverlapResult.class, com.google.ads.googleads.v20.services.DimensionOverlapResult.Builder.class);
}
public static final int DIMENSION_FIELD_NUMBER = 1;
// Stored as the raw wire integer; resolved to the enum on demand in getDimension().
private int dimension_ = 0;
/**
 * <pre>
 * The dimension of all the attributes in this section.
 * </pre>
 *
 * <code>.google.ads.googleads.v20.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension dimension = 1;</code>
 * @return The enum numeric value on the wire for dimension.
 */
@java.lang.Override public int getDimensionValue() {
return dimension_;
}
/**
 * <pre>
 * The dimension of all the attributes in this section.
 * </pre>
 *
 * <code>.google.ads.googleads.v20.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension dimension = 1;</code>
 * @return The dimension. Wire values with no matching enum constant map to UNRECOGNIZED.
 */
@java.lang.Override public com.google.ads.googleads.v20.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension getDimension() {
com.google.ads.googleads.v20.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension result = com.google.ads.googleads.v20.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension.forNumber(dimension_);
return result == null ? com.google.ads.googleads.v20.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension.UNRECOGNIZED : result;
}
public static final int ITEMS_FIELD_NUMBER = 2;
// Immutable once the message is built; exposed directly by the accessors below.
@SuppressWarnings("serial")
private java.util.List<com.google.ads.googleads.v20.services.AudienceOverlapItem> items_;
/**
 * <pre>
 * The attributes and their overlap with the primary attribute.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v20.services.AudienceOverlapItem items = 2;</code>
 */
@java.lang.Override
public java.util.List<com.google.ads.googleads.v20.services.AudienceOverlapItem> getItemsList() {
return items_;
}
/**
 * <pre>
 * The attributes and their overlap with the primary attribute.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v20.services.AudienceOverlapItem items = 2;</code>
 */
@java.lang.Override
public java.util.List<? extends com.google.ads.googleads.v20.services.AudienceOverlapItemOrBuilder>
getItemsOrBuilderList() {
return items_;
}
/**
 * <pre>
 * The attributes and their overlap with the primary attribute.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v20.services.AudienceOverlapItem items = 2;</code>
 */
@java.lang.Override
public int getItemsCount() {
return items_.size();
}
/**
 * <pre>
 * The attributes and their overlap with the primary attribute.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v20.services.AudienceOverlapItem items = 2;</code>
 */
@java.lang.Override
public com.google.ads.googleads.v20.services.AudienceOverlapItem getItems(int index) {
return items_.get(index);
}
/**
 * <pre>
 * The attributes and their overlap with the primary attribute.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v20.services.AudienceOverlapItem items = 2;</code>
 */
@java.lang.Override
public com.google.ads.googleads.v20.services.AudienceOverlapItemOrBuilder getItemsOrBuilder(
int index) {
return items_.get(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
      throws java.io.IOException {
    // Proto3 semantics: the enum is only emitted when it differs from the
    // default (UNSPECIFIED == 0).
    if (dimension_ != com.google.ads.googleads.v20.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension.UNSPECIFIED.getNumber()) {
      output.writeEnum(1, dimension_);
    }
    for (int i = 0; i < items_.size(); i++) {
      output.writeMessage(2, items_.get(i));
    }
    // Unknown fields captured during parsing are round-tripped on the wire.
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Size is memoized; -1 marks "not yet computed".
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (dimension_ != com.google.ads.googleads.v20.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension.UNSPECIFIED.getNumber()) {
      size += com.google.protobuf.CodedOutputStream
        .computeEnumSize(1, dimension_);
    }
    for (int i = 0; i < items_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream
        .computeMessageSize(2, items_.get(i));
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.ads.googleads.v20.services.DimensionOverlapResult)) {
return super.equals(obj);
}
com.google.ads.googleads.v20.services.DimensionOverlapResult other = (com.google.ads.googleads.v20.services.DimensionOverlapResult) obj;
if (dimension_ != other.dimension_) return false;
if (!getItemsList()
.equals(other.getItemsList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
  @java.lang.Override
  public int hashCode() {
    // 0 doubles as the "not yet computed" sentinel for the memoized hash.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    // Constants (41/19/37/53/29) follow the protoc-generated hashing scheme;
    // they must not change or hashes diverge across otherwise-equal instances.
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + DIMENSION_FIELD_NUMBER;
    hash = (53 * hash) + dimension_;
    if (getItemsCount() > 0) {
      hash = (37 * hash) + ITEMS_FIELD_NUMBER;
      hash = (53 * hash) + getItemsList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard protoc-generated parseFrom/parseDelimitedFrom overloads.
  // All in-memory variants delegate to PARSER; stream variants route through
  // GeneratedMessageV3 helpers so IOExceptions are surfaced unchanged.
  public static com.google.ads.googleads.v20.services.DimensionOverlapResult parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v20.services.DimensionOverlapResult parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v20.services.DimensionOverlapResult parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v20.services.DimensionOverlapResult parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v20.services.DimensionOverlapResult parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v20.services.DimensionOverlapResult parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v20.services.DimensionOverlapResult parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v20.services.DimensionOverlapResult parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v20.services.DimensionOverlapResult parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v20.services.DimensionOverlapResult parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v20.services.DimensionOverlapResult parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v20.services.DimensionOverlapResult parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  // Builders are created from the default instance so shared immutable state
  // (e.g. the empty items list) is reused until first mutation.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.ads.googleads.v20.services.DimensionOverlapResult prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // The default instance maps to a plain empty Builder; any other instance
    // is copied into the new Builder via mergeFrom.
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * A list of audience attributes of a single dimension, including their overlap
   * with a primary attribute, returned as part of a
   * [GenerateAudienceOverlapInsightsResponse][google.ads.googleads.v20.services.GenerateAudienceOverlapInsightsResponse].
   * </pre>
   *
   * Protobuf type {@code google.ads.googleads.v20.services.DimensionOverlapResult}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.ads.googleads.v20.services.DimensionOverlapResult)
      com.google.ads.googleads.v20.services.DimensionOverlapResultOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v20.services.AudienceInsightsServiceProto.internal_static_google_ads_googleads_v20_services_DimensionOverlapResult_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v20.services.AudienceInsightsServiceProto.internal_static_google_ads_googleads_v20_services_DimensionOverlapResult_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v20.services.DimensionOverlapResult.class, com.google.ads.googleads.v20.services.DimensionOverlapResult.Builder.class);
    }
    // Construct using com.google.ads.googleads.v20.services.DimensionOverlapResult.newBuilder()
    private Builder() {
    }
    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      dimension_ = 0;
      // If a field builder exists it owns the items; otherwise reset the local
      // list to the shared immutable empty list.
      if (itemsBuilder_ == null) {
        items_ = java.util.Collections.emptyList();
      } else {
        items_ = null;
        itemsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000002);
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v20.services.AudienceInsightsServiceProto.internal_static_google_ads_googleads_v20_services_DimensionOverlapResult_descriptor;
    }
    @java.lang.Override
    public com.google.ads.googleads.v20.services.DimensionOverlapResult getDefaultInstanceForType() {
      return com.google.ads.googleads.v20.services.DimensionOverlapResult.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.ads.googleads.v20.services.DimensionOverlapResult build() {
      com.google.ads.googleads.v20.services.DimensionOverlapResult result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.ads.googleads.v20.services.DimensionOverlapResult buildPartial() {
      com.google.ads.googleads.v20.services.DimensionOverlapResult result = new com.google.ads.googleads.v20.services.DimensionOverlapResult(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) { buildPartial0(result); }
      onBuilt();
      return result;
    }
    // Transfers the repeated 'items' field into the result, freezing the list
    // if this builder still owns a mutable copy (bit 0x2 set).
    private void buildPartialRepeatedFields(com.google.ads.googleads.v20.services.DimensionOverlapResult result) {
      if (itemsBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)) {
          items_ = java.util.Collections.unmodifiableList(items_);
          bitField0_ = (bitField0_ & ~0x00000002);
        }
        result.items_ = items_;
      } else {
        result.items_ = itemsBuilder_.build();
      }
    }
    // Copies scalar fields whose "has been set" bits are on.
    private void buildPartial0(com.google.ads.googleads.v20.services.DimensionOverlapResult result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.dimension_ = dimension_;
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.ads.googleads.v20.services.DimensionOverlapResult) {
        return mergeFrom((com.google.ads.googleads.v20.services.DimensionOverlapResult)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(com.google.ads.googleads.v20.services.DimensionOverlapResult other) {
      if (other == com.google.ads.googleads.v20.services.DimensionOverlapResult.getDefaultInstance()) return this;
      if (other.dimension_ != 0) {
        setDimensionValue(other.getDimensionValue());
      }
      if (itemsBuilder_ == null) {
        if (!other.items_.isEmpty()) {
          if (items_.isEmpty()) {
            // Adopt the other message's immutable list directly; clear the
            // ownership bit so it is copied before any local mutation.
            items_ = other.items_;
            bitField0_ = (bitField0_ & ~0x00000002);
          } else {
            ensureItemsIsMutable();
            items_.addAll(other.items_);
          }
          onChanged();
        }
      } else {
        if (!other.items_.isEmpty()) {
          if (itemsBuilder_.isEmpty()) {
            itemsBuilder_.dispose();
            itemsBuilder_ = null;
            items_ = other.items_;
            bitField0_ = (bitField0_ & ~0x00000002);
            itemsBuilder_ =
              com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                 getItemsFieldBuilder() : null;
          } else {
            itemsBuilder_.addAllMessages(other.items_);
          }
        }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            // tag 8 = field 1, varint (enum 'dimension').
            case 8: {
              dimension_ = input.readEnum();
              bitField0_ |= 0x00000001;
              break;
            } // case 8
            // tag 18 = field 2, length-delimited (repeated message 'items').
            case 18: {
              com.google.ads.googleads.v20.services.AudienceOverlapItem m =
                  input.readMessage(
                      com.google.ads.googleads.v20.services.AudienceOverlapItem.parser(),
                      extensionRegistry);
              if (itemsBuilder_ == null) {
                ensureItemsIsMutable();
                items_.add(m);
              } else {
                itemsBuilder_.addMessage(m);
              }
              break;
            } // case 18
            default: {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // bit 0x1: 'dimension' was explicitly set; bit 0x2: 'items_' is a
    // builder-owned mutable list (as opposed to a shared immutable one).
    private int bitField0_;
    private int dimension_ = 0;
    /**
     * <pre>
     * The dimension of all the attributes in this section.
     * </pre>
     *
     * <code>.google.ads.googleads.v20.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension dimension = 1;</code>
     * @return The enum numeric value on the wire for dimension.
     */
    @java.lang.Override public int getDimensionValue() {
      return dimension_;
    }
    /**
     * <pre>
     * The dimension of all the attributes in this section.
     * </pre>
     *
     * <code>.google.ads.googleads.v20.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension dimension = 1;</code>
     * @param value The enum numeric value on the wire for dimension to set.
     * @return This builder for chaining.
     */
    public Builder setDimensionValue(int value) {
      dimension_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The dimension of all the attributes in this section.
     * </pre>
     *
     * <code>.google.ads.googleads.v20.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension dimension = 1;</code>
     * @return The dimension.
     */
    @java.lang.Override
    public com.google.ads.googleads.v20.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension getDimension() {
      com.google.ads.googleads.v20.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension result = com.google.ads.googleads.v20.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension.forNumber(dimension_);
      return result == null ? com.google.ads.googleads.v20.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension.UNRECOGNIZED : result;
    }
    /**
     * <pre>
     * The dimension of all the attributes in this section.
     * </pre>
     *
     * <code>.google.ads.googleads.v20.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension dimension = 1;</code>
     * @param value The dimension to set.
     * @return This builder for chaining.
     */
    public Builder setDimension(com.google.ads.googleads.v20.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000001;
      dimension_ = value.getNumber();
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The dimension of all the attributes in this section.
     * </pre>
     *
     * <code>.google.ads.googleads.v20.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension dimension = 1;</code>
     * @return This builder for chaining.
     */
    public Builder clearDimension() {
      bitField0_ = (bitField0_ & ~0x00000001);
      dimension_ = 0;
      onChanged();
      return this;
    }
    private java.util.List<com.google.ads.googleads.v20.services.AudienceOverlapItem> items_ =
      java.util.Collections.emptyList();
    // Copy-on-write: replaces a shared/immutable items list with a private
    // ArrayList before the first in-place mutation.
    private void ensureItemsIsMutable() {
      if (!((bitField0_ & 0x00000002) != 0)) {
        items_ = new java.util.ArrayList<com.google.ads.googleads.v20.services.AudienceOverlapItem>(items_);
        bitField0_ |= 0x00000002;
       }
    }
    private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.ads.googleads.v20.services.AudienceOverlapItem, com.google.ads.googleads.v20.services.AudienceOverlapItem.Builder, com.google.ads.googleads.v20.services.AudienceOverlapItemOrBuilder> itemsBuilder_;
    /**
     * <pre>
     * The attributes and their overlap with the primary attribute.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v20.services.AudienceOverlapItem items = 2;</code>
     */
    public java.util.List<com.google.ads.googleads.v20.services.AudienceOverlapItem> getItemsList() {
      if (itemsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(items_);
      } else {
        return itemsBuilder_.getMessageList();
      }
    }
    /**
     * <pre>
     * The attributes and their overlap with the primary attribute.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v20.services.AudienceOverlapItem items = 2;</code>
     */
    public int getItemsCount() {
      if (itemsBuilder_ == null) {
        return items_.size();
      } else {
        return itemsBuilder_.getCount();
      }
    }
    /**
     * <pre>
     * The attributes and their overlap with the primary attribute.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v20.services.AudienceOverlapItem items = 2;</code>
     */
    public com.google.ads.googleads.v20.services.AudienceOverlapItem getItems(int index) {
      if (itemsBuilder_ == null) {
        return items_.get(index);
      } else {
        return itemsBuilder_.getMessage(index);
      }
    }
    /**
     * <pre>
     * The attributes and their overlap with the primary attribute.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v20.services.AudienceOverlapItem items = 2;</code>
     */
    public Builder setItems(
        int index, com.google.ads.googleads.v20.services.AudienceOverlapItem value) {
      if (itemsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureItemsIsMutable();
        items_.set(index, value);
        onChanged();
      } else {
        itemsBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     * <pre>
     * The attributes and their overlap with the primary attribute.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v20.services.AudienceOverlapItem items = 2;</code>
     */
    public Builder setItems(
        int index, com.google.ads.googleads.v20.services.AudienceOverlapItem.Builder builderForValue) {
      if (itemsBuilder_ == null) {
        ensureItemsIsMutable();
        items_.set(index, builderForValue.build());
        onChanged();
      } else {
        itemsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     * <pre>
     * The attributes and their overlap with the primary attribute.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v20.services.AudienceOverlapItem items = 2;</code>
     */
    public Builder addItems(com.google.ads.googleads.v20.services.AudienceOverlapItem value) {
      if (itemsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureItemsIsMutable();
        items_.add(value);
        onChanged();
      } else {
        itemsBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     * <pre>
     * The attributes and their overlap with the primary attribute.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v20.services.AudienceOverlapItem items = 2;</code>
     */
    public Builder addItems(
        int index, com.google.ads.googleads.v20.services.AudienceOverlapItem value) {
      if (itemsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureItemsIsMutable();
        items_.add(index, value);
        onChanged();
      } else {
        itemsBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     * <pre>
     * The attributes and their overlap with the primary attribute.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v20.services.AudienceOverlapItem items = 2;</code>
     */
    public Builder addItems(
        com.google.ads.googleads.v20.services.AudienceOverlapItem.Builder builderForValue) {
      if (itemsBuilder_ == null) {
        ensureItemsIsMutable();
        items_.add(builderForValue.build());
        onChanged();
      } else {
        itemsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     * <pre>
     * The attributes and their overlap with the primary attribute.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v20.services.AudienceOverlapItem items = 2;</code>
     */
    public Builder addItems(
        int index, com.google.ads.googleads.v20.services.AudienceOverlapItem.Builder builderForValue) {
      if (itemsBuilder_ == null) {
        ensureItemsIsMutable();
        items_.add(index, builderForValue.build());
        onChanged();
      } else {
        itemsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     * <pre>
     * The attributes and their overlap with the primary attribute.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v20.services.AudienceOverlapItem items = 2;</code>
     */
    public Builder addAllItems(
        java.lang.Iterable<? extends com.google.ads.googleads.v20.services.AudienceOverlapItem> values) {
      if (itemsBuilder_ == null) {
        ensureItemsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(
            values, items_);
        onChanged();
      } else {
        itemsBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     * <pre>
     * The attributes and their overlap with the primary attribute.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v20.services.AudienceOverlapItem items = 2;</code>
     */
    public Builder clearItems() {
      if (itemsBuilder_ == null) {
        items_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
      } else {
        itemsBuilder_.clear();
      }
      return this;
    }
    /**
     * <pre>
     * The attributes and their overlap with the primary attribute.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v20.services.AudienceOverlapItem items = 2;</code>
     */
    public Builder removeItems(int index) {
      if (itemsBuilder_ == null) {
        ensureItemsIsMutable();
        items_.remove(index);
        onChanged();
      } else {
        itemsBuilder_.remove(index);
      }
      return this;
    }
    /**
     * <pre>
     * The attributes and their overlap with the primary attribute.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v20.services.AudienceOverlapItem items = 2;</code>
     */
    public com.google.ads.googleads.v20.services.AudienceOverlapItem.Builder getItemsBuilder(
        int index) {
      return getItemsFieldBuilder().getBuilder(index);
    }
    /**
     * <pre>
     * The attributes and their overlap with the primary attribute.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v20.services.AudienceOverlapItem items = 2;</code>
     */
    public com.google.ads.googleads.v20.services.AudienceOverlapItemOrBuilder getItemsOrBuilder(
        int index) {
      if (itemsBuilder_ == null) {
        return items_.get(index);  } else {
        return itemsBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     * <pre>
     * The attributes and their overlap with the primary attribute.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v20.services.AudienceOverlapItem items = 2;</code>
     */
    public java.util.List<? extends com.google.ads.googleads.v20.services.AudienceOverlapItemOrBuilder>
         getItemsOrBuilderList() {
      if (itemsBuilder_ != null) {
        return itemsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(items_);
      }
    }
    /**
     * <pre>
     * The attributes and their overlap with the primary attribute.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v20.services.AudienceOverlapItem items = 2;</code>
     */
    public com.google.ads.googleads.v20.services.AudienceOverlapItem.Builder addItemsBuilder() {
      return getItemsFieldBuilder().addBuilder(
          com.google.ads.googleads.v20.services.AudienceOverlapItem.getDefaultInstance());
    }
    /**
     * <pre>
     * The attributes and their overlap with the primary attribute.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v20.services.AudienceOverlapItem items = 2;</code>
     */
    public com.google.ads.googleads.v20.services.AudienceOverlapItem.Builder addItemsBuilder(
        int index) {
      return getItemsFieldBuilder().addBuilder(
          index, com.google.ads.googleads.v20.services.AudienceOverlapItem.getDefaultInstance());
    }
    /**
     * <pre>
     * The attributes and their overlap with the primary attribute.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v20.services.AudienceOverlapItem items = 2;</code>
     */
    public java.util.List<com.google.ads.googleads.v20.services.AudienceOverlapItem.Builder>
         getItemsBuilderList() {
      return getItemsFieldBuilder().getBuilderList();
    }
    // Lazily created; once present it takes ownership of items_ (set to null).
    private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.ads.googleads.v20.services.AudienceOverlapItem, com.google.ads.googleads.v20.services.AudienceOverlapItem.Builder, com.google.ads.googleads.v20.services.AudienceOverlapItemOrBuilder>
        getItemsFieldBuilder() {
      if (itemsBuilder_ == null) {
        itemsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.ads.googleads.v20.services.AudienceOverlapItem, com.google.ads.googleads.v20.services.AudienceOverlapItem.Builder, com.google.ads.googleads.v20.services.AudienceOverlapItemOrBuilder>(
                items_,
                ((bitField0_ & 0x00000002) != 0),
                getParentForChildren(),
                isClean());
        items_ = null;
      }
      return itemsBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.ads.googleads.v20.services.DimensionOverlapResult)
  }
  // @@protoc_insertion_point(class_scope:google.ads.googleads.v20.services.DimensionOverlapResult)
  // Singleton default (all-fields-default) instance shared by all callers.
  private static final com.google.ads.googleads.v20.services.DimensionOverlapResult DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v20.services.DimensionOverlapResult();
  }
  public static com.google.ads.googleads.v20.services.DimensionOverlapResult getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Shared parser; parsing is routed through a Builder so partial messages
  // can be attached to InvalidProtocolBufferException on failure.
  private static final com.google.protobuf.Parser<DimensionOverlapResult>
      PARSER = new com.google.protobuf.AbstractParser<DimensionOverlapResult>() {
    @java.lang.Override
    public DimensionOverlapResult parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (com.google.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };
  public static com.google.protobuf.Parser<DimensionOverlapResult> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<DimensionOverlapResult> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.ads.googleads.v20.services.DimensionOverlapResult getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleads/google-ads-java | 36,185 | google-ads-stubs-v21/src/main/java/com/google/ads/googleads/v21/services/DimensionOverlapResult.java | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v21/services/audience_insights_service.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v21.services;
/**
* <pre>
* A list of audience attributes of a single dimension, including their overlap
* with a primary attribute, returned as part of a
* [GenerateAudienceOverlapInsightsResponse][google.ads.googleads.v21.services.GenerateAudienceOverlapInsightsResponse].
* </pre>
*
* Protobuf type {@code google.ads.googleads.v21.services.DimensionOverlapResult}
*/
public final class DimensionOverlapResult extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v21.services.DimensionOverlapResult)
DimensionOverlapResultOrBuilder {
private static final long serialVersionUID = 0L;
  // Use DimensionOverlapResult.newBuilder() to construct.
  private DimensionOverlapResult(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default constructor used only for DEFAULT_INSTANCE / newInstance.
  private DimensionOverlapResult() {
    dimension_ = 0;
    items_ = java.util.Collections.emptyList();
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new DimensionOverlapResult();
  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.ads.googleads.v21.services.AudienceInsightsServiceProto.internal_static_google_ads_googleads_v21_services_DimensionOverlapResult_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    // Binds this message class and its Builder to the reflective accessor table
    // generated in AudienceInsightsServiceProto.
    return com.google.ads.googleads.v21.services.AudienceInsightsServiceProto.internal_static_google_ads_googleads_v21_services_DimensionOverlapResult_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.ads.googleads.v21.services.DimensionOverlapResult.class, com.google.ads.googleads.v21.services.DimensionOverlapResult.Builder.class);
  }
  // Wire field number of the 'dimension' enum field (proto field 1).
  public static final int DIMENSION_FIELD_NUMBER = 1;
  private int dimension_ = 0;
  /**
   * <pre>
   * The dimension of all the attributes in this section.
   * </pre>
   *
   * <code>.google.ads.googleads.v21.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension dimension = 1;</code>
   * @return The enum numeric value on the wire for dimension.
   */
  @java.lang.Override public int getDimensionValue() {
    return dimension_;
  }
  /**
   * <pre>
   * The dimension of all the attributes in this section.
   * </pre>
   *
   * <code>.google.ads.googleads.v21.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension dimension = 1;</code>
   * @return The dimension.
   */
  @java.lang.Override public com.google.ads.googleads.v21.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension getDimension() {
    // Values unknown to this enum version are surfaced as UNRECOGNIZED.
    com.google.ads.googleads.v21.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension result = com.google.ads.googleads.v21.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension.forNumber(dimension_);
    return result == null ? com.google.ads.googleads.v21.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension.UNRECOGNIZED : result;
  }
  // Wire field number of the repeated 'items' message field (proto field 2).
  public static final int ITEMS_FIELD_NUMBER = 2;
  @SuppressWarnings("serial")
  private java.util.List<com.google.ads.googleads.v21.services.AudienceOverlapItem> items_;
  /**
   * <pre>
   * The attributes and their overlap with the primary attribute.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v21.services.AudienceOverlapItem items = 2;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.ads.googleads.v21.services.AudienceOverlapItem> getItemsList() {
    return items_;
  }
  /**
   * <pre>
   * The attributes and their overlap with the primary attribute.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v21.services.AudienceOverlapItem items = 2;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.ads.googleads.v21.services.AudienceOverlapItemOrBuilder>
      getItemsOrBuilderList() {
    return items_;
  }
  /**
   * <pre>
   * The attributes and their overlap with the primary attribute.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v21.services.AudienceOverlapItem items = 2;</code>
   */
  @java.lang.Override
  public int getItemsCount() {
    return items_.size();
  }
  /**
   * <pre>
   * The attributes and their overlap with the primary attribute.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v21.services.AudienceOverlapItem items = 2;</code>
   */
  @java.lang.Override
  public com.google.ads.googleads.v21.services.AudienceOverlapItem getItems(int index) {
    return items_.get(index);
  }
  /**
   * <pre>
   * The attributes and their overlap with the primary attribute.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v21.services.AudienceOverlapItem items = 2;</code>
   */
  @java.lang.Override
  public com.google.ads.googleads.v21.services.AudienceOverlapItemOrBuilder getItemsOrBuilder(
      int index) {
    return items_.get(index);
  }
  // Memoized initialization state: -1 = not yet computed, 0 = known uninitialized, 1 = known initialized.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // Proto3 message with no required fields: always initialized; cache the answer.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    // Proto3 default-value elision: the enum is only emitted when it differs from UNSPECIFIED (0).
    if (dimension_ != com.google.ads.googleads.v21.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension.UNSPECIFIED.getNumber()) {
      output.writeEnum(1, dimension_);
    }
    // Repeated message field: one length-delimited record per element, in list order.
    for (int i = 0; i < items_.size(); i++) {
      output.writeMessage(2, items_.get(i));
    }
    // Preserve any fields from newer schema versions that were parsed but not recognized.
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize caches the result; -1 marks "not yet computed". Must mirror writeTo exactly.
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (dimension_ != com.google.ads.googleads.v21.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension.UNSPECIFIED.getNumber()) {
      size += com.google.protobuf.CodedOutputStream
        .computeEnumSize(1, dimension_);
    }
    for (int i = 0; i < items_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream
        .computeMessageSize(2, items_.get(i));
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    // Non-DimensionOverlapResult messages are delegated to the superclass comparison.
    if (!(obj instanceof com.google.ads.googleads.v21.services.DimensionOverlapResult)) {
      return super.equals(obj);
    }
    com.google.ads.googleads.v21.services.DimensionOverlapResult other = (com.google.ads.googleads.v21.services.DimensionOverlapResult) obj;

    // Field-by-field comparison, including unknown fields, per the protobuf equality contract.
    if (dimension_ != other.dimension_) return false;
    if (!getItemsList()
        .equals(other.getItemsList())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    // 0 doubles as the "not yet computed" sentinel for the memoized hash.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + DIMENSION_FIELD_NUMBER;
    hash = (53 * hash) + dimension_;
    // Empty repeated fields are skipped so messages with/without the field hash identically.
    if (getItemsCount() > 0) {
      hash = (37 * hash) + ITEMS_FIELD_NUMBER;
      hash = (53 * hash) + getItemsList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parseFrom overloads. The in-memory variants (ByteBuffer, ByteString,
  // byte[]) delegate directly to PARSER; the stream variants route through
  // GeneratedMessageV3 helpers that rewrap raw IOExceptions appropriately.
  public static com.google.ads.googleads.v21.services.DimensionOverlapResult parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v21.services.DimensionOverlapResult parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.services.DimensionOverlapResult parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v21.services.DimensionOverlapResult parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.services.DimensionOverlapResult parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v21.services.DimensionOverlapResult parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.services.DimensionOverlapResult parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v21.services.DimensionOverlapResult parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  // Delimited variants read a leading varint length prefix before the message bytes.
  public static com.google.ads.googleads.v21.services.DimensionOverlapResult parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v21.services.DimensionOverlapResult parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.services.DimensionOverlapResult parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v21.services.DimensionOverlapResult parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.ads.googleads.v21.services.DimensionOverlapResult prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Skip the mergeFrom pass for the default instance — a fresh Builder is equivalent and cheaper.
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * A list of audience attributes of a single dimension, including their overlap
   * with a primary attribute, returned as part of a
   * [GenerateAudienceOverlapInsightsResponse][google.ads.googleads.v21.services.GenerateAudienceOverlapInsightsResponse].
   * </pre>
   *
   * Protobuf type {@code google.ads.googleads.v21.services.DimensionOverlapResult}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.ads.googleads.v21.services.DimensionOverlapResult)
      com.google.ads.googleads.v21.services.DimensionOverlapResultOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v21.services.AudienceInsightsServiceProto.internal_static_google_ads_googleads_v21_services_DimensionOverlapResult_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v21.services.AudienceInsightsServiceProto.internal_static_google_ads_googleads_v21_services_DimensionOverlapResult_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v21.services.DimensionOverlapResult.class, com.google.ads.googleads.v21.services.DimensionOverlapResult.Builder.class);
    }

    // Construct using com.google.ads.googleads.v21.services.DimensionOverlapResult.newBuilder()
    private Builder() {

    }

    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);

    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      dimension_ = 0;
      // items_ is only reset to an empty list when we own it; a live field builder
      // is cleared in place instead.
      if (itemsBuilder_ == null) {
        items_ = java.util.Collections.emptyList();
      } else {
        items_ = null;
        itemsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000002);
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v21.services.AudienceInsightsServiceProto.internal_static_google_ads_googleads_v21_services_DimensionOverlapResult_descriptor;
    }

    @java.lang.Override
    public com.google.ads.googleads.v21.services.DimensionOverlapResult getDefaultInstanceForType() {
      return com.google.ads.googleads.v21.services.DimensionOverlapResult.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.ads.googleads.v21.services.DimensionOverlapResult build() {
      com.google.ads.googleads.v21.services.DimensionOverlapResult result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.ads.googleads.v21.services.DimensionOverlapResult buildPartial() {
      com.google.ads.googleads.v21.services.DimensionOverlapResult result = new com.google.ads.googleads.v21.services.DimensionOverlapResult(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) { buildPartial0(result); }
      onBuilt();
      return result;
    }

    private void buildPartialRepeatedFields(com.google.ads.googleads.v21.services.DimensionOverlapResult result) {
      if (itemsBuilder_ == null) {
        // Freeze our mutable copy (if any) and hand it to the message; drop the
        // "mutable" bit so later builder edits re-copy instead of aliasing.
        if (((bitField0_ & 0x00000002) != 0)) {
          items_ = java.util.Collections.unmodifiableList(items_);
          bitField0_ = (bitField0_ & ~0x00000002);
        }
        result.items_ = items_;
      } else {
        result.items_ = itemsBuilder_.build();
      }
    }

    private void buildPartial0(com.google.ads.googleads.v21.services.DimensionOverlapResult result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.dimension_ = dimension_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.ads.googleads.v21.services.DimensionOverlapResult) {
        return mergeFrom((com.google.ads.googleads.v21.services.DimensionOverlapResult)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.ads.googleads.v21.services.DimensionOverlapResult other) {
      if (other == com.google.ads.googleads.v21.services.DimensionOverlapResult.getDefaultInstance()) return this;
      if (other.dimension_ != 0) {
        setDimensionValue(other.getDimensionValue());
      }
      if (itemsBuilder_ == null) {
        if (!other.items_.isEmpty()) {
          if (items_.isEmpty()) {
            // Our list is empty: alias the other message's immutable list (no copy);
            // the cleared mutable bit forces a copy on the next local mutation.
            items_ = other.items_;
            bitField0_ = (bitField0_ & ~0x00000002);
          } else {
            ensureItemsIsMutable();
            items_.addAll(other.items_);
          }
          onChanged();
        }
      } else {
        if (!other.items_.isEmpty()) {
          if (itemsBuilder_.isEmpty()) {
            // Tear down the empty field builder and fall back to list aliasing;
            // alwaysUseFieldBuilders re-creates it immediately when enabled.
            itemsBuilder_.dispose();
            itemsBuilder_ = null;
            items_ = other.items_;
            bitField0_ = (bitField0_ & ~0x00000002);
            itemsBuilder_ =
              com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                 getItemsFieldBuilder() : null;
          } else {
            itemsBuilder_.addAllMessages(other.items_);
          }
        }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 8: {
              dimension_ = input.readEnum();
              bitField0_ |= 0x00000001;
              break;
            } // case 8
            case 18: {
              com.google.ads.googleads.v21.services.AudienceOverlapItem m =
                  input.readMessage(
                      com.google.ads.googleads.v21.services.AudienceOverlapItem.parser(),
                      extensionRegistry);
              if (itemsBuilder_ == null) {
                ensureItemsIsMutable();
                items_.add(m);
              } else {
                itemsBuilder_.addMessage(m);
              }
              break;
            } // case 18
            default: {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify parents even on failure: fields consumed before the error remain set.
        onChanged();
      } // finally
      return this;
    }
    // Bit 0x00000001: dimension explicitly set. Bit 0x00000002: items_ is a private
    // mutable ArrayList (clear ⇒ list is aliased/immutable and must be copied before edits).
    private int bitField0_;

    private int dimension_ = 0;
    /**
     * <pre>
     * The dimension of all the attributes in this section.
     * </pre>
     *
     * <code>.google.ads.googleads.v21.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension dimension = 1;</code>
     * @return The enum numeric value on the wire for dimension.
     */
    @java.lang.Override public int getDimensionValue() {
      return dimension_;
    }
    /**
     * <pre>
     * The dimension of all the attributes in this section.
     * </pre>
     *
     * <code>.google.ads.googleads.v21.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension dimension = 1;</code>
     * @param value The enum numeric value on the wire for dimension to set.
     * @return This builder for chaining.
     */
    public Builder setDimensionValue(int value) {
      dimension_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The dimension of all the attributes in this section.
     * </pre>
     *
     * <code>.google.ads.googleads.v21.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension dimension = 1;</code>
     * @return The dimension.
     */
    @java.lang.Override
    public com.google.ads.googleads.v21.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension getDimension() {
      com.google.ads.googleads.v21.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension result = com.google.ads.googleads.v21.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension.forNumber(dimension_);
      return result == null ? com.google.ads.googleads.v21.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension.UNRECOGNIZED : result;
    }
    /**
     * <pre>
     * The dimension of all the attributes in this section.
     * </pre>
     *
     * <code>.google.ads.googleads.v21.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension dimension = 1;</code>
     * @param value The dimension to set.
     * @return This builder for chaining.
     */
    public Builder setDimension(com.google.ads.googleads.v21.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000001;
      dimension_ = value.getNumber();
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The dimension of all the attributes in this section.
     * </pre>
     *
     * <code>.google.ads.googleads.v21.enums.AudienceInsightsDimensionEnum.AudienceInsightsDimension dimension = 1;</code>
     * @return This builder for chaining.
     */
    public Builder clearDimension() {
      bitField0_ = (bitField0_ & ~0x00000001);
      dimension_ = 0;
      onChanged();
      return this;
    }

    private java.util.List<com.google.ads.googleads.v21.services.AudienceOverlapItem> items_ =
      java.util.Collections.emptyList();
    // Copy-on-write guard: promote items_ to a private ArrayList before any in-place edit.
    private void ensureItemsIsMutable() {
      if (!((bitField0_ & 0x00000002) != 0)) {
        items_ = new java.util.ArrayList<com.google.ads.googleads.v21.services.AudienceOverlapItem>(items_);
        bitField0_ |= 0x00000002;
       }
    }

    private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.ads.googleads.v21.services.AudienceOverlapItem, com.google.ads.googleads.v21.services.AudienceOverlapItem.Builder, com.google.ads.googleads.v21.services.AudienceOverlapItemOrBuilder> itemsBuilder_;

    /**
     * <pre>
     * The attributes and their overlap with the primary attribute.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.services.AudienceOverlapItem items = 2;</code>
     */
    public java.util.List<com.google.ads.googleads.v21.services.AudienceOverlapItem> getItemsList() {
      if (itemsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(items_);
      } else {
        return itemsBuilder_.getMessageList();
      }
    }
    /**
     * <pre>
     * The attributes and their overlap with the primary attribute.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.services.AudienceOverlapItem items = 2;</code>
     */
    public int getItemsCount() {
      if (itemsBuilder_ == null) {
        return items_.size();
      } else {
        return itemsBuilder_.getCount();
      }
    }
    /**
     * <pre>
     * The attributes and their overlap with the primary attribute.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.services.AudienceOverlapItem items = 2;</code>
     */
    public com.google.ads.googleads.v21.services.AudienceOverlapItem getItems(int index) {
      if (itemsBuilder_ == null) {
        return items_.get(index);
      } else {
        return itemsBuilder_.getMessage(index);
      }
    }
    /**
     * <pre>
     * The attributes and their overlap with the primary attribute.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.services.AudienceOverlapItem items = 2;</code>
     */
    public Builder setItems(
        int index, com.google.ads.googleads.v21.services.AudienceOverlapItem value) {
      if (itemsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureItemsIsMutable();
        items_.set(index, value);
        onChanged();
      } else {
        itemsBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     * <pre>
     * The attributes and their overlap with the primary attribute.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.services.AudienceOverlapItem items = 2;</code>
     */
    public Builder setItems(
        int index, com.google.ads.googleads.v21.services.AudienceOverlapItem.Builder builderForValue) {
      if (itemsBuilder_ == null) {
        ensureItemsIsMutable();
        items_.set(index, builderForValue.build());
        onChanged();
      } else {
        itemsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     * <pre>
     * The attributes and their overlap with the primary attribute.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.services.AudienceOverlapItem items = 2;</code>
     */
    public Builder addItems(com.google.ads.googleads.v21.services.AudienceOverlapItem value) {
      if (itemsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureItemsIsMutable();
        items_.add(value);
        onChanged();
      } else {
        itemsBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     * <pre>
     * The attributes and their overlap with the primary attribute.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.services.AudienceOverlapItem items = 2;</code>
     */
    public Builder addItems(
        int index, com.google.ads.googleads.v21.services.AudienceOverlapItem value) {
      if (itemsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureItemsIsMutable();
        items_.add(index, value);
        onChanged();
      } else {
        itemsBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     * <pre>
     * The attributes and their overlap with the primary attribute.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.services.AudienceOverlapItem items = 2;</code>
     */
    public Builder addItems(
        com.google.ads.googleads.v21.services.AudienceOverlapItem.Builder builderForValue) {
      if (itemsBuilder_ == null) {
        ensureItemsIsMutable();
        items_.add(builderForValue.build());
        onChanged();
      } else {
        itemsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     * <pre>
     * The attributes and their overlap with the primary attribute.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.services.AudienceOverlapItem items = 2;</code>
     */
    public Builder addItems(
        int index, com.google.ads.googleads.v21.services.AudienceOverlapItem.Builder builderForValue) {
      if (itemsBuilder_ == null) {
        ensureItemsIsMutable();
        items_.add(index, builderForValue.build());
        onChanged();
      } else {
        itemsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     * <pre>
     * The attributes and their overlap with the primary attribute.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.services.AudienceOverlapItem items = 2;</code>
     */
    public Builder addAllItems(
        java.lang.Iterable<? extends com.google.ads.googleads.v21.services.AudienceOverlapItem> values) {
      if (itemsBuilder_ == null) {
        ensureItemsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(
            values, items_);
        onChanged();
      } else {
        itemsBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     * <pre>
     * The attributes and their overlap with the primary attribute.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.services.AudienceOverlapItem items = 2;</code>
     */
    public Builder clearItems() {
      if (itemsBuilder_ == null) {
        items_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
      } else {
        itemsBuilder_.clear();
      }
      return this;
    }
    /**
     * <pre>
     * The attributes and their overlap with the primary attribute.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.services.AudienceOverlapItem items = 2;</code>
     */
    public Builder removeItems(int index) {
      if (itemsBuilder_ == null) {
        ensureItemsIsMutable();
        items_.remove(index);
        onChanged();
      } else {
        itemsBuilder_.remove(index);
      }
      return this;
    }
    /**
     * <pre>
     * The attributes and their overlap with the primary attribute.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.services.AudienceOverlapItem items = 2;</code>
     */
    public com.google.ads.googleads.v21.services.AudienceOverlapItem.Builder getItemsBuilder(
        int index) {
      return getItemsFieldBuilder().getBuilder(index);
    }
    /**
     * <pre>
     * The attributes and their overlap with the primary attribute.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.services.AudienceOverlapItem items = 2;</code>
     */
    public com.google.ads.googleads.v21.services.AudienceOverlapItemOrBuilder getItemsOrBuilder(
        int index) {
      if (itemsBuilder_ == null) {
        return items_.get(index);  } else {
        return itemsBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     * <pre>
     * The attributes and their overlap with the primary attribute.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.services.AudienceOverlapItem items = 2;</code>
     */
    public java.util.List<? extends com.google.ads.googleads.v21.services.AudienceOverlapItemOrBuilder>
         getItemsOrBuilderList() {
      if (itemsBuilder_ != null) {
        return itemsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(items_);
      }
    }
    /**
     * <pre>
     * The attributes and their overlap with the primary attribute.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.services.AudienceOverlapItem items = 2;</code>
     */
    public com.google.ads.googleads.v21.services.AudienceOverlapItem.Builder addItemsBuilder() {
      return getItemsFieldBuilder().addBuilder(
          com.google.ads.googleads.v21.services.AudienceOverlapItem.getDefaultInstance());
    }
    /**
     * <pre>
     * The attributes and their overlap with the primary attribute.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.services.AudienceOverlapItem items = 2;</code>
     */
    public com.google.ads.googleads.v21.services.AudienceOverlapItem.Builder addItemsBuilder(
        int index) {
      return getItemsFieldBuilder().addBuilder(
          index, com.google.ads.googleads.v21.services.AudienceOverlapItem.getDefaultInstance());
    }
    /**
     * <pre>
     * The attributes and their overlap with the primary attribute.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.services.AudienceOverlapItem items = 2;</code>
     */
    public java.util.List<com.google.ads.googleads.v21.services.AudienceOverlapItem.Builder>
         getItemsBuilderList() {
      return getItemsFieldBuilder().getBuilderList();
    }
    // Lazily switches the repeated field into "builder mode"; items_ ownership transfers
    // to the RepeatedFieldBuilderV3 and the plain list is nulled out.
    private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.ads.googleads.v21.services.AudienceOverlapItem, com.google.ads.googleads.v21.services.AudienceOverlapItem.Builder, com.google.ads.googleads.v21.services.AudienceOverlapItemOrBuilder>
        getItemsFieldBuilder() {
      if (itemsBuilder_ == null) {
        itemsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.ads.googleads.v21.services.AudienceOverlapItem, com.google.ads.googleads.v21.services.AudienceOverlapItem.Builder, com.google.ads.googleads.v21.services.AudienceOverlapItemOrBuilder>(
                items_,
                ((bitField0_ & 0x00000002) != 0),
                getParentForChildren(),
                isClean());
        items_ = null;
      }
      return itemsBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }


    // @@protoc_insertion_point(builder_scope:google.ads.googleads.v21.services.DimensionOverlapResult)
  }
// @@protoc_insertion_point(class_scope:google.ads.googleads.v21.services.DimensionOverlapResult)
  // Singleton default instance shared by getDefaultInstance()/getDefaultInstanceForType().
  private static final com.google.ads.googleads.v21.services.DimensionOverlapResult DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v21.services.DimensionOverlapResult();
  }

  public static com.google.ads.googleads.v21.services.DimensionOverlapResult getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<DimensionOverlapResult>
      PARSER = new com.google.protobuf.AbstractParser<DimensionOverlapResult>() {
    @java.lang.Override
    public DimensionOverlapResult parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Attach the partially-parsed message so callers can inspect what was read.
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (com.google.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };

  public static com.google.protobuf.Parser<DimensionOverlapResult> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<DimensionOverlapResult> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.ads.googleads.v21.services.DimensionOverlapResult getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/vpcaccess/v1/vpc_access.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.vpcaccess.v1;
/**
*
*
* <pre>
* Request for creating a Serverless VPC Access connector.
* </pre>
*
* Protobuf type {@code google.cloud.vpcaccess.v1.CreateConnectorRequest}
*/
public final class CreateConnectorRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.vpcaccess.v1.CreateConnectorRequest)
CreateConnectorRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use CreateConnectorRequest.newBuilder() to construct.
private CreateConnectorRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private CreateConnectorRequest() {
parent_ = "";
connectorId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new CreateConnectorRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.vpcaccess.v1.VpcAccessProto
.internal_static_google_cloud_vpcaccess_v1_CreateConnectorRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.vpcaccess.v1.VpcAccessProto
.internal_static_google_cloud_vpcaccess_v1_CreateConnectorRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.vpcaccess.v1.CreateConnectorRequest.class,
com.google.cloud.vpcaccess.v1.CreateConnectorRequest.Builder.class);
}
private int bitField0_;
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The project and location in which the configuration should be created,
* specified in the format `projects/*/locations/*`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The project and location in which the configuration should be created,
* specified in the format `projects/*/locations/*`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int CONNECTOR_ID_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object connectorId_ = "";
/**
*
*
* <pre>
* Required. The ID to use for this connector.
* </pre>
*
* <code>string connector_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The connectorId.
*/
@java.lang.Override
public java.lang.String getConnectorId() {
java.lang.Object ref = connectorId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
connectorId_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The ID to use for this connector.
* </pre>
*
* <code>string connector_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for connectorId.
*/
@java.lang.Override
public com.google.protobuf.ByteString getConnectorIdBytes() {
java.lang.Object ref = connectorId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
connectorId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
  public static final int CONNECTOR_FIELD_NUMBER = 3;
  // Presence of this message field is tracked by bit 0x00000001 of bitField0_.
  private com.google.cloud.vpcaccess.v1.Connector connector_;
  /**
   *
   *
   * <pre>
   * Required. Resource to create.
   * </pre>
   *
   * <code>
   * .google.cloud.vpcaccess.v1.Connector connector = 3 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the connector field is set.
   */
  @java.lang.Override
  public boolean hasConnector() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Required. Resource to create.
   * </pre>
   *
   * <code>
   * .google.cloud.vpcaccess.v1.Connector connector = 3 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The connector.
   */
  @java.lang.Override
  public com.google.cloud.vpcaccess.v1.Connector getConnector() {
    // Never returns null: falls back to the default instance when unset.
    return connector_ == null
        ? com.google.cloud.vpcaccess.v1.Connector.getDefaultInstance()
        : connector_;
  }
  /**
   *
   *
   * <pre>
   * Required. Resource to create.
   * </pre>
   *
   * <code>
   * .google.cloud.vpcaccess.v1.Connector connector = 3 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.vpcaccess.v1.ConnectorOrBuilder getConnectorOrBuilder() {
    // Same null-safe fallback as getConnector(), exposed through the OrBuilder view.
    return connector_ == null
        ? com.google.cloud.vpcaccess.v1.Connector.getDefaultInstance()
        : connector_;
  }
  // -1 = not yet computed, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    // This message has no proto2 required fields, so it is always initialized;
    // the result is still memoized to match the generated-code pattern.
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes the message to the wire: non-empty strings and the connector
  // message (when its presence bit is set), in field-number order, followed by
  // any unknown fields preserved from parsing.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(connectorId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, connectorId_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(3, getConnector());
    }
    getUnknownFields().writeTo(output);
  }
  // Computes the encoded byte size; mirrors writeTo() field-for-field and
  // memoizes the result in memoizedSize (-1 means "not yet computed").
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(connectorId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, connectorId_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getConnector());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Field-by-field structural equality, including unknown fields; the connector
  // message is only compared when both sides have it set.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.vpcaccess.v1.CreateConnectorRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.vpcaccess.v1.CreateConnectorRequest other =
        (com.google.cloud.vpcaccess.v1.CreateConnectorRequest) obj;
    if (!getParent().equals(other.getParent())) return false;
    if (!getConnectorId().equals(other.getConnectorId())) return false;
    if (hasConnector() != other.hasConnector()) return false;
    if (hasConnector()) {
      if (!getConnector().equals(other.getConnector())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash over the same fields equals() compares (consistent with equals), with
  // the result memoized; 0 is the "not yet computed" sentinel.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + CONNECTOR_ID_FIELD_NUMBER;
    hash = (53 * hash) + getConnectorId().hashCode();
    if (hasConnector()) {
      hash = (37 * hash) + CONNECTOR_FIELD_NUMBER;
      hash = (53 * hash) + getConnector().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parseFrom/parseDelimitedFrom overloads: each delegates to
  // the shared PARSER (byte sources) or to GeneratedMessageV3's IOException-wrapping
  // helpers (stream sources), with and without an extension registry.
  public static com.google.cloud.vpcaccess.v1.CreateConnectorRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.vpcaccess.v1.CreateConnectorRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.vpcaccess.v1.CreateConnectorRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.vpcaccess.v1.CreateConnectorRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.vpcaccess.v1.CreateConnectorRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.vpcaccess.v1.CreateConnectorRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.vpcaccess.v1.CreateConnectorRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.vpcaccess.v1.CreateConnectorRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a varint length prefix before the message payload.
  public static com.google.cloud.vpcaccess.v1.CreateConnectorRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.vpcaccess.v1.CreateConnectorRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.vpcaccess.v1.CreateConnectorRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.vpcaccess.v1.CreateConnectorRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factories. toBuilder() avoids a redundant mergeFrom when called on
  // the shared default instance.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.cloud.vpcaccess.v1.CreateConnectorRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Request for creating a Serverless VPC Access connector.
   * </pre>
   *
   * Protobuf type {@code google.cloud.vpcaccess.v1.CreateConnectorRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.vpcaccess.v1.CreateConnectorRequest)
      com.google.cloud.vpcaccess.v1.CreateConnectorRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.vpcaccess.v1.VpcAccessProto
          .internal_static_google_cloud_vpcaccess_v1_CreateConnectorRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.vpcaccess.v1.VpcAccessProto
          .internal_static_google_cloud_vpcaccess_v1_CreateConnectorRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.vpcaccess.v1.CreateConnectorRequest.class,
              com.google.cloud.vpcaccess.v1.CreateConnectorRequest.Builder.class);
    }
    // Construct using com.google.cloud.vpcaccess.v1.CreateConnectorRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      // Eagerly creates nested field builders only when the runtime requires it
      // (alwaysUseFieldBuilders is a debug/introspection mode of the runtime).
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getConnectorFieldBuilder();
      }
    }
    @java.lang.Override
    public Builder clear() {
      // Resets all fields to their defaults and releases the nested builder.
      super.clear();
      bitField0_ = 0;
      parent_ = "";
      connectorId_ = "";
      connector_ = null;
      if (connectorBuilder_ != null) {
        connectorBuilder_.dispose();
        connectorBuilder_ = null;
      }
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.vpcaccess.v1.VpcAccessProto
          .internal_static_google_cloud_vpcaccess_v1_CreateConnectorRequest_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.vpcaccess.v1.CreateConnectorRequest getDefaultInstanceForType() {
      return com.google.cloud.vpcaccess.v1.CreateConnectorRequest.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.vpcaccess.v1.CreateConnectorRequest build() {
      com.google.cloud.vpcaccess.v1.CreateConnectorRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.vpcaccess.v1.CreateConnectorRequest buildPartial() {
      com.google.cloud.vpcaccess.v1.CreateConnectorRequest result =
          new com.google.cloud.vpcaccess.v1.CreateConnectorRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    private void buildPartial0(com.google.cloud.vpcaccess.v1.CreateConnectorRequest result) {
      // Copies only the fields whose builder-side presence bits are set; the
      // message-side bitField0_ tracks only the connector message (bit 0x1).
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.connectorId_ = connectorId_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.connector_ = connectorBuilder_ == null ? connector_ : connectorBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      result.bitField0_ |= to_bitField0_;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.vpcaccess.v1.CreateConnectorRequest) {
        return mergeFrom((com.google.cloud.vpcaccess.v1.CreateConnectorRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(com.google.cloud.vpcaccess.v1.CreateConnectorRequest other) {
      // Standard merge semantics: non-empty strings overwrite, the connector
      // message is recursively merged, unknown fields are concatenated.
      if (other == com.google.cloud.vpcaccess.v1.CreateConnectorRequest.getDefaultInstance())
        return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getConnectorId().isEmpty()) {
        connectorId_ = other.connectorId_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.hasConnector()) {
        mergeConnector(other.getConnector());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        // Wire-format loop: tag 0 means end of stream; tags 10/18/26 are
        // fields 1-3 (length-delimited); anything else goes to unknown fields.
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                connectorId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                input.readMessage(getConnectorFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    private int bitField0_;
    private java.lang.Object parent_ = "";
    /**
     *
     *
     * <pre>
     * Required. The project and location in which the configuration should be created,
     * specified in the format `projects/&#42;/locations/&#42;`.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The project and location in which the configuration should be created,
     * specified in the format `projects/&#42;/locations/&#42;`.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The project and location in which the configuration should be created,
     * specified in the format `projects/&#42;/locations/&#42;`.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The parent to set.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The project and location in which the configuration should be created,
     * specified in the format `projects/&#42;/locations/&#42;`.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The project and location in which the configuration should be created,
     * specified in the format `projects/&#42;/locations/&#42;`.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for parent to set.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    private java.lang.Object connectorId_ = "";
    /**
     *
     *
     * <pre>
     * Required. The ID to use for this connector.
     * </pre>
     *
     * <code>string connector_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The connectorId.
     */
    public java.lang.String getConnectorId() {
      java.lang.Object ref = connectorId_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        connectorId_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The ID to use for this connector.
     * </pre>
     *
     * <code>string connector_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The bytes for connectorId.
     */
    public com.google.protobuf.ByteString getConnectorIdBytes() {
      java.lang.Object ref = connectorId_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        connectorId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The ID to use for this connector.
     * </pre>
     *
     * <code>string connector_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The connectorId to set.
     * @return This builder for chaining.
     */
    public Builder setConnectorId(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      connectorId_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The ID to use for this connector.
     * </pre>
     *
     * <code>string connector_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearConnectorId() {
      connectorId_ = getDefaultInstance().getConnectorId();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The ID to use for this connector.
     * </pre>
     *
     * <code>string connector_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The bytes for connectorId to set.
     * @return This builder for chaining.
     */
    public Builder setConnectorIdBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      connectorId_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    // The connector field lives either in connector_ (plain value) or in
    // connectorBuilder_ (lazy sub-builder); exactly one is authoritative at a time.
    private com.google.cloud.vpcaccess.v1.Connector connector_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.vpcaccess.v1.Connector,
            com.google.cloud.vpcaccess.v1.Connector.Builder,
            com.google.cloud.vpcaccess.v1.ConnectorOrBuilder>
        connectorBuilder_;
    /**
     *
     *
     * <pre>
     * Required. Resource to create.
     * </pre>
     *
     * <code>
     * .google.cloud.vpcaccess.v1.Connector connector = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the connector field is set.
     */
    public boolean hasConnector() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     *
     *
     * <pre>
     * Required. Resource to create.
     * </pre>
     *
     * <code>
     * .google.cloud.vpcaccess.v1.Connector connector = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The connector.
     */
    public com.google.cloud.vpcaccess.v1.Connector getConnector() {
      if (connectorBuilder_ == null) {
        return connector_ == null
            ? com.google.cloud.vpcaccess.v1.Connector.getDefaultInstance()
            : connector_;
      } else {
        return connectorBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Required. Resource to create.
     * </pre>
     *
     * <code>
     * .google.cloud.vpcaccess.v1.Connector connector = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setConnector(com.google.cloud.vpcaccess.v1.Connector value) {
      if (connectorBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        connector_ = value;
      } else {
        connectorBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Resource to create.
     * </pre>
     *
     * <code>
     * .google.cloud.vpcaccess.v1.Connector connector = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setConnector(com.google.cloud.vpcaccess.v1.Connector.Builder builderForValue) {
      if (connectorBuilder_ == null) {
        connector_ = builderForValue.build();
      } else {
        connectorBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Resource to create.
     * </pre>
     *
     * <code>
     * .google.cloud.vpcaccess.v1.Connector connector = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeConnector(com.google.cloud.vpcaccess.v1.Connector value) {
      // Merges into an existing non-default value; otherwise replaces outright.
      if (connectorBuilder_ == null) {
        if (((bitField0_ & 0x00000004) != 0)
            && connector_ != null
            && connector_ != com.google.cloud.vpcaccess.v1.Connector.getDefaultInstance()) {
          getConnectorBuilder().mergeFrom(value);
        } else {
          connector_ = value;
        }
      } else {
        connectorBuilder_.mergeFrom(value);
      }
      if (connector_ != null) {
        bitField0_ |= 0x00000004;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Resource to create.
     * </pre>
     *
     * <code>
     * .google.cloud.vpcaccess.v1.Connector connector = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearConnector() {
      bitField0_ = (bitField0_ & ~0x00000004);
      connector_ = null;
      if (connectorBuilder_ != null) {
        connectorBuilder_.dispose();
        connectorBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Resource to create.
     * </pre>
     *
     * <code>
     * .google.cloud.vpcaccess.v1.Connector connector = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.vpcaccess.v1.Connector.Builder getConnectorBuilder() {
      bitField0_ |= 0x00000004;
      onChanged();
      return getConnectorFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Required. Resource to create.
     * </pre>
     *
     * <code>
     * .google.cloud.vpcaccess.v1.Connector connector = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.vpcaccess.v1.ConnectorOrBuilder getConnectorOrBuilder() {
      if (connectorBuilder_ != null) {
        return connectorBuilder_.getMessageOrBuilder();
      } else {
        return connector_ == null
            ? com.google.cloud.vpcaccess.v1.Connector.getDefaultInstance()
            : connector_;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. Resource to create.
     * </pre>
     *
     * <code>
     * .google.cloud.vpcaccess.v1.Connector connector = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.vpcaccess.v1.Connector,
            com.google.cloud.vpcaccess.v1.Connector.Builder,
            com.google.cloud.vpcaccess.v1.ConnectorOrBuilder>
        getConnectorFieldBuilder() {
      // Transfers ownership of the current value into the lazily-created
      // sub-builder; connector_ is nulled so the builder is authoritative.
      if (connectorBuilder_ == null) {
        connectorBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.vpcaccess.v1.Connector,
                com.google.cloud.vpcaccess.v1.Connector.Builder,
                com.google.cloud.vpcaccess.v1.ConnectorOrBuilder>(
                getConnector(), getParentForChildren(), isClean());
        connector_ = null;
      }
      return connectorBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.vpcaccess.v1.CreateConnectorRequest)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.vpcaccess.v1.CreateConnectorRequest)
  // Shared immutable default instance; also serves as the builder prototype.
  private static final com.google.cloud.vpcaccess.v1.CreateConnectorRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.vpcaccess.v1.CreateConnectorRequest();
  }
  public static com.google.cloud.vpcaccess.v1.CreateConnectorRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser implementation: on any failure the partially-built message is
  // attached to the thrown InvalidProtocolBufferException.
  private static final com.google.protobuf.Parser<CreateConnectorRequest> PARSER =
      new com.google.protobuf.AbstractParser<CreateConnectorRequest>() {
        @java.lang.Override
        public CreateConnectorRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<CreateConnectorRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<CreateConnectorRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.vpcaccess.v1.CreateConnectorRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 36,051 | java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/BatchCreateTensorboardTimeSeriesResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1beta1/tensorboard_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1beta1;
/**
*
*
* <pre>
* Response message for
* [TensorboardService.BatchCreateTensorboardTimeSeries][google.cloud.aiplatform.v1beta1.TensorboardService.BatchCreateTensorboardTimeSeries].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.BatchCreateTensorboardTimeSeriesResponse}
*/
public final class BatchCreateTensorboardTimeSeriesResponse
extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.BatchCreateTensorboardTimeSeriesResponse)
BatchCreateTensorboardTimeSeriesResponseOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use BatchCreateTensorboardTimeSeriesResponse.newBuilder() to construct.
  private BatchCreateTensorboardTimeSeriesResponse(
      com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor used for the default instance; the repeated field
  // starts out as an immutable empty list.
  private BatchCreateTensorboardTimeSeriesResponse() {
    tensorboardTimeSeries_ = java.util.Collections.emptyList();
  }
  // Reflection hook used by the protobuf runtime to create fresh instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new BatchCreateTensorboardTimeSeriesResponse();
  }
  // Descriptor plumbing: binds this class to the message type declared in
  // google/cloud/aiplatform/v1beta1/tensorboard_service.proto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.aiplatform.v1beta1.TensorboardServiceProto
        .internal_static_google_cloud_aiplatform_v1beta1_BatchCreateTensorboardTimeSeriesResponse_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.aiplatform.v1beta1.TensorboardServiceProto
        .internal_static_google_cloud_aiplatform_v1beta1_BatchCreateTensorboardTimeSeriesResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.aiplatform.v1beta1.BatchCreateTensorboardTimeSeriesResponse.class,
            com.google.cloud.aiplatform.v1beta1.BatchCreateTensorboardTimeSeriesResponse.Builder
                .class);
  }
  public static final int TENSORBOARD_TIME_SERIES_FIELD_NUMBER = 1;
  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeries>
      tensorboardTimeSeries_;
  /**
   *
   *
   * <pre>
   * The created TensorboardTimeSeries.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.aiplatform.v1beta1.TensorboardTimeSeries tensorboard_time_series = 1;
   * </code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeries>
      getTensorboardTimeSeriesList() {
    // Returns the backing list directly; callers must treat it as read-only.
    return tensorboardTimeSeries_;
  }
  /**
   *
   *
   * <pre>
   * The created TensorboardTimeSeries.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.aiplatform.v1beta1.TensorboardTimeSeries tensorboard_time_series = 1;
   * </code>
   */
  @java.lang.Override
  public java.util.List<
          ? extends com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeriesOrBuilder>
      getTensorboardTimeSeriesOrBuilderList() {
    // Same backing list as getTensorboardTimeSeriesList(), widened to the
    // OrBuilder view.
    return tensorboardTimeSeries_;
  }
  /**
   *
   *
   * <pre>
   * The created TensorboardTimeSeries.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.aiplatform.v1beta1.TensorboardTimeSeries tensorboard_time_series = 1;
   * </code>
   *
   * @return The number of elements in the repeated tensorboard_time_series field.
   */
  @java.lang.Override
  public int getTensorboardTimeSeriesCount() {
    return tensorboardTimeSeries_.size();
  }
  /**
   *
   *
   * <pre>
   * The created TensorboardTimeSeries.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.aiplatform.v1beta1.TensorboardTimeSeries tensorboard_time_series = 1;
   * </code>
   *
   * @param index The zero-based position within the repeated field.
   * @return The element at {@code index}.
   */
  @java.lang.Override
  public com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeries getTensorboardTimeSeries(
      int index) {
    return tensorboardTimeSeries_.get(index);
  }
  /**
   *
   *
   * <pre>
   * The created TensorboardTimeSeries.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.aiplatform.v1beta1.TensorboardTimeSeries tensorboard_time_series = 1;
   * </code>
   *
   * @param index The zero-based position within the repeated field.
   * @return The element at {@code index}, as its OrBuilder view.
   */
  @java.lang.Override
  public com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeriesOrBuilder
      getTensorboardTimeSeriesOrBuilder(int index) {
    return tensorboardTimeSeries_.get(index);
  }
  // -1 = not yet computed, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    // No proto2 required fields exist, so this always memoizes true.
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes each repeated element as field 1, then any preserved unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < tensorboardTimeSeries_.size(); i++) {
      output.writeMessage(1, tensorboardTimeSeries_.get(i));
    }
    getUnknownFields().writeTo(output);
  }
  // Computes the encoded byte size, mirroring writeTo(), and memoizes it in
  // memoizedSize (-1 means "not yet computed").
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < tensorboardTimeSeries_.size(); i++) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              1, tensorboardTimeSeries_.get(i));
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Structural equality over the repeated field and unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj
        instanceof com.google.cloud.aiplatform.v1beta1.BatchCreateTensorboardTimeSeriesResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.aiplatform.v1beta1.BatchCreateTensorboardTimeSeriesResponse other =
        (com.google.cloud.aiplatform.v1beta1.BatchCreateTensorboardTimeSeriesResponse) obj;
    if (!getTensorboardTimeSeriesList().equals(other.getTensorboardTimeSeriesList())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash consistent with equals(); memoized, with 0 as the "unset" sentinel.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getTensorboardTimeSeriesCount() > 0) {
      hash = (37 * hash) + TENSORBOARD_TIME_SERIES_FIELD_NUMBER;
      hash = (53 * hash) + getTensorboardTimeSeriesList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parseFrom overloads: byte sources delegate to the shared
  // PARSER; stream sources route through GeneratedMessageV3's IOException-wrapping
  // helpers, each with and without an extension registry.
  public static com.google.cloud.aiplatform.v1beta1.BatchCreateTensorboardTimeSeriesResponse
      parseFrom(java.nio.ByteBuffer data)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.aiplatform.v1beta1.BatchCreateTensorboardTimeSeriesResponse
      parseFrom(
          java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1beta1.BatchCreateTensorboardTimeSeriesResponse
      parseFrom(com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.aiplatform.v1beta1.BatchCreateTensorboardTimeSeriesResponse
      parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1beta1.BatchCreateTensorboardTimeSeriesResponse
      parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.aiplatform.v1beta1.BatchCreateTensorboardTimeSeriesResponse
      parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1beta1.BatchCreateTensorboardTimeSeriesResponse
      parseFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.aiplatform.v1beta1.BatchCreateTensorboardTimeSeriesResponse
      parseFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
public static com.google.cloud.aiplatform.v1beta1.BatchCreateTensorboardTimeSeriesResponse
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1beta1.BatchCreateTensorboardTimeSeriesResponse
parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.BatchCreateTensorboardTimeSeriesResponse
parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1beta1.BatchCreateTensorboardTimeSeriesResponse
parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
  // Builder factory methods: all builders ultimately derive from DEFAULT_INSTANCE
  // so that an unmodified builder builds the canonical default message.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.aiplatform.v1beta1.BatchCreateTensorboardTimeSeriesResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // Avoid a needless mergeFrom when this is already the default instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Response message for
   * [TensorboardService.BatchCreateTensorboardTimeSeries][google.cloud.aiplatform.v1beta1.TensorboardService.BatchCreateTensorboardTimeSeries].
   * </pre>
   *
   * Protobuf type {@code google.cloud.aiplatform.v1beta1.BatchCreateTensorboardTimeSeriesResponse}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.BatchCreateTensorboardTimeSeriesResponse)
      com.google.cloud.aiplatform.v1beta1.BatchCreateTensorboardTimeSeriesResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.aiplatform.v1beta1.TensorboardServiceProto
          .internal_static_google_cloud_aiplatform_v1beta1_BatchCreateTensorboardTimeSeriesResponse_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.aiplatform.v1beta1.TensorboardServiceProto
          .internal_static_google_cloud_aiplatform_v1beta1_BatchCreateTensorboardTimeSeriesResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.aiplatform.v1beta1.BatchCreateTensorboardTimeSeriesResponse.class,
              com.google.cloud.aiplatform.v1beta1.BatchCreateTensorboardTimeSeriesResponse.Builder
                  .class);
    }

    // Construct using
    // com.google.cloud.aiplatform.v1beta1.BatchCreateTensorboardTimeSeriesResponse.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      // The repeated field is tracked either as a plain list or via the nested
      // field builder (tensorboardTimeSeriesBuilder_); only one is active at a time.
      if (tensorboardTimeSeriesBuilder_ == null) {
        tensorboardTimeSeries_ = java.util.Collections.emptyList();
      } else {
        tensorboardTimeSeries_ = null;
        tensorboardTimeSeriesBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.aiplatform.v1beta1.TensorboardServiceProto
          .internal_static_google_cloud_aiplatform_v1beta1_BatchCreateTensorboardTimeSeriesResponse_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.aiplatform.v1beta1.BatchCreateTensorboardTimeSeriesResponse
        getDefaultInstanceForType() {
      return com.google.cloud.aiplatform.v1beta1.BatchCreateTensorboardTimeSeriesResponse
          .getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.aiplatform.v1beta1.BatchCreateTensorboardTimeSeriesResponse build() {
      com.google.cloud.aiplatform.v1beta1.BatchCreateTensorboardTimeSeriesResponse result =
          buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.aiplatform.v1beta1.BatchCreateTensorboardTimeSeriesResponse
        buildPartial() {
      com.google.cloud.aiplatform.v1beta1.BatchCreateTensorboardTimeSeriesResponse result =
          new com.google.cloud.aiplatform.v1beta1.BatchCreateTensorboardTimeSeriesResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    private void buildPartialRepeatedFields(
        com.google.cloud.aiplatform.v1beta1.BatchCreateTensorboardTimeSeriesResponse result) {
      if (tensorboardTimeSeriesBuilder_ == null) {
        // Freeze the mutable list (if any) before handing it to the immutable message.
        if (((bitField0_ & 0x00000001) != 0)) {
          tensorboardTimeSeries_ = java.util.Collections.unmodifiableList(tensorboardTimeSeries_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.tensorboardTimeSeries_ = tensorboardTimeSeries_;
      } else {
        result.tensorboardTimeSeries_ = tensorboardTimeSeriesBuilder_.build();
      }
    }

    private void buildPartial0(
        com.google.cloud.aiplatform.v1beta1.BatchCreateTensorboardTimeSeriesResponse result) {
      // Generated placeholder: this message has no singular fields to copy.
      int from_bitField0_ = bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other
          instanceof com.google.cloud.aiplatform.v1beta1.BatchCreateTensorboardTimeSeriesResponse) {
        return mergeFrom(
            (com.google.cloud.aiplatform.v1beta1.BatchCreateTensorboardTimeSeriesResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(
        com.google.cloud.aiplatform.v1beta1.BatchCreateTensorboardTimeSeriesResponse other) {
      if (other
          == com.google.cloud.aiplatform.v1beta1.BatchCreateTensorboardTimeSeriesResponse
              .getDefaultInstance()) return this;
      if (tensorboardTimeSeriesBuilder_ == null) {
        if (!other.tensorboardTimeSeries_.isEmpty()) {
          if (tensorboardTimeSeries_.isEmpty()) {
            // Share the other message's (immutable) list; clear the mutable bit.
            tensorboardTimeSeries_ = other.tensorboardTimeSeries_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureTensorboardTimeSeriesIsMutable();
            tensorboardTimeSeries_.addAll(other.tensorboardTimeSeries_);
          }
          onChanged();
        }
      } else {
        if (!other.tensorboardTimeSeries_.isEmpty()) {
          if (tensorboardTimeSeriesBuilder_.isEmpty()) {
            // Discard the empty builder and adopt the other list directly, unless the
            // runtime mandates field builders, in which case recreate one over it.
            tensorboardTimeSeriesBuilder_.dispose();
            tensorboardTimeSeriesBuilder_ = null;
            tensorboardTimeSeries_ = other.tensorboardTimeSeries_;
            bitField0_ = (bitField0_ & ~0x00000001);
            tensorboardTimeSeriesBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getTensorboardTimeSeriesFieldBuilder()
                    : null;
          } else {
            tensorboardTimeSeriesBuilder_.addAllMessages(other.tensorboardTimeSeries_);
          }
        }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              // Tag 10 = field 1 (tensorboard_time_series), wire type LENGTH_DELIMITED.
              {
                com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeries m =
                    input.readMessage(
                        com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeries.parser(),
                        extensionRegistry);
                if (tensorboardTimeSeriesBuilder_ == null) {
                  ensureTensorboardTimeSeriesIsMutable();
                  tensorboardTimeSeries_.add(m);
                } else {
                  tensorboardTimeSeriesBuilder_.addMessage(m);
                }
                break;
              } // case 10
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Bit 0x00000001 marks tensorboardTimeSeries_ as a privately-owned mutable list.
    private int bitField0_;

    private java.util.List<com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeries>
        tensorboardTimeSeries_ = java.util.Collections.emptyList();

    // Copy-on-write: replace a shared/immutable list with a private ArrayList copy.
    private void ensureTensorboardTimeSeriesIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        tensorboardTimeSeries_ =
            new java.util.ArrayList<com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeries>(
                tensorboardTimeSeries_);
        bitField0_ |= 0x00000001;
      }
    }

    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeries,
            com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeries.Builder,
            com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeriesOrBuilder>
        tensorboardTimeSeriesBuilder_;

    /**
     *
     *
     * <pre>
     * The created TensorboardTimeSeries.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.aiplatform.v1beta1.TensorboardTimeSeries tensorboard_time_series = 1;
     * </code>
     */
    public java.util.List<com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeries>
        getTensorboardTimeSeriesList() {
      if (tensorboardTimeSeriesBuilder_ == null) {
        return java.util.Collections.unmodifiableList(tensorboardTimeSeries_);
      } else {
        return tensorboardTimeSeriesBuilder_.getMessageList();
      }
    }

    /**
     *
     *
     * <pre>
     * The created TensorboardTimeSeries.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.aiplatform.v1beta1.TensorboardTimeSeries tensorboard_time_series = 1;
     * </code>
     */
    public int getTensorboardTimeSeriesCount() {
      if (tensorboardTimeSeriesBuilder_ == null) {
        return tensorboardTimeSeries_.size();
      } else {
        return tensorboardTimeSeriesBuilder_.getCount();
      }
    }

    /**
     *
     *
     * <pre>
     * The created TensorboardTimeSeries.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.aiplatform.v1beta1.TensorboardTimeSeries tensorboard_time_series = 1;
     * </code>
     */
    public com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeries getTensorboardTimeSeries(
        int index) {
      if (tensorboardTimeSeriesBuilder_ == null) {
        return tensorboardTimeSeries_.get(index);
      } else {
        return tensorboardTimeSeriesBuilder_.getMessage(index);
      }
    }

    /**
     *
     *
     * <pre>
     * The created TensorboardTimeSeries.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.aiplatform.v1beta1.TensorboardTimeSeries tensorboard_time_series = 1;
     * </code>
     */
    public Builder setTensorboardTimeSeries(
        int index, com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeries value) {
      if (tensorboardTimeSeriesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureTensorboardTimeSeriesIsMutable();
        tensorboardTimeSeries_.set(index, value);
        onChanged();
      } else {
        tensorboardTimeSeriesBuilder_.setMessage(index, value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The created TensorboardTimeSeries.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.aiplatform.v1beta1.TensorboardTimeSeries tensorboard_time_series = 1;
     * </code>
     */
    public Builder setTensorboardTimeSeries(
        int index,
        com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeries.Builder builderForValue) {
      if (tensorboardTimeSeriesBuilder_ == null) {
        ensureTensorboardTimeSeriesIsMutable();
        tensorboardTimeSeries_.set(index, builderForValue.build());
        onChanged();
      } else {
        tensorboardTimeSeriesBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The created TensorboardTimeSeries.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.aiplatform.v1beta1.TensorboardTimeSeries tensorboard_time_series = 1;
     * </code>
     */
    public Builder addTensorboardTimeSeries(
        com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeries value) {
      if (tensorboardTimeSeriesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureTensorboardTimeSeriesIsMutable();
        tensorboardTimeSeries_.add(value);
        onChanged();
      } else {
        tensorboardTimeSeriesBuilder_.addMessage(value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The created TensorboardTimeSeries.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.aiplatform.v1beta1.TensorboardTimeSeries tensorboard_time_series = 1;
     * </code>
     */
    public Builder addTensorboardTimeSeries(
        int index, com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeries value) {
      if (tensorboardTimeSeriesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureTensorboardTimeSeriesIsMutable();
        tensorboardTimeSeries_.add(index, value);
        onChanged();
      } else {
        tensorboardTimeSeriesBuilder_.addMessage(index, value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The created TensorboardTimeSeries.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.aiplatform.v1beta1.TensorboardTimeSeries tensorboard_time_series = 1;
     * </code>
     */
    public Builder addTensorboardTimeSeries(
        com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeries.Builder builderForValue) {
      if (tensorboardTimeSeriesBuilder_ == null) {
        ensureTensorboardTimeSeriesIsMutable();
        tensorboardTimeSeries_.add(builderForValue.build());
        onChanged();
      } else {
        tensorboardTimeSeriesBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The created TensorboardTimeSeries.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.aiplatform.v1beta1.TensorboardTimeSeries tensorboard_time_series = 1;
     * </code>
     */
    public Builder addTensorboardTimeSeries(
        int index,
        com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeries.Builder builderForValue) {
      if (tensorboardTimeSeriesBuilder_ == null) {
        ensureTensorboardTimeSeriesIsMutable();
        tensorboardTimeSeries_.add(index, builderForValue.build());
        onChanged();
      } else {
        tensorboardTimeSeriesBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The created TensorboardTimeSeries.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.aiplatform.v1beta1.TensorboardTimeSeries tensorboard_time_series = 1;
     * </code>
     */
    public Builder addAllTensorboardTimeSeries(
        java.lang.Iterable<? extends com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeries>
            values) {
      if (tensorboardTimeSeriesBuilder_ == null) {
        ensureTensorboardTimeSeriesIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, tensorboardTimeSeries_);
        onChanged();
      } else {
        tensorboardTimeSeriesBuilder_.addAllMessages(values);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The created TensorboardTimeSeries.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.aiplatform.v1beta1.TensorboardTimeSeries tensorboard_time_series = 1;
     * </code>
     */
    public Builder clearTensorboardTimeSeries() {
      if (tensorboardTimeSeriesBuilder_ == null) {
        tensorboardTimeSeries_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        tensorboardTimeSeriesBuilder_.clear();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The created TensorboardTimeSeries.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.aiplatform.v1beta1.TensorboardTimeSeries tensorboard_time_series = 1;
     * </code>
     */
    public Builder removeTensorboardTimeSeries(int index) {
      if (tensorboardTimeSeriesBuilder_ == null) {
        ensureTensorboardTimeSeriesIsMutable();
        tensorboardTimeSeries_.remove(index);
        onChanged();
      } else {
        tensorboardTimeSeriesBuilder_.remove(index);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The created TensorboardTimeSeries.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.aiplatform.v1beta1.TensorboardTimeSeries tensorboard_time_series = 1;
     * </code>
     */
    public com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeries.Builder
        getTensorboardTimeSeriesBuilder(int index) {
      return getTensorboardTimeSeriesFieldBuilder().getBuilder(index);
    }

    /**
     *
     *
     * <pre>
     * The created TensorboardTimeSeries.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.aiplatform.v1beta1.TensorboardTimeSeries tensorboard_time_series = 1;
     * </code>
     */
    public com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeriesOrBuilder
        getTensorboardTimeSeriesOrBuilder(int index) {
      if (tensorboardTimeSeriesBuilder_ == null) {
        return tensorboardTimeSeries_.get(index);
      } else {
        return tensorboardTimeSeriesBuilder_.getMessageOrBuilder(index);
      }
    }

    /**
     *
     *
     * <pre>
     * The created TensorboardTimeSeries.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.aiplatform.v1beta1.TensorboardTimeSeries tensorboard_time_series = 1;
     * </code>
     */
    public java.util.List<
            ? extends com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeriesOrBuilder>
        getTensorboardTimeSeriesOrBuilderList() {
      if (tensorboardTimeSeriesBuilder_ != null) {
        return tensorboardTimeSeriesBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(tensorboardTimeSeries_);
      }
    }

    /**
     *
     *
     * <pre>
     * The created TensorboardTimeSeries.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.aiplatform.v1beta1.TensorboardTimeSeries tensorboard_time_series = 1;
     * </code>
     */
    public com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeries.Builder
        addTensorboardTimeSeriesBuilder() {
      return getTensorboardTimeSeriesFieldBuilder()
          .addBuilder(
              com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeries.getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * The created TensorboardTimeSeries.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.aiplatform.v1beta1.TensorboardTimeSeries tensorboard_time_series = 1;
     * </code>
     */
    public com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeries.Builder
        addTensorboardTimeSeriesBuilder(int index) {
      return getTensorboardTimeSeriesFieldBuilder()
          .addBuilder(
              index,
              com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeries.getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * The created TensorboardTimeSeries.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.aiplatform.v1beta1.TensorboardTimeSeries tensorboard_time_series = 1;
     * </code>
     */
    public java.util.List<com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeries.Builder>
        getTensorboardTimeSeriesBuilderList() {
      return getTensorboardTimeSeriesFieldBuilder().getBuilderList();
    }

    // Lazily switches this builder from list-backed to field-builder-backed storage;
    // after the first call, tensorboardTimeSeries_ is null and the builder owns the data.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeries,
            com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeries.Builder,
            com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeriesOrBuilder>
        getTensorboardTimeSeriesFieldBuilder() {
      if (tensorboardTimeSeriesBuilder_ == null) {
        tensorboardTimeSeriesBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeries,
                com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeries.Builder,
                com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeriesOrBuilder>(
                tensorboardTimeSeries_,
                ((bitField0_ & 0x00000001) != 0),
                getParentForChildren(),
                isClean());
        tensorboardTimeSeries_ = null;
      }
      return tensorboardTimeSeriesBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.BatchCreateTensorboardTimeSeriesResponse)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.BatchCreateTensorboardTimeSeriesResponse)
  private static final com.google.cloud.aiplatform.v1beta1.BatchCreateTensorboardTimeSeriesResponse
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE =
        new com.google.cloud.aiplatform.v1beta1.BatchCreateTensorboardTimeSeriesResponse();
  }

  public static com.google.cloud.aiplatform.v1beta1.BatchCreateTensorboardTimeSeriesResponse
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser implementation: parses partial messages and attaches the partially-built
  // message to any InvalidProtocolBufferException so callers can inspect it.
  private static final com.google.protobuf.Parser<BatchCreateTensorboardTimeSeriesResponse> PARSER =
      new com.google.protobuf.AbstractParser<BatchCreateTensorboardTimeSeriesResponse>() {
        @java.lang.Override
        public BatchCreateTensorboardTimeSeriesResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<BatchCreateTensorboardTimeSeriesResponse> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<BatchCreateTensorboardTimeSeriesResponse> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.aiplatform.v1beta1.BatchCreateTensorboardTimeSeriesResponse
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
oracle/graal | 36,364 | compiler/src/jdk.graal.compiler/src/jdk/graal/compiler/hotspot/CompilationTask.java | /*
* Copyright (c) 2012, 2025, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package jdk.graal.compiler.hotspot;
import static jdk.graal.compiler.core.CompilationWrapper.ExceptionAction.Diagnose;
import static jdk.graal.compiler.core.CompilationWrapper.ExceptionAction.ExitVM;
import static jdk.graal.compiler.core.GraalCompilerOptions.CompilationBailoutAsFailure;
import static jdk.graal.compiler.core.GraalCompilerOptions.CompilationFailureAction;
import static jdk.graal.compiler.core.GraalCompilerOptions.PrintCompilation;
import static jdk.graal.compiler.core.phases.HighTier.Options.Inline;
import static jdk.graal.compiler.hotspot.CompilationTask.Options.MethodRecompilationLimit;
import static jdk.graal.compiler.java.BytecodeParserOptions.InlineDuringParsing;
import java.io.PrintStream;
import java.util.List;
import java.util.ListIterator;
import org.graalvm.collections.EconomicMap;
import jdk.graal.compiler.api.replacements.SnippetReflectionProvider;
import jdk.graal.compiler.code.CompilationResult;
import jdk.graal.compiler.core.CompilationPrinter;
import jdk.graal.compiler.core.CompilationWatchDog;
import jdk.graal.compiler.core.CompilationWrapper;
import jdk.graal.compiler.core.common.CompilationIdentifier;
import jdk.graal.compiler.core.common.LibGraalSupport;
import jdk.graal.compiler.debug.Assertions;
import jdk.graal.compiler.debug.CounterKey;
import jdk.graal.compiler.debug.DebugCloseable;
import jdk.graal.compiler.debug.DebugContext;
import jdk.graal.compiler.debug.DebugContext.Builder;
import jdk.graal.compiler.debug.DebugContext.Description;
import jdk.graal.compiler.debug.DebugDumpHandlersFactory;
import jdk.graal.compiler.debug.DebugDumpScope;
import jdk.graal.compiler.debug.GraalError;
import jdk.graal.compiler.debug.MethodFilter;
import jdk.graal.compiler.debug.TTY;
import jdk.graal.compiler.debug.TimerKey;
import jdk.graal.compiler.hotspot.replaycomp.ReplayCompilationSupport;
import jdk.graal.compiler.nodes.StructuredGraph;
import jdk.graal.compiler.nodes.spi.ProfileProvider;
import jdk.graal.compiler.nodes.spi.StableProfileProvider;
import jdk.graal.compiler.nodes.spi.StableProfileProvider.TypeFilter;
import jdk.graal.compiler.options.Option;
import jdk.graal.compiler.options.OptionKey;
import jdk.graal.compiler.options.OptionType;
import jdk.graal.compiler.options.OptionValues;
import jdk.graal.compiler.options.OptionsParser;
import jdk.graal.compiler.phases.BasePhase;
import jdk.graal.compiler.phases.common.DeoptimizationGroupingPhase;
import jdk.graal.compiler.phases.common.ForceDeoptSpeculationPhase;
import jdk.graal.compiler.phases.schedule.SchedulePhase;
import jdk.graal.compiler.phases.tiers.LowTierContext;
import jdk.graal.compiler.phases.tiers.MidTierContext;
import jdk.graal.compiler.phases.tiers.Suites;
import jdk.graal.compiler.printer.GraalDebugHandlersFactory;
import jdk.graal.compiler.serviceprovider.GraalServices;
import jdk.vm.ci.code.BailoutException;
import jdk.vm.ci.hotspot.HotSpotCompilationRequest;
import jdk.vm.ci.hotspot.HotSpotCompilationRequestResult;
import jdk.vm.ci.hotspot.HotSpotInstalledCode;
import jdk.vm.ci.hotspot.HotSpotJVMCIRuntime;
import jdk.vm.ci.hotspot.HotSpotNmethod;
import jdk.vm.ci.hotspot.HotSpotResolvedJavaMethod;
import jdk.vm.ci.meta.JavaTypeProfile;
import jdk.vm.ci.meta.ProfilingInfo;
import jdk.vm.ci.meta.ResolvedJavaMethod;
import jdk.vm.ci.runtime.JVMCICompiler;
public class CompilationTask implements CompilationWatchDog.EventHandler {
    /** Option declarations specific to {@link CompilationTask}. */
    public static class Options {
        @Option(help = """
                       Options which are enabled based on the method being compiled.
                       The basic syntax is a MethodFilter option specification followed by a list of options to be set for that compilation.
                       "MethodFilter:" is used to distinguish this from normal usage of MethodFilter as option.
                       This can be repeated multiple times with each MethodFilter option separating the groups.
                       For example:
                          " -D""" + HotSpotGraalOptionValues.GRAAL_OPTION_PROPERTY_PREFIX + """
                       PerMethodOptions=MethodFilter:String.indexOf SpeculativeGuardMovement=false MethodFilter:Integer.* SpeculativeGuardMovement=false
                       disables SpeculativeGuardMovement for compiles of String.indexOf and all methods in Integer.
                       If the value starts with a non-letter character, that
                       character is used as the separator between options instead of a space.""")//
        public static final OptionKey<String> PerMethodOptions = new OptionKey<>(null);

        @Option(help = "Hard limit on the number of recompilations to avoid deopt loops. Exceeding the limit results in a permanent bailout. " + //
                       "Negative value means the limit is disabled. The default is -1 (disabled).", type = OptionType.Debug)//
        public static final OptionKey<Integer> MethodRecompilationLimit = new OptionKey<>(-1);

        @Option(help = "When the number of recompilations exceeds the limit, enable the detection of repeated identical deopts and report the source of the deopt loop when detected. " + //
                       "Negative value means the limit is disabled. The default is -1 (disabled).", type = OptionType.Debug)//
        public static final OptionKey<Integer> DetectRecompilationLimit = new OptionKey<>(-1);
    }
    @Override
    public void onStuckCompilation(CompilationWatchDog watchDog, Thread watched, CompilationIdentifier compilation, StackTraceElement[] stackTrace, long stuckTime) {
        // Let the default handler run first (e.g. for reporting), then terminate the VM:
        // a stuck compilation is considered unrecoverable.
        CompilationWatchDog.EventHandler.super.onStuckCompilation(watchDog, watched, compilation, stackTrace, stuckTime);
        TTY.println("Compilation %s on %s appears stuck - exiting VM", compilation, watched);
        HotSpotGraalServices.exit(STUCK_COMPILATION_EXIT_CODE, jvmciRuntime);
    }
    // JVMCI runtime used for VM interaction (e.g. exiting the VM on fatal events).
    private final HotSpotJVMCIRuntime jvmciRuntime;

    protected final HotSpotGraalCompiler compiler;
    protected final HotSpotCompilationIdentifier compilationId;

    // Code installed as the result of this task; assigned during compilation — TODO confirm lifecycle.
    private HotSpotInstalledCode installedCode;

    /**
     * Specifies whether the compilation result is installed as the
     * {@linkplain HotSpotNmethod#isDefault() default} nmethod for the compiled method.
     */
    private final boolean installAsDefault;

    // Provides stable (snapshotted) profiles for the duration of the compilation.
    private final StableProfileProvider profileProvider;

    private final boolean shouldRetainLocalVariables;
    private final boolean shouldUsePreciseUnresolvedDeopts;
    private final boolean eagerResolving;

    // When set, the task checks for recompilation cycles — presumably in combination
    // with MethodRecompilationLimit/DetectRecompilationLimit; verify against usage.
    protected boolean checkRecompileCycle;
    // Decompile count observed for the method at task creation — TODO confirm source.
    protected final int decompileCount;

    /**
     * Filter describing which types in {@link JavaTypeProfile} should be considered for profile
     * writing. This allows programmatically changing which types are saved.
     */
    private TypeFilter profileSaveFilter;
protected class HotSpotCompilationWrapper extends CompilationWrapper<HotSpotCompilationRequestResult> {
protected CompilationResult result;
protected StructuredGraph graph;
        protected HotSpotCompilationWrapper() {
            // Diagnostics output directory and per-action failure counters come from the Graal runtime.
            super(compiler.getGraalRuntime().getOutputDirectory(), compiler.getGraalRuntime().getCompilationProblemsPerAction());
        }
        @Override
        protected DebugContext createRetryDebugContext(DebugContext initialDebug, OptionValues retryOptions, PrintStream logStream) {
            // Build a fresh debug context for the retried compilation that shares the
            // initial context's description and global metrics but uses the retry options
            // and the given log stream.
            SnippetReflectionProvider snippetReflection = compiler.getGraalRuntime().getHostProviders().getSnippetReflection();
            Description description = initialDebug.getDescription();
            DebugDumpHandlersFactory factory = new GraalDebugHandlersFactory(snippetReflection);
            return new Builder(retryOptions, factory).globalMetrics(initialDebug.getGlobalMetrics()).description(description).logStream(logStream).build();
        }
    // Exits the HotSpot VM with the given status via JVMCI services.
    @Override
    protected void exitHostVM(int status) {
        HotSpotGraalServices.exit(status, jvmciRuntime);
    }
    // Formats as "Class.method(params) @ entryBCI" for diagnostic output.
    @Override
    public String toString() {
        return getMethod().format("%H.%n(%p) @ " + getEntryBCI());
    }
    // Parses option strings supplied for a retried compilation into the given option map.
    @Override
    protected void parseRetryOptions(String[] options, EconomicMap<OptionKey<?>, Object> values) {
        OptionsParser.parseOptions(options, values, OptionsParser.getOptionsLoader());
    }
    /**
     * Converts a throwable raised during compilation into a result for the HotSpot
     * compile broker. Bailouts are retryable unless permanent; everything else is
     * reported as a non-retryable failure.
     *
     * @param t the throwable raised during compilation
     * @return the failure result reported back to the compile broker
     */
    @Override
    protected HotSpotCompilationRequestResult handleException(Throwable t) {
        if (t instanceof BailoutException bailout) {
            /*
             * Handling of permanent bailouts: Permanent bailouts that can happen for example
             * due to unsupported unstructured control flow in the bytecodes of a method must
             * not be retried. Hotspot compile broker will ensure that no recompilation at the
             * given tier will happen if retry is false.
             */
            return HotSpotCompilationRequestResult.failure(bailout.getMessage(), !bailout.isPermanent());
        }
        if (t instanceof ForceDeoptSpeculationPhase.TooManyDeoptimizationsError) {
            // Handle this as a permanent bailout
            return HotSpotCompilationRequestResult.failure(t.getMessage(), false);
        }
        /*
         * Treat random exceptions from the compiler as indicating a problem compiling this
         * method. Report the result of toString instead of getMessage to ensure that the
         * exception type is included in the output in case there's no detail message.
         */
        return HotSpotCompilationRequestResult.failure(t.toString(), false);
    }
    // Forces a dump of the current graph (if any) under a scope carrying this
    // compilation's id, so the original failure can be located in the dump files.
    @SuppressWarnings("try")
    @Override
    protected void dumpOnError(DebugContext errorContext, Throwable cause) {
        if (graph != null) {
            try (DebugContext.Scope s = errorContext.scope("DumpOnError", graph, new DebugDumpScope(getIdString(), true), new DebugDumpScope("Original failure"))) {
                errorContext.forceDump(graph, "Exception: %s", cause);
            } catch (Throwable t) {
                throw errorContext.handle(t);
            }
        }
    }
    /**
     * Chooses the action taken for a compilation failure. Permanent bailouts during
     * bootstrap are upgraded to {@code Diagnose} (unless {@code CompilationBailoutAsFailure}
     * was set explicitly). Failures during bootstrap, or assertion failures in libgraal,
     * are upgraded to {@code ExitVM} (unless {@code CompilationFailureAction} was set
     * explicitly). Everything else defers to the superclass.
     */
    @Override
    protected ExceptionAction lookupAction(OptionValues values, Throwable cause) {
        if (cause instanceof BailoutException bailout) {
            if (bailout.isPermanent()) {
                // Respect current action if it has been explicitly set.
                if (!CompilationBailoutAsFailure.hasBeenSet(values)) {
                    // Get more info for permanent bailouts during bootstrap.
                    if (compiler.getGraalRuntime().isBootstrapping()) {
                        return Diagnose;
                    }
                }
            }
            if (!CompilationBailoutAsFailure.getValue(values)) {
                return super.lookupAction(values, cause);
            }
        }
        // Respect current action if it has been explicitly set.
        if (!CompilationFailureAction.hasBeenSet(values)) {
            // Automatically exit on failure during bootstrap.
            if (compiler.getGraalRuntime().isBootstrapping()) {
                TTY.println("Treating CompilationFailureAction as ExitVM due to exception throw during bootstrap: " + cause);
                return ExitVM;
            }
            // Automatically exit on failure when assertions are enabled in libgraal
            if (shouldExitVM(cause)) {
                TTY.println("Treating CompilationFailureAction as ExitVM due to assertion failure in libgraal: " + cause);
                return ExitVM;
            }
        }
        return super.lookupAction(values, cause);
    }
    /**
     * Determines if {@code throwable} should result in a VM exit. Returns true only when
     * running in libgraal with assertions enabled, and {@code throwable} is an
     * {@link AssertionError} or a {@code GraalError} not caused by an OutOfMemoryError.
     */
    private static boolean shouldExitVM(Throwable throwable) {
        // If not in libgraal, don't exit
        if (LibGraalSupport.INSTANCE == null) {
            return false;
        }
        // If assertions are not enabled, don't exit.
        if (!Assertions.assertionsEnabled()) {
            return false;
        }
        // A normal assertion error => exit.
        if (throwable instanceof AssertionError) {
            return true;
        }
        // A GraalError not caused by an OOME => exit.
        if (throwable instanceof GraalError && isNotCausedByOOME(throwable)) {
            return true;
        }
        return false;
    }
/**
* Determines if {@code throwable} has a causality chain denoting an OutOfMemoryError. This
* can happen in GC stress tests and exiting the VM would cause the test to fail.
*/
private static boolean isNotCausedByOOME(Throwable throwable) {
Throwable t = throwable;
while (t != null) {
if (t instanceof OutOfMemoryError) {
return false;
}
t = t.getCause();
}
return true;
}
    /**
     * Compiles the requested method and installs the resulting code.
     *
     * <p>Delegates to {@link #performCompilationWithReplaySupport} when the method matches
     * the record-compilation filter or replay support is already active.
     *
     * @param debug the debug context for this compilation
     * @return the result reported to the HotSpot compile broker
     */
    @SuppressWarnings("try")
    @Override
    protected HotSpotCompilationRequestResult performCompilation(DebugContext debug) {
        HotSpotResolvedJavaMethod method = getMethod();
        if (ReplayCompilationSupport.matchesRecordCompilationFilter(debug.getOptions(), method) || compiler.getGraalRuntime().getReplayCompilationSupport() != null) {
            return performCompilationWithReplaySupport(debug);
        }
        int entryBCI = getEntryBCI();
        final CompilationPrinter printer = CompilationPrinter.begin(debug.getOptions(), compilationId, method, entryBCI);
        try (DebugContext.Scope s = debug.scope("Compiling", new DebugDumpScope(getIdString(), true))) {
            // Build, then compile under the (possibly recompile-adjusted) suites; fields
            // 'graph' and 'result' are kept for error dumps and metrics.
            graph = compiler.createGraph(method, entryBCI, profileProvider, compilationId, debug.getOptions(), debug);
            Suites suites = compiler.getSuites(compiler.getGraalRuntime().getHostProviders(), debug.getOptions());
            adjustSuitesForRecompilation(debug.getOptions(), suites);
            result = compiler.compile(graph, shouldRetainLocalVariables, shouldUsePreciseUnresolvedDeopts, eagerResolving, compilationId, debug, suites);
            performRecompilationCheck(debug.getOptions(), method);
        } catch (Throwable e) {
            throw debug.handle(e);
        }
        try (DebugCloseable b = CodeInstallationTime.start(debug)) {
            installMethod(compiler.getGraalRuntime().getHostBackend(), debug, graph, result);
        }
        // Installation is included in compilation time and memory usage reported by printer
        printer.finish(result, installedCode);
        return buildCompilationRequestResult(method);
    }
    /**
     * Modifies the provided suites to prevent excessive recompilation if necessary.
     * Only active while the recompile cycle is being checked and the method is still
     * below {@code MethodRecompilationLimit} (or the limit is negative, i.e. unlimited).
     *
     * @param options the option values
     * @param suites the suites to modify
     */
    private void adjustSuitesForRecompilation(OptionValues options, Suites suites) {
        if (checkRecompileCycle && (MethodRecompilationLimit.getValue(options) < 0 || decompileCount < MethodRecompilationLimit.getValue(options))) {
            /*
             * Disable DeoptimizationGroupingPhase to simplify the creation of the speculations
             * for each deopt.
             */
            ListIterator<BasePhase<? super MidTierContext>> phase = suites.getMidTier().findPhase(DeoptimizationGroupingPhase.class);
            if (phase != null) {
                phase.remove();
            }
            ListIterator<BasePhase<? super LowTierContext>> lowTierPhasesIterator = suites.getLowTier().findPhase(SchedulePhase.FinalSchedulePhase.class);
            if (lowTierPhasesIterator != null) {
                // Step back so ForceDeoptSpeculationPhase is inserted before
                // FinalSchedulePhase rather than after it.
                lowTierPhasesIterator.previous();
                lowTierPhasesIterator.add(new ForceDeoptSpeculationPhase(decompileCount));
            }
        }
    }
    /**
     * Checks whether the recompilation limit is exceeded, and if so, throws an exception.
     *
     * @param options the option values
     * @param method the compiled method
     * @throws ForceDeoptSpeculationPhase.TooManyDeoptimizationsError if the recompile
     *             cycle is being checked and the method's decompile count has reached a
     *             non-negative {@code MethodRecompilationLimit}
     */
    private void performRecompilationCheck(OptionValues options, HotSpotResolvedJavaMethod method) {
        if (checkRecompileCycle && (MethodRecompilationLimit.getValue(options) >= 0 && decompileCount >= MethodRecompilationLimit.getValue(options))) {
            ProfilingInfo info = profileProvider.getProfilingInfo(method);
            throw new ForceDeoptSpeculationPhase.TooManyDeoptimizationsError("too many decompiles: " + decompileCount + " " + ForceDeoptSpeculationPhase.getDeoptSummary(info));
        }
    }
    // Metrics covering only recorded/replayed compilations (cf. CompilationTime and
    // CompiledBytecodes, which cover regular compilations).
    private static final TimerKey CompilationReplayTime = DebugContext.timer("CompilationReplayTime").doc("The time spent in recorded/replayed compilations.");
    private static final CounterKey CompilationReplayBytecodes = DebugContext.counter("CompilationReplayBytecodes").doc("The size of bytecodes compiled in recorded/replayed compilations.");
    /**
     * Performs a recorded or replayed compilation.
     *
     * @param initialDebug the initial debug context
     * @return the compilation result
     */
    @SuppressWarnings("try")
    private HotSpotCompilationRequestResult performCompilationWithReplaySupport(DebugContext initialDebug) {
        OptionValues options = initialDebug.getOptions();
        HotSpotGraalCompiler selectedCompiler;
        if (compiler.getGraalRuntime().getReplayCompilationSupport() != null) {
            // Replay support is already active: reuse this task's compiler.
            selectedCompiler = compiler;
        } else {
            // Recording was requested via the filter: build a dedicated recording compiler.
            CompilerConfigurationFactory configFactory = CompilerConfigurationFactory.selectFactory(compiler.getGraalRuntime().getCompilerConfigurationName(), options, jvmciRuntime);
            selectedCompiler = HotSpotGraalCompilerFactory.createCompiler("VM-record", jvmciRuntime, options, configFactory, ReplayCompilationSupport.createRecording(configFactory.getName()));
        }
        ReplayCompilationSupport replaySupport = selectedCompiler.getGraalRuntime().getReplayCompilationSupport();
        HotSpotCompilationRequest request = getRequest();
        try (DebugCloseable closeable = replaySupport.enterCompilationContext(request, options)) {
            request = replaySupport.decorateCompilationRequest(request);
            HotSpotResolvedJavaMethod method = request.getMethod();
            /*
             * Passing a snippet reflection instance to the debug handlers would cause replay
             * failures.
             */
            List<DebugDumpHandlersFactory> debugHandlersFactories = List.of(new GraalDebugHandlersFactory(null));
            PrintStream selectedPrintStream = initialDebug.getConfig() == null ? DebugContext.getDefaultLogStream() : initialDebug.getConfig().output();
            try (DebugContext debug = selectedCompiler.getGraalRuntime().openDebugContext(options, compilationId, method, debugHandlersFactories, selectedPrintStream);
                            DebugContext.Activation a = debug.activate();
                            DebugCloseable d = replaySupport.withDebugContext(debug);
                            DebugCloseable c = initialDebug.inRetryCompilation() ? debug.openRetryCompilation() : null;
                            DebugCloseable t = CompilationReplayTime.start(debug)) {
                int entryBCI = getEntryBCI();
                CompilationPrinter printer = CompilationPrinter.begin(options, compilationId, method, entryBCI);
                if (initialDebug.inRetryCompilation()) {
                    // Re-inject profiles queried by the failed attempt so the retry sees the
                    // same profiling data.
                    profileProvider.forQueriedProfiles((profileKey, profilingInfo) -> {
                        replaySupport.injectProfiles(profileKey.method(), profileKey.includeNormal(), profileKey.includeOSR(), profilingInfo);
                    });
                }
                ProfileProvider selectedProfileProvider = new StableProfileProvider();
                try (DebugContext.Scope s = debug.scope("Compiling with replay support", new DebugDumpScope(getIdString(), true))) {
                    graph = selectedCompiler.createGraph(method, entryBCI, selectedProfileProvider, compilationId, options, debug);
                    // NOTE(review): the suites come from this task's original 'compiler' while
                    // the graph is built by 'selectedCompiler' -- confirm this asymmetry is
                    // intentional.
                    Suites suites = compiler.getSuites(compiler.getGraalRuntime().getHostProviders(), debug.getOptions());
                    adjustSuitesForRecompilation(options, suites);
                    result = selectedCompiler.compile(graph, shouldRetainLocalVariables, shouldUsePreciseUnresolvedDeopts, eagerResolving, compilationId, debug, suites);
                    performRecompilationCheck(options, method);
                    CompilationReplayBytecodes.add(debug, result.getBytecodeSize());
                } catch (Throwable e) {
                    throw debug.handle(e);
                }
                try (DebugCloseable b = CodeInstallationTime.start(debug)) {
                    installMethod(selectedCompiler.getGraalRuntime().getHostBackend(), debug, graph, result);
                }
                printer.finish(result, installedCode);
                replaySupport.recordCompilationArtifacts(graph, result);
                return buildCompilationRequestResult(method);
            }
        }
    }
    /**
     * Builds a success result whose inlined-bytecode count is the total compiled
     * bytecode size minus the root method's own code size.
     */
    protected HotSpotCompilationRequestResult buildCompilationRequestResult(HotSpotResolvedJavaMethod method) {
        // For compilation of substitutions the method in the compilation request might be
        // different than the actual method parsed. The root of the compilation will always
        // be the first method in the methods list, so use that instead.
        ResolvedJavaMethod rootMethod = result.getMethods()[0];
        int inlinedBytecodes = result.getBytecodeSize() - rootMethod.getCodeSize();
        assert inlinedBytecodes >= 0 : rootMethod + " " + method;
        return HotSpotCompilationRequestResult.success(inlinedBytecodes);
    }
}
/**
 * Creates a task with local-variable retention, precise unresolved deopts and eager
 * resolving all disabled.
 */
public CompilationTask(HotSpotJVMCIRuntime jvmciRuntime,
                HotSpotGraalCompiler compiler,
                HotSpotCompilationRequest request,
                boolean useProfilingInfo,
                boolean installAsDefault) {
    this(jvmciRuntime, compiler, request, useProfilingInfo, false, false, false, installAsDefault);
}
/**
 * Creates a task with eager resolving disabled.
 */
public CompilationTask(HotSpotJVMCIRuntime jvmciRuntime,
                HotSpotGraalCompiler compiler,
                HotSpotCompilationRequest request,
                boolean useProfilingInfo,
                boolean shouldRetainLocalVariables,
                boolean shouldUsePreciseUnresolvedDeopts,
                boolean installAsDefault) {
    this(jvmciRuntime, compiler, request, useProfilingInfo, shouldRetainLocalVariables, shouldUsePreciseUnresolvedDeopts, false, installAsDefault);
}
/**
 * Creates a compilation task.
 *
 * @param jvmciRuntime the JVMCI runtime used to interact with the VM
 * @param compiler the compiler performing the compilation
 * @param request the compilation request (method, entry BCI, id)
 * @param useProfilingInfo if true, profiles are read through a {@link StableProfileProvider};
 *            otherwise no profile provider is used
 * @param shouldRetainLocalVariables forwarded to the compiler's compile call
 * @param shouldUsePreciseUnresolvedDeopts forwarded to the compiler's compile call
 * @param eagerResolving forwarded to the compiler's compile call
 * @param installAsDefault whether the result is installed as the default nmethod
 */
public CompilationTask(HotSpotJVMCIRuntime jvmciRuntime,
                HotSpotGraalCompiler compiler,
                HotSpotCompilationRequest request,
                boolean useProfilingInfo,
                boolean shouldRetainLocalVariables,
                boolean shouldUsePreciseUnresolvedDeopts,
                boolean eagerResolving,
                boolean installAsDefault) {
    this.jvmciRuntime = jvmciRuntime;
    this.compiler = compiler;
    this.compilationId = new HotSpotCompilationIdentifier(request);
    this.profileProvider = useProfilingInfo ? new StableProfileProvider() : null;
    this.shouldRetainLocalVariables = shouldRetainLocalVariables;
    this.shouldUsePreciseUnresolvedDeopts = shouldUsePreciseUnresolvedDeopts;
    this.eagerResolving = eagerResolving;
    this.installAsDefault = installAsDefault;
    // Snapshot the decompile count now; performRecompilationCheck compares it against
    // MethodRecompilationLimit later.
    this.decompileCount = HotSpotGraalServices.getDecompileCount(request.getMethod());
}
/**
 * Sets the filter selecting which types in a {@link JavaTypeProfile} are considered
 * for profile writing (see {@link #profileSaveFilter}).
 */
public void setTypeFilter(TypeFilter typeFilter) {
    this.profileSaveFilter = typeFilter;
}
/**
 * Derives the option values for this compilation from {@code originalOptions}:
 * applies any {@code PerMethodOptions} settings whose MethodFilter matches this
 * task's method, and disables Graal inlining options when HotSpot has inlining
 * disabled (unless they were set explicitly in Graal). Exits the VM if
 * {@code PerMethodOptions} cannot be parsed.
 *
 * @param originalOptions the base option values
 * @return the (possibly extended) option values for this compilation
 */
public OptionValues filterOptions(OptionValues originalOptions) {
    HotSpotGraalRuntimeProvider graalRuntime = compiler.getGraalRuntime();
    GraalHotSpotVMConfig config = graalRuntime.getVMConfig();
    OptionValues newOptions = originalOptions;
    // Set any options for this compile.
    String perMethodOptions = Options.PerMethodOptions.getValue(originalOptions);
    if (perMethodOptions != null) {
        EconomicMap<OptionKey<?>, Object> values = null;
        try {
            EconomicMap<String, String> optionSettings = null;
            for (String option : OptionsParser.splitOptions(perMethodOptions)) {
                String prefix = "MethodFilter:";
                if (option.startsWith(prefix)) {
                    MethodFilter filter = MethodFilter.parse(option.substring(prefix.length()));
                    if (filter.matches(getMethod())) {
                        // Begin accumulating options
                        optionSettings = EconomicMap.create();
                    } else if (optionSettings != null) {
                        // This is a new MethodFilter: so stop collecting options
                        break;
                    }
                } else if (optionSettings != null) {
                    OptionsParser.parseOptionSettingTo(option, optionSettings);
                }
            }
            if (optionSettings != null) {
                if (optionSettings.isEmpty()) {
                    throw new IllegalArgumentException("No options specified for MethodFilter:");
                }
                values = EconomicMap.create();
                OptionsParser.parseOptions(optionSettings, values, OptionsParser.getOptionsLoader());
            }
        } catch (Exception e) {
            // Malformed PerMethodOptions is treated as fatal.
            values = null;
            TTY.println(e.toString());
            TTY.println("Errors encountered during " + Options.PerMethodOptions.getName() + " parsing. Exiting...");
            HotSpotGraalServices.exit(-1, jvmciRuntime);
        }
        if (values != null) {
            newOptions = new OptionValues(newOptions, values);
            if (PrintCompilation.getValue(newOptions)) {
                TTY.println("Compiling " + getMethod() + " with extra options: " + new OptionValues(values));
            }
        }
    }
    /*
     * Disable inlining if HotSpot has it disabled unless it's been explicitly set in Graal.
     */
    if (!config.inline) {
        EconomicMap<OptionKey<?>, Object> m = OptionValues.newOptionMap();
        if (Inline.getValue(newOptions) && !Inline.hasBeenSet(newOptions)) {
            m.put(Inline, false);
        }
        if (InlineDuringParsing.getValue(newOptions) && !InlineDuringParsing.hasBeenSet(newOptions)) {
            m.put(InlineDuringParsing, false);
        }
        if (!m.isEmpty()) {
            newOptions = new OptionValues(newOptions, m);
        }
    }
    return newOptions;
}
/** Returns the method to be compiled by this task. */
public HotSpotResolvedJavaMethod getMethod() {
    return getRequest().getMethod();
}
/** Returns the identifier under which this compilation is tracked. */
CompilationIdentifier getCompilationIdentifier() {
    return compilationId;
}
/**
 * Returns the HotSpot id of this compilation.
 *
 * @return HotSpot compile id (taken from the compilation request)
 */
public int getId() {
    return getRequest().getId();
}
/** Returns the request's entry BCI (INVOCATION_ENTRY_BCI for a non-OSR compilation). */
public int getEntryBCI() {
    return getRequest().getEntryBCI();
}
/** Returns the profile provider, or null when profiling info is not used. */
public StableProfileProvider getProfileProvider() {
    return profileProvider;
}
/**
 * Returns the compilation id as a string, with a trailing '%' appended for OSR
 * compilations to match PrintCompilation style output.
 */
public String getIdString() {
    boolean isOsr = getEntryBCI() != JVMCICompiler.INVOCATION_ENTRY_BCI;
    String id = Integer.toString(getId());
    return isOsr ? id + "%" : id;
}
/** Returns the code installed by this task, or null if installation has not succeeded. */
public HotSpotInstalledCode getInstalledCode() {
    return installedCode;
}
/**
* Time spent in compilation.
*/
public static final TimerKey CompilationTime = DebugContext.timer("CompilationTime").doc("Time spent in compilation and code installation.");
/**
* Counts the number of compiled {@linkplain CompilationResult#getBytecodeSize() bytecodes}.
*/
private static final CounterKey CompiledBytecodes = DebugContext.counter("CompiledBytecodes");
/**
* Counts the number of compiled {@linkplain CompilationResult#getBytecodeSize() bytecodes} for
* which {@linkplain CompilationResult#getTargetCode()} code was installed.
*/
public static final CounterKey CompiledAndInstalledBytecodes = DebugContext.counter("CompiledAndInstalledBytecodes");
/**
* Counts the number of installed {@linkplain CompilationResult#getTargetCodeSize()} bytes.
*/
private static final CounterKey InstalledCodeSize = DebugContext.counter("InstalledCodeSize");
/**
* Time spent in code installation.
*/
public static final TimerKey CodeInstallationTime = DebugContext.timer("CodeInstallation");
/**
* Time spent in hinted full GC.
*/
public static final TimerKey HintedFullGC = DebugContext.timer("HintedFullGC").doc("Time spent in hinted GC performed at the end of compilations.");
/**
 * Entry point: filters the initial options for this compilation, opens a debug
 * context and runs the compilation under it.
 */
public HotSpotCompilationRequestResult runCompilation(OptionValues initialOptions) {
    OptionValues options = filterOptions(initialOptions);
    HotSpotGraalRuntimeProvider graalRuntime = compiler.getGraalRuntime();
    try (DebugContext debug = graalRuntime.openDebugContext(options, compilationId, getMethod(), compiler.getDebugHandlersFactories(), DebugContext.getDefaultLogStream())) {
        return runCompilation(debug);
    }
}
/**
 * Runs the compilation under the compilation and GC timers and, when running in
 * libgraal, afterwards hints the runtime that compilation-local allocations are dead.
 */
@SuppressWarnings({"try"})
public HotSpotCompilationRequestResult runCompilation(DebugContext debug) {
    try (DebugCloseable a = CompilationTime.start(debug); DebugCloseable b = GraalServices.GCTimerScope.create(debug)) {
        HotSpotCompilationRequestResult result = runCompilation(debug, new HotSpotCompilationWrapper());
        LibGraalSupport libgraal = LibGraalSupport.INSTANCE;
        if (libgraal != null) {
            /*
             * Notify the libgraal runtime that most objects allocated in the current
             * compilation are dead and can be reclaimed.
             */
            try (DebugCloseable timer = HintedFullGC.start(debug)) {
                libgraal.notifyLowMemoryPoint(true);
                libgraal.processReferences();
            }
        }
        return result;
    }
}
/**
 * Runs {@code compilation} after first checking that the method still needs to be
 * compiled and is not excluded. In a finally block, updates the compiled/installed
 * bytecode metrics and performs profile-replay bookkeeping; exceptions thrown there
 * are converted to a failure result via {@code compilation.handleException}.
 */
@SuppressWarnings({"try", "unchecked"})
protected HotSpotCompilationRequestResult runCompilation(DebugContext debug, HotSpotCompilationWrapper compilation) {
    HotSpotGraalRuntimeProvider graalRuntime = compiler.getGraalRuntime();
    GraalHotSpotVMConfig config = graalRuntime.getVMConfig();
    int entryBCI = getEntryBCI();
    boolean isOSR = entryBCI != JVMCICompiler.INVOCATION_ENTRY_BCI;
    HotSpotResolvedJavaMethod method = getMethod();
    if (installAsDefault || isOSR) {
        // If there is already compiled code for this method on our level we simply return.
        // JVMCI compiles are always at the highest compile level, even in non-tiered mode so we
        // only need to check for that value.
        if (method.hasCodeAtLevel(entryBCI, config.compilationLevelFullOptimization)) {
            return HotSpotCompilationRequestResult.failure("Already compiled", false);
        }
        if (HotSpotGraalCompilerFactory.shouldExclude(method)) {
            return HotSpotCompilationRequestResult.failure("GraalCompileOnly excluded", false);
        }
    }
    ProfileReplaySupport result = ProfileReplaySupport.profileReplayPrologue(debug, entryBCI, method, profileProvider, profileSaveFilter);
    try {
        return compilation.run(debug);
    } finally {
        try {
            if (compilation.result != null) {
                int compiledBytecodes = compilation.result.getBytecodeSize();
                CompiledBytecodes.add(debug, compiledBytecodes);
                if (installedCode != null) {
                    int codeSize = installedCode.getSize();
                    CompiledAndInstalledBytecodes.add(debug, compiledBytecodes);
                    InstalledCodeSize.add(debug, codeSize);
                }
                if (result != null && result.getExpectedResult() != null && !result.getExpectedResult()) {
                    TTY.printf("Expected failure: %s %s%n", method.format("%H.%n(%P)%R"), entryBCI);
                }
            }
            if (result != null) {
                result.profileReplayEpilogue(debug, compilation.result, compilation.graph, profileProvider, compilationId, entryBCI, method);
            }
        } catch (Throwable t) {
            return compilation.handleException(t);
        }
    }
}
/**
 * Installs {@code compResult} via the backend's code cache, storing the resulting
 * nmethod in {@link #installedCode} (which is reset to null first, so it remains
 * null if installation fails).
 */
@SuppressWarnings("try")
protected void installMethod(HotSpotBackend backend, DebugContext debug, StructuredGraph graph, final CompilationResult compResult) {
    installedCode = null;
    Object[] context = {new DebugDumpScope(getIdString(), true), backend.getProviders().getCodeCache(), getMethod(), compResult};
    try (DebugContext.Scope s = debug.scope("CodeInstall", context, graph)) {
        HotSpotCompilationRequest request = getRequest();
        // By default, we only profile deoptimizations for compiled methods installed as
        // default.
        installedCode = (HotSpotInstalledCode) backend.createInstalledCode(debug,
                        request.getMethod(),
                        request,
                        compResult,
                        null,
                        installAsDefault,
                        installAsDefault,
                        context);
    } catch (Throwable e) {
        throw debug.handle(e);
    }
}
/**
 * Returns "Compilation[id=<id>, <method>]", appending "@<bci>" to the method for
 * OSR compilations.
 */
@Override
public String toString() {
    StringBuilder sb = new StringBuilder("Compilation[id=").append(getId()).append(", ").append(getMethod().format("%H.%n(%p)"));
    if (getEntryBCI() != JVMCICompiler.INVOCATION_ENTRY_BCI) {
        sb.append('@').append(getEntryBCI());
    }
    return sb.append(']').toString();
}
/** Returns the compilation request backing this task (held by the compilation id). */
private HotSpotCompilationRequest getRequest() {
    return compilationId.getRequest();
}
}
|
googleapis/google-cloud-java | 35,988 | java-tpu/proto-google-cloud-tpu-v2alpha1/src/main/java/com/google/cloud/tpu/v2alpha1/ListReservationsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/tpu/v2alpha1/cloud_tpu.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.tpu.v2alpha1;
/**
*
*
* <pre>
* Response for
* [ListReservations][google.cloud.tpu.v2alpha1.Tpu.ListReservations].
* </pre>
*
* Protobuf type {@code google.cloud.tpu.v2alpha1.ListReservationsResponse}
*/
public final class ListReservationsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.tpu.v2alpha1.ListReservationsResponse)
ListReservationsResponseOrBuilder {
  // NOTE(review): protoc-generated code (file header says DO NOT EDIT). Comments below are
  // for readability only; regeneration from cloud_tpu.proto will discard them.
  private static final long serialVersionUID = 0L;

  // Use ListReservationsResponse.newBuilder() to construct.
  private ListReservationsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default instance state: empty reservation list, empty page token.
  private ListReservationsResponse() {
    reservations_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListReservationsResponse();
  }

  // Descriptor/accessor-table plumbing generated from the proto schema.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.tpu.v2alpha1.CloudTpuProto
        .internal_static_google_cloud_tpu_v2alpha1_ListReservationsResponse_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.tpu.v2alpha1.CloudTpuProto
        .internal_static_google_cloud_tpu_v2alpha1_ListReservationsResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.tpu.v2alpha1.ListReservationsResponse.class,
            com.google.cloud.tpu.v2alpha1.ListReservationsResponse.Builder.class);
  }
  public static final int RESERVATIONS_FIELD_NUMBER = 1;

  // Backing list for field 1; immutable once the message is built.
  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.tpu.v2alpha1.Reservation> reservations_;

  /**
   *
   *
   * <pre>
   * The listed reservations.
   * </pre>
   *
   * <code>repeated .google.cloud.tpu.v2alpha1.Reservation reservations = 1;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.tpu.v2alpha1.Reservation> getReservationsList() {
    return reservations_;
  }

  /**
   *
   *
   * <pre>
   * The listed reservations.
   * </pre>
   *
   * <code>repeated .google.cloud.tpu.v2alpha1.Reservation reservations = 1;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.tpu.v2alpha1.ReservationOrBuilder>
      getReservationsOrBuilderList() {
    return reservations_;
  }

  /**
   *
   *
   * <pre>
   * The listed reservations.
   * </pre>
   *
   * <code>repeated .google.cloud.tpu.v2alpha1.Reservation reservations = 1;</code>
   */
  @java.lang.Override
  public int getReservationsCount() {
    return reservations_.size();
  }

  /**
   *
   *
   * <pre>
   * The listed reservations.
   * </pre>
   *
   * <code>repeated .google.cloud.tpu.v2alpha1.Reservation reservations = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.tpu.v2alpha1.Reservation getReservations(int index) {
    return reservations_.get(index);
  }

  /**
   *
   *
   * <pre>
   * The listed reservations.
   * </pre>
   *
   * <code>repeated .google.cloud.tpu.v2alpha1.Reservation reservations = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.tpu.v2alpha1.ReservationOrBuilder getReservationsOrBuilder(int index) {
    return reservations_.get(index);
  }
  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;

  // Field 2; holds either a String or a ByteString (lazily decoded, standard protobuf
  // string-caching scheme).
  @SuppressWarnings("serial")
  private volatile java.lang.Object nextPageToken_ = "";

  /**
   *
   *
   * <pre>
   * The next page token or empty if none.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // First access after parsing: decode the ByteString once and cache the String.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * The next page token or empty if none.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized isInitialized() result: -1 = unknown, 1 = true, 0 = false. This message has
  // no required fields, so the check always succeeds.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }

  // Serializes field 1 (each reservation) and field 2 (page token, skipped when empty),
  // followed by any unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < reservations_.size(); i++) {
      output.writeMessage(1, reservations_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }

  // Computes (and memoizes in memoizedSize) the serialized size matching writeTo.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < reservations_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, reservations_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality over both fields plus unknown fields, per the protobuf message contract.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.tpu.v2alpha1.ListReservationsResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.tpu.v2alpha1.ListReservationsResponse other =
        (com.google.cloud.tpu.v2alpha1.ListReservationsResponse) obj;
    if (!getReservationsList().equals(other.getReservationsList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  // Hash consistent with equals; memoized in memoizedHashCode (0 means not yet computed).
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getReservationsCount() > 0) {
      hash = (37 * hash) + RESERVATIONS_FIELD_NUMBER;
      hash = (53 * hash) + getReservationsList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points: one overload per input type, all delegating to
  // PARSER or the GeneratedMessageV3 stream helpers.
  public static com.google.cloud.tpu.v2alpha1.ListReservationsResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.tpu.v2alpha1.ListReservationsResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.tpu.v2alpha1.ListReservationsResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.tpu.v2alpha1.ListReservationsResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.tpu.v2alpha1.ListReservationsResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.tpu.v2alpha1.ListReservationsResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.tpu.v2alpha1.ListReservationsResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.tpu.v2alpha1.ListReservationsResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.tpu.v2alpha1.ListReservationsResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.tpu.v2alpha1.ListReservationsResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.tpu.v2alpha1.ListReservationsResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.tpu.v2alpha1.ListReservationsResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Builder factory methods.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.tpu.v2alpha1.ListReservationsResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Response for
   * [ListReservations][google.cloud.tpu.v2alpha1.Tpu.ListReservations].
   * </pre>
   *
   * Protobuf type {@code google.cloud.tpu.v2alpha1.ListReservationsResponse}
   */
  // NOTE: protoc-generated builder. bitField0_ bit 0x1 tracks "reservations list is
  // builder-owned/mutable"; bit 0x2 tracks presence of next_page_token.
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.tpu.v2alpha1.ListReservationsResponse)
      com.google.cloud.tpu.v2alpha1.ListReservationsResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.tpu.v2alpha1.CloudTpuProto
          .internal_static_google_cloud_tpu_v2alpha1_ListReservationsResponse_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.tpu.v2alpha1.CloudTpuProto
          .internal_static_google_cloud_tpu_v2alpha1_ListReservationsResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.tpu.v2alpha1.ListReservationsResponse.class,
              com.google.cloud.tpu.v2alpha1.ListReservationsResponse.Builder.class);
    }
    // Construct using com.google.cloud.tpu.v2alpha1.ListReservationsResponse.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (reservationsBuilder_ == null) {
        reservations_ = java.util.Collections.emptyList();
      } else {
        reservations_ = null;
        reservationsBuilder_.clear();
      }
      // Clear the "reservations mutable" bit regardless of which branch ran above.
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.tpu.v2alpha1.CloudTpuProto
          .internal_static_google_cloud_tpu_v2alpha1_ListReservationsResponse_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.tpu.v2alpha1.ListReservationsResponse getDefaultInstanceForType() {
      return com.google.cloud.tpu.v2alpha1.ListReservationsResponse.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.tpu.v2alpha1.ListReservationsResponse build() {
      com.google.cloud.tpu.v2alpha1.ListReservationsResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.tpu.v2alpha1.ListReservationsResponse buildPartial() {
      com.google.cloud.tpu.v2alpha1.ListReservationsResponse result =
          new com.google.cloud.tpu.v2alpha1.ListReservationsResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Moves the repeated reservations into the result; freezes the builder-held list so
    // subsequent builder mutations cannot affect the built message.
    private void buildPartialRepeatedFields(
        com.google.cloud.tpu.v2alpha1.ListReservationsResponse result) {
      if (reservationsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          reservations_ = java.util.Collections.unmodifiableList(reservations_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.reservations_ = reservations_;
      } else {
        result.reservations_ = reservationsBuilder_.build();
      }
    }
    // Copies singular fields whose presence bit is set.
    private void buildPartial0(com.google.cloud.tpu.v2alpha1.ListReservationsResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.tpu.v2alpha1.ListReservationsResponse) {
        return mergeFrom((com.google.cloud.tpu.v2alpha1.ListReservationsResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(com.google.cloud.tpu.v2alpha1.ListReservationsResponse other) {
      if (other == com.google.cloud.tpu.v2alpha1.ListReservationsResponse.getDefaultInstance())
        return this;
      if (reservationsBuilder_ == null) {
        // Plain-list mode: share other's (immutable) list when ours is empty, else copy-append.
        if (!other.reservations_.isEmpty()) {
          if (reservations_.isEmpty()) {
            reservations_ = other.reservations_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureReservationsIsMutable();
            reservations_.addAll(other.reservations_);
          }
          onChanged();
        }
      } else {
        // Field-builder mode: merge through the builder so child builders stay consistent.
        if (!other.reservations_.isEmpty()) {
          if (reservationsBuilder_.isEmpty()) {
            reservationsBuilder_.dispose();
            reservationsBuilder_ = null;
            reservations_ = other.reservations_;
            bitField0_ = (bitField0_ & ~0x00000001);
            reservationsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getReservationsFieldBuilder()
                    : null;
          } else {
            reservationsBuilder_.addAllMessages(other.reservations_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.cloud.tpu.v2alpha1.Reservation m =
                    input.readMessage(
                        com.google.cloud.tpu.v2alpha1.Reservation.parser(), extensionRegistry);
                if (reservationsBuilder_ == null) {
                  ensureReservationsIsMutable();
                  reservations_.add(m);
                } else {
                  reservationsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    private int bitField0_;
    private java.util.List<com.google.cloud.tpu.v2alpha1.Reservation> reservations_ =
        java.util.Collections.emptyList();
    // Copy-on-write: replace the (possibly shared or immutable) list with a mutable
    // ArrayList the first time a mutation is requested, then set the ownership bit.
    private void ensureReservationsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        reservations_ =
            new java.util.ArrayList<com.google.cloud.tpu.v2alpha1.Reservation>(reservations_);
        bitField0_ |= 0x00000001;
      }
    }
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.tpu.v2alpha1.Reservation,
            com.google.cloud.tpu.v2alpha1.Reservation.Builder,
            com.google.cloud.tpu.v2alpha1.ReservationOrBuilder>
        reservationsBuilder_;
    /**
     *
     *
     * <pre>
     * The listed reservations.
     * </pre>
     *
     * <code>repeated .google.cloud.tpu.v2alpha1.Reservation reservations = 1;</code>
     */
    public java.util.List<com.google.cloud.tpu.v2alpha1.Reservation> getReservationsList() {
      if (reservationsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(reservations_);
      } else {
        return reservationsBuilder_.getMessageList();
      }
    }
    /**
     *
     *
     * <pre>
     * The listed reservations.
     * </pre>
     *
     * <code>repeated .google.cloud.tpu.v2alpha1.Reservation reservations = 1;</code>
     */
    public int getReservationsCount() {
      if (reservationsBuilder_ == null) {
        return reservations_.size();
      } else {
        return reservationsBuilder_.getCount();
      }
    }
    /**
     *
     *
     * <pre>
     * The listed reservations.
     * </pre>
     *
     * <code>repeated .google.cloud.tpu.v2alpha1.Reservation reservations = 1;</code>
     */
    public com.google.cloud.tpu.v2alpha1.Reservation getReservations(int index) {
      if (reservationsBuilder_ == null) {
        return reservations_.get(index);
      } else {
        return reservationsBuilder_.getMessage(index);
      }
    }
    /**
     *
     *
     * <pre>
     * The listed reservations.
     * </pre>
     *
     * <code>repeated .google.cloud.tpu.v2alpha1.Reservation reservations = 1;</code>
     */
    public Builder setReservations(int index, com.google.cloud.tpu.v2alpha1.Reservation value) {
      if (reservationsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureReservationsIsMutable();
        reservations_.set(index, value);
        onChanged();
      } else {
        reservationsBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The listed reservations.
     * </pre>
     *
     * <code>repeated .google.cloud.tpu.v2alpha1.Reservation reservations = 1;</code>
     */
    public Builder setReservations(
        int index, com.google.cloud.tpu.v2alpha1.Reservation.Builder builderForValue) {
      if (reservationsBuilder_ == null) {
        ensureReservationsIsMutable();
        reservations_.set(index, builderForValue.build());
        onChanged();
      } else {
        reservationsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The listed reservations.
     * </pre>
     *
     * <code>repeated .google.cloud.tpu.v2alpha1.Reservation reservations = 1;</code>
     */
    public Builder addReservations(com.google.cloud.tpu.v2alpha1.Reservation value) {
      if (reservationsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureReservationsIsMutable();
        reservations_.add(value);
        onChanged();
      } else {
        reservationsBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The listed reservations.
     * </pre>
     *
     * <code>repeated .google.cloud.tpu.v2alpha1.Reservation reservations = 1;</code>
     */
    public Builder addReservations(int index, com.google.cloud.tpu.v2alpha1.Reservation value) {
      if (reservationsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureReservationsIsMutable();
        reservations_.add(index, value);
        onChanged();
      } else {
        reservationsBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The listed reservations.
     * </pre>
     *
     * <code>repeated .google.cloud.tpu.v2alpha1.Reservation reservations = 1;</code>
     */
    public Builder addReservations(
        com.google.cloud.tpu.v2alpha1.Reservation.Builder builderForValue) {
      if (reservationsBuilder_ == null) {
        ensureReservationsIsMutable();
        reservations_.add(builderForValue.build());
        onChanged();
      } else {
        reservationsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The listed reservations.
     * </pre>
     *
     * <code>repeated .google.cloud.tpu.v2alpha1.Reservation reservations = 1;</code>
     */
    public Builder addReservations(
        int index, com.google.cloud.tpu.v2alpha1.Reservation.Builder builderForValue) {
      if (reservationsBuilder_ == null) {
        ensureReservationsIsMutable();
        reservations_.add(index, builderForValue.build());
        onChanged();
      } else {
        reservationsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The listed reservations.
     * </pre>
     *
     * <code>repeated .google.cloud.tpu.v2alpha1.Reservation reservations = 1;</code>
     */
    public Builder addAllReservations(
        java.lang.Iterable<? extends com.google.cloud.tpu.v2alpha1.Reservation> values) {
      if (reservationsBuilder_ == null) {
        ensureReservationsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, reservations_);
        onChanged();
      } else {
        reservationsBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The listed reservations.
     * </pre>
     *
     * <code>repeated .google.cloud.tpu.v2alpha1.Reservation reservations = 1;</code>
     */
    public Builder clearReservations() {
      if (reservationsBuilder_ == null) {
        reservations_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        reservationsBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The listed reservations.
     * </pre>
     *
     * <code>repeated .google.cloud.tpu.v2alpha1.Reservation reservations = 1;</code>
     */
    public Builder removeReservations(int index) {
      if (reservationsBuilder_ == null) {
        ensureReservationsIsMutable();
        reservations_.remove(index);
        onChanged();
      } else {
        reservationsBuilder_.remove(index);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The listed reservations.
     * </pre>
     *
     * <code>repeated .google.cloud.tpu.v2alpha1.Reservation reservations = 1;</code>
     */
    public com.google.cloud.tpu.v2alpha1.Reservation.Builder getReservationsBuilder(int index) {
      return getReservationsFieldBuilder().getBuilder(index);
    }
    /**
     *
     *
     * <pre>
     * The listed reservations.
     * </pre>
     *
     * <code>repeated .google.cloud.tpu.v2alpha1.Reservation reservations = 1;</code>
     */
    public com.google.cloud.tpu.v2alpha1.ReservationOrBuilder getReservationsOrBuilder(int index) {
      if (reservationsBuilder_ == null) {
        return reservations_.get(index);
      } else {
        return reservationsBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     *
     *
     * <pre>
     * The listed reservations.
     * </pre>
     *
     * <code>repeated .google.cloud.tpu.v2alpha1.Reservation reservations = 1;</code>
     */
    public java.util.List<? extends com.google.cloud.tpu.v2alpha1.ReservationOrBuilder>
        getReservationsOrBuilderList() {
      if (reservationsBuilder_ != null) {
        return reservationsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(reservations_);
      }
    }
    /**
     *
     *
     * <pre>
     * The listed reservations.
     * </pre>
     *
     * <code>repeated .google.cloud.tpu.v2alpha1.Reservation reservations = 1;</code>
     */
    public com.google.cloud.tpu.v2alpha1.Reservation.Builder addReservationsBuilder() {
      return getReservationsFieldBuilder()
          .addBuilder(com.google.cloud.tpu.v2alpha1.Reservation.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The listed reservations.
     * </pre>
     *
     * <code>repeated .google.cloud.tpu.v2alpha1.Reservation reservations = 1;</code>
     */
    public com.google.cloud.tpu.v2alpha1.Reservation.Builder addReservationsBuilder(int index) {
      return getReservationsFieldBuilder()
          .addBuilder(index, com.google.cloud.tpu.v2alpha1.Reservation.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The listed reservations.
     * </pre>
     *
     * <code>repeated .google.cloud.tpu.v2alpha1.Reservation reservations = 1;</code>
     */
    public java.util.List<com.google.cloud.tpu.v2alpha1.Reservation.Builder>
        getReservationsBuilderList() {
      return getReservationsFieldBuilder().getBuilderList();
    }
    // Lazily creates the repeated-field builder; from then on reservations_ is owned by
    // it and the local reference is nulled out.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.tpu.v2alpha1.Reservation,
            com.google.cloud.tpu.v2alpha1.Reservation.Builder,
            com.google.cloud.tpu.v2alpha1.ReservationOrBuilder>
        getReservationsFieldBuilder() {
      if (reservationsBuilder_ == null) {
        reservationsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.tpu.v2alpha1.Reservation,
                com.google.cloud.tpu.v2alpha1.Reservation.Builder,
                com.google.cloud.tpu.v2alpha1.ReservationOrBuilder>(
                reservations_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        reservations_ = null;
      }
      return reservationsBuilder_;
    }
    // Holds either a String or a ByteString; converted lazily between the two.
    private java.lang.Object nextPageToken_ = "";
    /**
     *
     *
     * <pre>
     * The next page token or empty if none.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        // Decode the cached ByteString once and cache the String form.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The next page token or empty if none.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The next page token or empty if none.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The next page token or empty if none.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The next page token or empty if none.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.tpu.v2alpha1.ListReservationsResponse)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.tpu.v2alpha1.ListReservationsResponse)
  // Singleton default (all-fields-empty) instance, created at class-load time.
  private static final com.google.cloud.tpu.v2alpha1.ListReservationsResponse DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.tpu.v2alpha1.ListReservationsResponse();
  }
  /** Returns the shared immutable default instance. */
  public static com.google.cloud.tpu.v2alpha1.ListReservationsResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser delegating to Builder.mergeFrom; partial-parse failures attach the
  // partially-built message to the thrown InvalidProtocolBufferException.
  private static final com.google.protobuf.Parser<ListReservationsResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListReservationsResponse>() {
        @java.lang.Override
        public ListReservationsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  /** Static accessor for the message parser. */
  public static com.google.protobuf.Parser<ListReservationsResponse> parser() {
    return PARSER;
  }
  // Instance accessor required by the Message interface.
  @java.lang.Override
  public com.google.protobuf.Parser<ListReservationsResponse> getParserForType() {
    return PARSER;
  }
  // Instance accessor required by the MessageOrBuilder interface.
  @java.lang.Override
  public com.google.cloud.tpu.v2alpha1.ListReservationsResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// ==== File boundary: third_party/dawn/third_party/protobuf/java/core/src/test/java/com/google/protobuf/MapLiteTest.java ====
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
package com.google.protobuf;
import static com.google.common.truth.Truth.assertThat;
import static com.google.common.truth.Truth.assertWithMessage;
import map_lite_test.MapTestProto.BizarroTestMap;
import map_lite_test.MapTestProto.TestMap;
import map_lite_test.MapTestProto.TestMap.MessageValue;
import map_lite_test.MapTestProto.TestMapOrBuilder;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/** Unit tests for map fields. */
@RunWith(JUnit4.class)
public final class MapLiteTest {
private void setMapValues(TestMap.Builder builder) {
builder
.putInt32ToInt32Field(1, 11)
.putInt32ToInt32Field(2, 22)
.putInt32ToInt32Field(3, 33)
.putInt32ToStringField(1, "11")
.putInt32ToStringField(2, "22")
.putInt32ToStringField(3, "33")
.putInt32ToBytesField(1, TestUtil.toBytes("11"))
.putInt32ToBytesField(2, TestUtil.toBytes("22"))
.putInt32ToBytesField(3, TestUtil.toBytes("33"))
.putInt32ToEnumField(1, TestMap.EnumValue.FOO)
.putInt32ToEnumField(2, TestMap.EnumValue.BAR)
.putInt32ToEnumField(3, TestMap.EnumValue.BAZ)
.putInt32ToMessageField(1, MessageValue.newBuilder().setValue(11).build())
.putInt32ToMessageField(2, MessageValue.newBuilder().setValue(22).build())
.putInt32ToMessageField(3, MessageValue.newBuilder().setValue(33).build())
.putStringToInt32Field("1", 11)
.putStringToInt32Field("2", 22)
.putStringToInt32Field("3", 33);
}
@Test
public void testSetMapValues() {
TestMap.Builder testMapBuilder = TestMap.newBuilder();
setMapValues(testMapBuilder);
TestMap testMap = testMapBuilder.build();
assertMapValuesSet(testMap);
}
private void copyMapValues(TestMap source, TestMap.Builder destination) {
destination
.putAllInt32ToInt32Field(source.getInt32ToInt32FieldMap())
.putAllInt32ToStringField(source.getInt32ToStringFieldMap())
.putAllInt32ToBytesField(source.getInt32ToBytesFieldMap())
.putAllInt32ToEnumField(source.getInt32ToEnumFieldMap())
.putAllInt32ToMessageField(source.getInt32ToMessageFieldMap())
.putAllStringToInt32Field(source.getStringToInt32FieldMap());
}
  /** Asserts {@code message} contains exactly the entries written by {@link #setMapValues}. */
  private void assertMapValuesSet(TestMap message) {
    assertThat(message.getInt32ToInt32FieldMap()).hasSize(3);
    assertThat(message.getInt32ToInt32FieldMap().get(1).intValue()).isEqualTo(11);
    assertThat(message.getInt32ToInt32FieldMap().get(2).intValue()).isEqualTo(22);
    assertThat(message.getInt32ToInt32FieldMap().get(3).intValue()).isEqualTo(33);
    assertThat(message.getInt32ToStringFieldMap()).hasSize(3);
    assertThat(message.getInt32ToStringFieldMap()).containsEntry(1, "11");
    assertThat(message.getInt32ToStringFieldMap()).containsEntry(2, "22");
    assertThat(message.getInt32ToStringFieldMap()).containsEntry(3, "33");
    assertThat(message.getInt32ToBytesFieldMap()).hasSize(3);
    assertThat(message.getInt32ToBytesFieldMap()).containsEntry(1, TestUtil.toBytes("11"));
    assertThat(message.getInt32ToBytesFieldMap()).containsEntry(2, TestUtil.toBytes("22"));
    assertThat(message.getInt32ToBytesFieldMap()).containsEntry(3, TestUtil.toBytes("33"));
    assertThat(message.getInt32ToEnumFieldMap()).hasSize(3);
    assertThat(message.getInt32ToEnumFieldMap()).containsEntry(1, TestMap.EnumValue.FOO);
    assertThat(message.getInt32ToEnumFieldMap()).containsEntry(2, TestMap.EnumValue.BAR);
    assertThat(message.getInt32ToEnumFieldMap()).containsEntry(3, TestMap.EnumValue.BAZ);
    assertThat(message.getInt32ToMessageFieldMap()).hasSize(3);
    assertThat(message.getInt32ToMessageFieldMap().get(1).getValue()).isEqualTo(11);
    assertThat(message.getInt32ToMessageFieldMap().get(2).getValue()).isEqualTo(22);
    assertThat(message.getInt32ToMessageFieldMap().get(3).getValue()).isEqualTo(33);
    assertThat(message.getStringToInt32FieldMap()).hasSize(3);
    assertThat(message.getStringToInt32FieldMap().get("1").intValue()).isEqualTo(11);
    assertThat(message.getStringToInt32FieldMap().get("2").intValue()).isEqualTo(22);
    assertThat(message.getStringToInt32FieldMap().get("3").intValue()).isEqualTo(33);
  }
private void updateMapValues(TestMap.Builder builder) {
builder
.putInt32ToInt32Field(1, 111)
.removeInt32ToInt32Field(2)
.putInt32ToInt32Field(4, 44)
.putInt32ToStringField(1, "111")
.removeInt32ToStringField(2)
.putInt32ToStringField(4, "44")
.putInt32ToBytesField(1, TestUtil.toBytes("111"))
.removeInt32ToBytesField(2)
.putInt32ToBytesField(4, TestUtil.toBytes("44"))
.putInt32ToEnumField(1, TestMap.EnumValue.BAR)
.removeInt32ToEnumField(2)
.putInt32ToEnumField(4, TestMap.EnumValue.QUX)
.putInt32ToMessageField(1, MessageValue.newBuilder().setValue(111).build())
.removeInt32ToMessageField(2)
.putInt32ToMessageField(4, MessageValue.newBuilder().setValue(44).build())
.putStringToInt32Field("1", 111)
.removeStringToInt32Field("2")
.putStringToInt32Field("4", 44);
}
@Test
public void testUpdateMapValues() {
TestMap.Builder mapBuilder = TestMap.newBuilder();
setMapValues(mapBuilder);
TestMap map = mapBuilder.build();
assertMapValuesSet(map);
mapBuilder = map.toBuilder();
updateMapValues(mapBuilder);
map = mapBuilder.build();
assertMapValuesUpdated(map);
}
  /** Asserts {@code message} reflects the mutations applied by {@link #updateMapValues}. */
  private void assertMapValuesUpdated(TestMap message) {
    assertThat(message.getInt32ToInt32FieldMap()).hasSize(3);
    assertThat(message.getInt32ToInt32FieldMap().get(1).intValue()).isEqualTo(111);
    assertThat(message.getInt32ToInt32FieldMap().get(3).intValue()).isEqualTo(33);
    assertThat(message.getInt32ToInt32FieldMap().get(4).intValue()).isEqualTo(44);
    assertThat(message.getInt32ToStringFieldMap()).hasSize(3);
    assertThat(message.getInt32ToStringFieldMap()).containsEntry(1, "111");
    assertThat(message.getInt32ToStringFieldMap()).containsEntry(3, "33");
    assertThat(message.getInt32ToStringFieldMap()).containsEntry(4, "44");
    assertThat(message.getInt32ToBytesFieldMap()).hasSize(3);
    assertThat(message.getInt32ToBytesFieldMap()).containsEntry(1, TestUtil.toBytes("111"));
    assertThat(message.getInt32ToBytesFieldMap()).containsEntry(3, TestUtil.toBytes("33"));
    assertThat(message.getInt32ToBytesFieldMap()).containsEntry(4, TestUtil.toBytes("44"));
    assertThat(message.getInt32ToEnumFieldMap()).hasSize(3);
    assertThat(message.getInt32ToEnumFieldMap()).containsEntry(1, TestMap.EnumValue.BAR);
    assertThat(message.getInt32ToEnumFieldMap()).containsEntry(3, TestMap.EnumValue.BAZ);
    assertThat(message.getInt32ToEnumFieldMap()).containsEntry(4, TestMap.EnumValue.QUX);
    assertThat(message.getInt32ToMessageFieldMap()).hasSize(3);
    assertThat(message.getInt32ToMessageFieldMap().get(1).getValue()).isEqualTo(111);
    assertThat(message.getInt32ToMessageFieldMap().get(3).getValue()).isEqualTo(33);
    assertThat(message.getInt32ToMessageFieldMap().get(4).getValue()).isEqualTo(44);
    assertThat(message.getStringToInt32FieldMap()).hasSize(3);
    assertThat(message.getStringToInt32FieldMap().get("1").intValue()).isEqualTo(111);
    assertThat(message.getStringToInt32FieldMap().get("3").intValue()).isEqualTo(33);
    assertThat(message.getStringToInt32FieldMap().get("4").intValue()).isEqualTo(44);
  }
  /** Asserts every map field of {@code testMapOrBuilder} is empty (works for message or builder). */
  private void assertMapValuesCleared(TestMapOrBuilder testMapOrBuilder) {
    assertThat(testMapOrBuilder.getInt32ToInt32FieldMap()).isEmpty();
    assertThat(testMapOrBuilder.getInt32ToInt32FieldCount()).isEqualTo(0);
    assertThat(testMapOrBuilder.getInt32ToStringFieldMap()).isEmpty();
    assertThat(testMapOrBuilder.getInt32ToStringFieldCount()).isEqualTo(0);
    assertThat(testMapOrBuilder.getInt32ToBytesFieldMap()).isEmpty();
    assertThat(testMapOrBuilder.getInt32ToBytesFieldCount()).isEqualTo(0);
    assertThat(testMapOrBuilder.getInt32ToEnumFieldMap()).isEmpty();
    assertThat(testMapOrBuilder.getInt32ToEnumFieldCount()).isEqualTo(0);
    assertThat(testMapOrBuilder.getInt32ToMessageFieldMap()).isEmpty();
    assertThat(testMapOrBuilder.getInt32ToMessageFieldCount()).isEqualTo(0);
    assertThat(testMapOrBuilder.getStringToInt32FieldMap()).isEmpty();
    assertThat(testMapOrBuilder.getStringToInt32FieldCount()).isEqualTo(0);
  }
  @Test
  public void testSanityCopyOnWrite() throws InvalidProtocolBufferException {
    // Since builders are implemented as a thin wrapper around a message
    // instance, we attempt to verify that we can't cause the builder to modify
    // a produced message.
    TestMap.Builder builder = TestMap.newBuilder();
    TestMap message = builder.build();
    builder.putInt32ToInt32Field(1, 2);
    // A put after build() must not leak into the already-built message.
    assertThat(message.getInt32ToInt32FieldMap()).isEmpty();
    assertThat(builder.getInt32ToInt32FieldMap()).isEqualTo(newMap(1, 2));
    message = builder.build();
    builder.putInt32ToInt32Field(2, 3);
    // Same check against the second snapshot: it keeps only {1: 2}.
    assertThat(message.getInt32ToInt32FieldMap()).isEqualTo(newMap(1, 2));
    assertThat(builder.getInt32ToInt32FieldMap()).isEqualTo(newMap(1, 2, 2, 3));
  }
  @Test
  public void testGetMapIsImmutable() {
    TestMap.Builder builder = TestMap.newBuilder();
    // Empty maps must already be immutable, on both the builder and the built message.
    assertMapsAreImmutable(builder);
    assertMapsAreImmutable(builder.build());
    // The same must hold after the maps are populated.
    setMapValues(builder);
    assertMapsAreImmutable(builder);
    assertMapsAreImmutable(builder.build());
  }
  /** Checks that every map view exposed by {@code testMapOrBuilder} rejects mutation. */
  private void assertMapsAreImmutable(TestMapOrBuilder testMapOrBuilder) {
    assertImmutable(testMapOrBuilder.getInt32ToInt32FieldMap(), 1, 2);
    assertImmutable(testMapOrBuilder.getInt32ToStringFieldMap(), 1, "2");
    assertImmutable(testMapOrBuilder.getInt32ToBytesFieldMap(), 1, TestUtil.toBytes("2"));
    assertImmutable(testMapOrBuilder.getInt32ToEnumFieldMap(), 1, TestMap.EnumValue.FOO);
    assertImmutable(
        testMapOrBuilder.getInt32ToMessageFieldMap(), 1, MessageValue.getDefaultInstance());
    assertImmutable(testMapOrBuilder.getStringToInt32FieldMap(), "1", 2);
  }
  /**
   * Asserts {@code map} throws {@link UnsupportedOperationException} on {@code put} and,
   * when non-empty, on removal through its entry set.
   */
  private <K, V> void assertImmutable(Map<K, V> map, K key, V value) {
    try {
      map.put(key, value);
      assertWithMessage("expected exception").fail();
    } catch (UnsupportedOperationException e) {
      // expected
    }
    if (!map.isEmpty()) {
      try {
        map.entrySet().remove(map.entrySet().iterator().next());
        assertWithMessage("expected exception").fail();
      } catch (UnsupportedOperationException e) {
        // expected
      }
    }
  }
@Test
public void testMapFieldClear() {
TestMap.Builder builder = TestMap.newBuilder().putInt32ToInt32Field(1, 2);
builder.clearInt32ToInt32Field();
assertThat(builder.getInt32ToInt32FieldCount()).isEqualTo(0);
}
  /** Verifies builder map mutations remain visible after intermediate build() calls. */
  @Test
  public void testMutableMapLifecycle() {
    TestMap.Builder builder = TestMap.newBuilder().putInt32ToInt32Field(1, 2);
    assertThat(builder.build().getInt32ToInt32FieldMap()).isEqualTo(newMap(1, 2));
    assertThat(builder.getInt32ToInt32FieldMap()).isEqualTo(newMap(1, 2));
    // Mutating after build() must still work on the builder side.
    builder.putInt32ToInt32Field(2, 3);
    assertThat(builder.getInt32ToInt32FieldMap()).isEqualTo(newMap(1, 2, 2, 3));
    builder.putInt32ToEnumField(1, TestMap.EnumValue.BAR);
    assertThat(builder.build().getInt32ToEnumFieldMap())
        .isEqualTo(newMap(1, TestMap.EnumValue.BAR));
    assertThat(builder.getInt32ToEnumFieldMap()).isEqualTo(newMap(1, TestMap.EnumValue.BAR));
    builder.putInt32ToEnumField(2, TestMap.EnumValue.FOO);
    assertThat(builder.getInt32ToEnumFieldMap())
        .isEqualTo(newMap(1, TestMap.EnumValue.BAR, 2, TestMap.EnumValue.FOO));
    builder.putInt32ToStringField(1, "1");
    assertThat(builder.build().getInt32ToStringFieldMap()).isEqualTo(newMap(1, "1"));
    assertThat(builder.getInt32ToStringFieldMap()).isEqualTo(newMap(1, "1"));
    builder.putInt32ToStringField(2, "2");
    assertThat(builder.getInt32ToStringFieldMap()).isEqualTo(newMap(1, "1", 2, "2"));
    builder.putInt32ToMessageField(1, TestMap.MessageValue.getDefaultInstance());
    assertThat(builder.build().getInt32ToMessageFieldMap())
        .isEqualTo(newMap(1, TestMap.MessageValue.getDefaultInstance()));
    assertThat(builder.getInt32ToMessageFieldMap())
        .isEqualTo(newMap(1, TestMap.MessageValue.getDefaultInstance()));
    builder.putInt32ToMessageField(2, TestMap.MessageValue.getDefaultInstance());
    assertThat(builder.getInt32ToMessageFieldMap())
        .isEqualTo(
            newMap(
                1,
                TestMap.MessageValue.getDefaultInstance(),
                2,
                TestMap.MessageValue.getDefaultInstance()));
  }
  /** Full lifecycle: empty -> set -> update -> clear, checked after each round trip. */
  @Test
  public void testGettersAndSetters() throws Exception {
    TestMap.Builder builder = TestMap.newBuilder();
    TestMap message = builder.build();
    assertMapValuesCleared(message);
    builder = message.toBuilder();
    setMapValues(builder);
    message = builder.build();
    assertMapValuesSet(message);
    builder = message.toBuilder();
    updateMapValues(builder);
    message = builder.build();
    assertMapValuesUpdated(message);
    builder = message.toBuilder();
    builder.clear();
    // clear() must empty both the builder view and the next built message.
    assertMapValuesCleared(builder);
    message = builder.build();
    assertMapValuesCleared(message);
  }
@Test
public void testPutAll() throws Exception {
TestMap.Builder sourceBuilder = TestMap.newBuilder();
setMapValues(sourceBuilder);
TestMap source = sourceBuilder.build();
assertMapValuesSet(source);
TestMap.Builder destination = TestMap.newBuilder();
copyMapValues(source, destination);
assertMapValuesSet(destination.build());
}
@Test
public void testPutAllForUnknownEnumValues() throws Exception {
TestMap.Builder sourceBuilder =
TestMap.newBuilder()
.putInt32ToEnumFieldValue(0, 0)
.putInt32ToEnumFieldValue(1, 1)
.putAllInt32ToEnumFieldValue(newMap(2, 1000)); // unknown value.
TestMap source = sourceBuilder.build();
TestMap.Builder destinationBuilder = TestMap.newBuilder();
destinationBuilder.putAllInt32ToEnumFieldValue(source.getInt32ToEnumFieldValueMap());
TestMap destination = destinationBuilder.build();
assertThat(destination.getInt32ToEnumFieldValueMap().get(0).intValue()).isEqualTo(0);
assertThat(destination.getInt32ToEnumFieldValueMap().get(1).intValue()).isEqualTo(1);
assertThat(destination.getInt32ToEnumFieldValueMap().get(2).intValue()).isEqualTo(1000);
assertThat(destination.getInt32ToEnumFieldCount()).isEqualTo(3);
}
@Test
public void testPutForUnknownEnumValues() throws Exception {
TestMap builder =
TestMap.newBuilder()
.putInt32ToEnumFieldValue(0, 0)
.putInt32ToEnumFieldValue(1, 1)
.putInt32ToEnumFieldValue(2, 1000)
.build(); // unknown value.
TestMap message = builder;
assertThat(message.getInt32ToEnumFieldValueOrThrow(0)).isEqualTo(0);
assertThat(message.getInt32ToEnumFieldValueOrThrow(1)).isEqualTo(1);
assertThat(message.getInt32ToEnumFieldValueOrThrow(2)).isEqualTo(1000);
assertThat(message.getInt32ToEnumFieldCount()).isEqualTo(3);
}
@Test
public void testPutChecksNullKeysAndValues() throws Exception {
TestMap.Builder builder = TestMap.newBuilder();
try {
builder.putInt32ToStringField(1, null);
assertWithMessage("expected exception").fail();
} catch (NullPointerException e) {
// expected.
}
try {
builder.putInt32ToBytesField(1, null);
assertWithMessage("expected exception").fail();
} catch (NullPointerException e) {
// expected.
}
try {
builder.putInt32ToEnumField(1, null);
assertWithMessage("expected exception").fail();
} catch (NullPointerException e) {
// expected.
}
try {
builder.putInt32ToMessageField(1, null);
assertWithMessage("expected exception").fail();
} catch (NullPointerException e) {
// expected.
}
try {
builder.putStringToInt32Field(null, 1);
assertWithMessage("expected exception").fail();
} catch (NullPointerException e) {
// expected.
}
}
@Test
public void testSerializeAndParse() throws Exception {
TestMap.Builder builder = TestMap.newBuilder();
setMapValues(builder);
TestMap message = builder.build();
assertThat(message.toByteString().size()).isEqualTo(message.getSerializedSize());
message = TestMap.parseFrom(message.toByteString());
assertMapValuesSet(message);
builder = message.toBuilder();
updateMapValues(builder);
message = builder.build();
assertThat(message.toByteString().size()).isEqualTo(message.getSerializedSize());
message = TestMap.parseFrom(message.toByteString());
assertMapValuesUpdated(message);
builder = message.toBuilder();
builder.clear();
message = builder.build();
assertThat(message.toByteString().size()).isEqualTo(message.getSerializedSize());
message = TestMap.parseFrom(message.toByteString());
assertMapValuesCleared(message);
}
private TestMap tryParseTestMap(BizarroTestMap bizarroMap) throws IOException {
ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
CodedOutputStream output = CodedOutputStream.newInstance(byteArrayOutputStream);
bizarroMap.writeTo(output);
output.flush();
return TestMap.parseFrom(ByteString.copyFrom(byteArrayOutputStream.toByteArray()));
}
@Test
public void testParseError() throws Exception {
ByteString bytes = TestUtil.toBytes("SOME BYTES");
String stringKey = "a string key";
TestMap map =
tryParseTestMap(BizarroTestMap.newBuilder().putInt32ToInt32Field(5, bytes).build());
assertThat(map.getInt32ToInt32FieldOrDefault(5, -1)).isEqualTo(0);
map = tryParseTestMap(BizarroTestMap.newBuilder().putInt32ToStringField(stringKey, 5).build());
assertThat(map.getInt32ToStringFieldOrDefault(0, null)).isEmpty();
map = tryParseTestMap(BizarroTestMap.newBuilder().putInt32ToBytesField(stringKey, 5).build());
assertThat(ByteString.EMPTY).isEqualTo(map.getInt32ToBytesFieldOrDefault(0, null));
map =
tryParseTestMap(BizarroTestMap.newBuilder().putInt32ToEnumField(stringKey, bytes).build());
assertThat(map.getInt32ToEnumFieldOrDefault(0, null)).isEqualTo(TestMap.EnumValue.FOO);
try {
tryParseTestMap(BizarroTestMap.newBuilder().putInt32ToMessageField(stringKey, bytes).build());
assertWithMessage("expected exception").fail();
} catch (InvalidProtocolBufferException expected) {
assertThat(expected.getUnfinishedMessage()).isInstanceOf(TestMap.class);
map = (TestMap) expected.getUnfinishedMessage();
assertThat(map.getInt32ToMessageFieldMap()).isEmpty();
}
map =
tryParseTestMap(
BizarroTestMap.newBuilder().putStringToInt32Field(stringKey, bytes).build());
assertThat(map.getStringToInt32FieldOrDefault(stringKey, -1)).isEqualTo(0);
}
@Test
public void testMergeFrom() throws Exception {
TestMap.Builder builder = TestMap.newBuilder();
setMapValues(builder);
TestMap message = builder.build();
TestMap.Builder other = TestMap.newBuilder();
other.mergeFrom(message);
assertMapValuesSet(other.build());
}
@Test
public void testEqualsAndHashCode() throws Exception {
// Test that generated equals() and hashCode() will disregard the order
// of map entries when comparing/hashing map fields.
// We can't control the order of elements in a HashMap. The best we can do
// here is to add elements in different order.
TestMap b1 =
TestMap.newBuilder()
.putInt32ToInt32Field(1, 2)
.putInt32ToInt32Field(3, 4)
.putInt32ToInt32Field(5, 6)
.build();
TestMap m1 = b1;
TestMap.Builder b2 =
TestMap.newBuilder()
.putInt32ToInt32Field(5, 6)
.putInt32ToInt32Field(1, 2)
.putInt32ToInt32Field(3, 4);
TestMap m2 = b2.build();
assertThat(m2).isEqualTo(m1);
assertThat(m2.hashCode()).isEqualTo(m1.hashCode());
// Make sure we did compare map fields.
b2.putInt32ToInt32Field(1, 0);
m2 = b2.build();
assertThat(m1.equals(m2)).isFalse();
// Don't check m1.hashCode() != m2.hashCode() because it's not guaranteed
// to be different.
// Regression test for b/18549190: if a map is a subset of the other map,
// equals() should return false.
b2.removeInt32ToInt32Field(1);
m2 = b2.build();
assertThat(m1.equals(m2)).isFalse();
assertThat(m2.equals(m1)).isFalse();
}
@Test
@SuppressWarnings("ProtoNewBuilderMergeFrom")
public void testUnknownEnumValues() throws Exception {
TestMap.Builder builder =
TestMap.newBuilder()
.putInt32ToEnumFieldValue(0, 0)
.putInt32ToEnumFieldValue(1, 1)
.putInt32ToEnumFieldValue(2, 1000); // unknown value.
TestMap message = builder.build();
assertThat(message.getInt32ToEnumFieldMap()).containsEntry(0, TestMap.EnumValue.FOO);
assertThat(message.getInt32ToEnumFieldMap()).containsEntry(1, TestMap.EnumValue.BAR);
assertThat(message.getInt32ToEnumFieldMap()).containsEntry(2, TestMap.EnumValue.UNRECOGNIZED);
builder.putAllInt32ToEnumFieldValue(newMap(2, 1000)); // unknown value.
message = builder.build();
assertThat(message.getInt32ToEnumFieldMap()).containsEntry(2, TestMap.EnumValue.UNRECOGNIZED);
// Unknown enum values should be preserved after:
// 1. Serialization and parsing.
// 2. toBuild().
// 3. mergeFrom().
message = TestMap.parseFrom(message.toByteString(), ExtensionRegistryLite.getEmptyRegistry());
assertThat(message.getInt32ToEnumFieldValueMap().get(2).intValue()).isEqualTo(1000);
builder = message.toBuilder();
assertThat(builder.getInt32ToEnumFieldValueMap().get(2).intValue()).isEqualTo(1000);
builder = TestMap.newBuilder().mergeFrom(message);
assertThat(builder.getInt32ToEnumFieldValueMap().get(2).intValue()).isEqualTo(1000);
// hashCode()/equals() should take unknown enum values into account.
builder.putAllInt32ToEnumFieldValue(newMap(2, 1001));
TestMap message2 = builder.build();
assertThat(message.hashCode()).isNotEqualTo(message2.hashCode());
assertThat(message.equals(message2)).isFalse();
// Unknown values will be converted to UNRECOGNIZED so the resulted enum map
// should be the same.
assertThat(message.getInt32ToEnumFieldMap()).isEqualTo(message2.getInt32ToEnumFieldMap());
}
@Test
public void testIterationOrder() throws Exception {
TestMap.Builder builder = TestMap.newBuilder();
setMapValues(builder);
TestMap message = builder.build();
assertThat(new ArrayList<>(message.getStringToInt32FieldMap().keySet()))
.containsExactly("1", "2", "3")
.inOrder();
}
@Test
public void testGetMap() {
TestMap.Builder builder = TestMap.newBuilder();
setMapValues(builder);
TestMap message = builder.build();
assertThat(message.getStringToInt32FieldMap()).isEqualTo(message.getStringToInt32FieldMap());
assertThat(message.getInt32ToBytesFieldMap()).isEqualTo(message.getInt32ToBytesFieldMap());
assertThat(message.getInt32ToEnumFieldMap()).isEqualTo(message.getInt32ToEnumFieldMap());
assertThat(message.getInt32ToEnumFieldValueMap())
.isEqualTo(message.getInt32ToEnumFieldValueMap());
assertThat(message.getInt32ToMessageFieldMap()).isEqualTo(message.getInt32ToMessageFieldMap());
}
@Test
public void testContains() {
TestMap.Builder builder = TestMap.newBuilder();
setMapValues(builder);
assertMapContainsSetValues(builder);
assertMapContainsSetValues(builder.build());
}
private void assertMapContainsSetValues(TestMapOrBuilder testMapOrBuilder) {
assertThat(testMapOrBuilder.containsInt32ToInt32Field(1)).isTrue();
assertThat(testMapOrBuilder.containsInt32ToInt32Field(2)).isTrue();
assertThat(testMapOrBuilder.containsInt32ToInt32Field(3)).isTrue();
assertThat(testMapOrBuilder.containsInt32ToInt32Field(-1)).isFalse();
assertThat(testMapOrBuilder.containsInt32ToStringField(1)).isTrue();
assertThat(testMapOrBuilder.containsInt32ToStringField(2)).isTrue();
assertThat(testMapOrBuilder.containsInt32ToStringField(3)).isTrue();
assertThat(testMapOrBuilder.containsInt32ToStringField(-1)).isFalse();
assertThat(testMapOrBuilder.containsInt32ToBytesField(1)).isTrue();
assertThat(testMapOrBuilder.containsInt32ToBytesField(2)).isTrue();
assertThat(testMapOrBuilder.containsInt32ToBytesField(3)).isTrue();
assertThat(testMapOrBuilder.containsInt32ToBytesField(-1)).isFalse();
assertThat(testMapOrBuilder.containsInt32ToEnumField(1)).isTrue();
assertThat(testMapOrBuilder.containsInt32ToEnumField(2)).isTrue();
assertThat(testMapOrBuilder.containsInt32ToEnumField(3)).isTrue();
assertThat(testMapOrBuilder.containsInt32ToEnumField(-1)).isFalse();
assertThat(testMapOrBuilder.containsInt32ToMessageField(1)).isTrue();
assertThat(testMapOrBuilder.containsInt32ToMessageField(2)).isTrue();
assertThat(testMapOrBuilder.containsInt32ToMessageField(3)).isTrue();
assertThat(testMapOrBuilder.containsInt32ToMessageField(-1)).isFalse();
assertThat(testMapOrBuilder.containsStringToInt32Field("1")).isTrue();
assertThat(testMapOrBuilder.containsStringToInt32Field("2")).isTrue();
assertThat(testMapOrBuilder.containsStringToInt32Field("3")).isTrue();
assertThat(testMapOrBuilder.containsStringToInt32Field("-1")).isFalse();
}
@Test
public void testCount() {
TestMap.Builder builder = TestMap.newBuilder();
assertMapCounts(0, builder);
setMapValues(builder);
assertMapCounts(3, builder);
TestMap message = builder.build();
assertMapCounts(3, message);
builder = message.toBuilder().putInt32ToInt32Field(4, 44);
assertThat(builder.getInt32ToInt32FieldCount()).isEqualTo(4);
assertThat(builder.build().getInt32ToInt32FieldCount()).isEqualTo(4);
// already present - should be unchanged
builder.putInt32ToInt32Field(4, 44);
assertThat(builder.getInt32ToInt32FieldCount()).isEqualTo(4);
}
private void assertMapCounts(int expectedCount, TestMapOrBuilder testMapOrBuilder) {
assertThat(testMapOrBuilder.getInt32ToInt32FieldCount()).isEqualTo(expectedCount);
assertThat(testMapOrBuilder.getInt32ToStringFieldCount()).isEqualTo(expectedCount);
assertThat(testMapOrBuilder.getInt32ToBytesFieldCount()).isEqualTo(expectedCount);
assertThat(testMapOrBuilder.getInt32ToEnumFieldCount()).isEqualTo(expectedCount);
assertThat(testMapOrBuilder.getInt32ToMessageFieldCount()).isEqualTo(expectedCount);
assertThat(testMapOrBuilder.getStringToInt32FieldCount()).isEqualTo(expectedCount);
}
@Test
public void testGetOrDefault() {
TestMap.Builder builder = TestMap.newBuilder();
assertMapCounts(0, builder);
setMapValues(builder);
doTestGetOrDefault(builder);
doTestGetOrDefault(builder.build());
}
public void doTestGetOrDefault(TestMapOrBuilder testMapOrBuilder) {
assertThat(testMapOrBuilder.getInt32ToInt32FieldOrDefault(1, -11)).isEqualTo(11);
assertThat(testMapOrBuilder.getInt32ToInt32FieldOrDefault(-1, -11)).isEqualTo(-11);
assertThat(testMapOrBuilder.getInt32ToStringFieldOrDefault(1, "-11")).isEqualTo("11");
assertWithMessage("-11")
.that(testMapOrBuilder.getInt32ToStringFieldOrDefault(-1, null))
.isNull();
assertThat(testMapOrBuilder.getInt32ToBytesFieldOrDefault(1, null))
.isEqualTo(TestUtil.toBytes("11"));
assertThat(testMapOrBuilder.getInt32ToBytesFieldOrDefault(-1, null)).isNull();
assertThat(testMapOrBuilder.getInt32ToEnumFieldOrDefault(1, null))
.isEqualTo(TestMap.EnumValue.FOO);
assertThat(testMapOrBuilder.getInt32ToEnumFieldOrDefault(-1, null)).isNull();
assertThat(testMapOrBuilder.getInt32ToEnumFieldValueOrDefault(2, -1))
.isEqualTo(TestMap.EnumValue.BAR.getNumber());
assertThat(testMapOrBuilder.getInt32ToEnumFieldValueOrDefault(-1000, -1)).isEqualTo(-1);
assertThat(testMapOrBuilder.getInt32ToMessageFieldOrDefault(1, null))
.isEqualTo(MessageValue.newBuilder().setValue(11).build());
assertThat(testMapOrBuilder.getInt32ToMessageFieldOrDefault(-1, null)).isNull();
assertThat(testMapOrBuilder.getStringToInt32FieldOrDefault("1", -11)).isEqualTo(11);
assertThat(testMapOrBuilder.getStringToInt32FieldOrDefault("-1", -11)).isEqualTo(-11);
try {
testMapOrBuilder.getStringToInt32FieldOrDefault(null, -11);
assertWithMessage("expected exception").fail();
} catch (NullPointerException e) {
// expected
}
}
@Test
public void testGetOrThrow() {
TestMap.Builder builder = TestMap.newBuilder();
assertMapCounts(0, builder);
setMapValues(builder);
doTestGetOrDefault(builder);
doTestGetOrDefault(builder.build());
}
public void doTestGetOrThrow(TestMapOrBuilder testMapOrBuilder) {
assertThat(testMapOrBuilder.getInt32ToInt32FieldOrThrow(1)).isEqualTo(11);
try {
testMapOrBuilder.getInt32ToInt32FieldOrThrow(-1);
assertWithMessage("expected exception").fail();
} catch (IllegalArgumentException e) {
// expected
}
assertThat(testMapOrBuilder.getInt32ToStringFieldOrThrow(1)).isEqualTo("11");
try {
testMapOrBuilder.getInt32ToStringFieldOrThrow(-1);
assertWithMessage("expected exception").fail();
} catch (IllegalArgumentException e) {
// expected
}
assertThat(testMapOrBuilder.getInt32ToBytesFieldOrThrow(1)).isEqualTo(TestUtil.toBytes("11"));
try {
testMapOrBuilder.getInt32ToBytesFieldOrThrow(-1);
assertWithMessage("expected exception").fail();
} catch (IllegalArgumentException e) {
// expected
}
assertThat(testMapOrBuilder.getInt32ToEnumFieldOrThrow(1)).isEqualTo(TestMap.EnumValue.FOO);
try {
testMapOrBuilder.getInt32ToEnumFieldOrThrow(-1);
assertWithMessage("expected exception").fail();
} catch (IllegalArgumentException e) {
// expected
}
assertThat(testMapOrBuilder.getInt32ToEnumFieldValueOrThrow(2))
.isEqualTo(TestMap.EnumValue.BAR.getNumber());
try {
testMapOrBuilder.getInt32ToEnumFieldValueOrThrow(-1);
assertWithMessage("expected exception").fail();
} catch (IllegalArgumentException e) {
// expected
}
assertThat(testMapOrBuilder.getInt32ToMessageFieldOrThrow(1))
.isEqualTo(MessageValue.newBuilder().setValue(11).build());
try {
testMapOrBuilder.getInt32ToMessageFieldOrThrow(-1);
assertWithMessage("expected exception").fail();
} catch (IllegalArgumentException e) {
// expected
}
assertThat(testMapOrBuilder.getStringToInt32FieldOrThrow("1")).isEqualTo(11);
try {
testMapOrBuilder.getStringToInt32FieldOrThrow("-1");
assertWithMessage("expected exception").fail();
} catch (IllegalArgumentException e) {
// expected
}
try {
testMapOrBuilder.getStringToInt32FieldOrThrow(null);
assertWithMessage("expected exception").fail();
} catch (NullPointerException e) {
// expected
}
}
@Test
public void testPut() {
TestMap.Builder builder = TestMap.newBuilder();
builder.putInt32ToInt32Field(1, 11);
assertThat(builder.getInt32ToInt32FieldOrThrow(1)).isEqualTo(11);
builder.putInt32ToStringField(1, "a");
assertThat(builder.getInt32ToStringFieldOrThrow(1)).isEqualTo("a");
try {
builder.putInt32ToStringField(1, null);
assertWithMessage("expected exception").fail();
} catch (NullPointerException e) {
// expected
}
builder.putInt32ToBytesField(1, TestUtil.toBytes("11"));
assertThat(builder.getInt32ToBytesFieldOrThrow(1)).isEqualTo(TestUtil.toBytes("11"));
try {
builder.putInt32ToBytesField(1, null);
assertWithMessage("expected exception").fail();
} catch (NullPointerException e) {
// expected
}
builder.putInt32ToEnumField(1, TestMap.EnumValue.FOO);
assertThat(builder.getInt32ToEnumFieldOrThrow(1)).isEqualTo(TestMap.EnumValue.FOO);
try {
builder.putInt32ToEnumField(1, null);
assertWithMessage("expected exception").fail();
} catch (NullPointerException e) {
// expected
}
builder.putStringToInt32Field("a", 1);
assertThat(builder.getStringToInt32FieldOrThrow("a")).isEqualTo(1);
try {
builder.putStringToInt32Field(null, -1);
} catch (NullPointerException e) {
// expected
}
}
@Test
public void testRemove() {
TestMap.Builder builder = TestMap.newBuilder();
setMapValues(builder);
assertThat(builder.getInt32ToInt32FieldOrThrow(1)).isEqualTo(11);
for (int times = 0; times < 2; times++) {
builder.removeInt32ToInt32Field(1);
assertThat(builder.getInt32ToInt32FieldOrDefault(1, -1)).isEqualTo(-1);
}
assertThat(builder.getInt32ToStringFieldOrThrow(1)).isEqualTo("11");
for (int times = 0; times < 2; times++) {
builder.removeInt32ToStringField(1);
assertThat(builder.getInt32ToStringFieldOrDefault(1, null)).isNull();
}
assertThat(builder.getInt32ToBytesFieldOrThrow(1)).isEqualTo(TestUtil.toBytes("11"));
for (int times = 0; times < 2; times++) {
builder.removeInt32ToBytesField(1);
assertThat(builder.getInt32ToBytesFieldOrDefault(1, null)).isNull();
}
assertThat(builder.getInt32ToEnumFieldOrThrow(1)).isEqualTo(TestMap.EnumValue.FOO);
for (int times = 0; times < 2; times++) {
builder.removeInt32ToEnumField(1);
assertThat(builder.getInt32ToEnumFieldOrDefault(1, null)).isNull();
}
assertThat(builder.getStringToInt32FieldOrThrow("1")).isEqualTo(11);
for (int times = 0; times < 2; times++) {
builder.removeStringToInt32Field("1");
assertThat(builder.getStringToInt32FieldOrDefault("1", -1)).isEqualTo(-1);
}
try {
builder.removeStringToInt32Field(null);
assertWithMessage("expected exception").fail();
} catch (NullPointerException e) {
// expected
}
}
private static <K, V> Map<K, V> newMap(K key1, V value1) {
Map<K, V> map = new HashMap<>();
map.put(key1, value1);
return map;
}
private static <K, V> Map<K, V> newMap(K key1, V value1, K key2, V value2) {
Map<K, V> map = new HashMap<>();
map.put(key1, value1);
map.put(key2, value2);
return map;
}
@Test
public void testMap_withNulls() {
TestMap.Builder builder = TestMap.newBuilder();
try {
builder.putStringToInt32Field(null, 3);
assertWithMessage("expected exception").fail();
} catch (NullPointerException expected) {
}
try {
builder.putAllStringToInt32Field(newMap(null, 3, "hi", 4));
assertWithMessage("expected exception").fail();
} catch (NullPointerException expected) {
}
try {
builder.putInt32ToMessageField(3, null);
assertWithMessage("expected exception").fail();
} catch (NullPointerException expected) {
}
try {
builder.putAllInt32ToMessageField(
MapLiteTest.<Integer, MessageValue>newMap(4, null, 5, null));
assertWithMessage("expected exception").fail();
} catch (NullPointerException expected) {
}
try {
builder.putAllInt32ToMessageField(null);
assertWithMessage("expected exception").fail();
} catch (NullPointerException expected) {
}
assertThat(builder.build().toByteArray()).isEqualTo(new byte[0]);
}
}
|
apache/hadoop-common | 35,973 | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.hadoop.security;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import org.apache.hadoop.security.SaslRpcServer.AuthMethod;
import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
import org.apache.hadoop.security.authentication.util.KerberosName;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.util.Shell;
import org.junit.*;
import javax.security.auth.Subject;
import javax.security.auth.kerberos.KerberosPrincipal;
import javax.security.auth.login.AppConfigurationEntry;
import javax.security.auth.login.LoginContext;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.lang.reflect.Method;
import java.security.PrivilegedExceptionAction;
import java.util.Collection;
import java.util.ConcurrentModificationException;
import java.util.LinkedHashSet;
import java.util.Set;
import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_USER_GROUP_METRICS_PERCENTILES_INTERVALS;
import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTH_TO_LOCAL;
import static org.apache.hadoop.ipc.TestSaslRPC.*;
import static org.apache.hadoop.test.MetricsAsserts.*;
import static org.junit.Assert.*;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class TestUserGroupInformation {
final private static String USER_NAME = "user1@HADOOP.APACHE.ORG";
final private static String GROUP1_NAME = "group1";
final private static String GROUP2_NAME = "group2";
final private static String GROUP3_NAME = "group3";
final private static String[] GROUP_NAMES =
new String[]{GROUP1_NAME, GROUP2_NAME, GROUP3_NAME};
// Rollover interval of percentile metrics (in seconds)
private static final int PERCENTILES_INTERVAL = 1;
private static Configuration conf;
/**
* UGI should not use the default security conf, else it will collide
* with other classes that may change the default conf. Using this dummy
* class that simply throws an exception will ensure that the tests fail
* if UGI uses the static default config instead of its own config
*/
private static class DummyLoginConfiguration extends
javax.security.auth.login.Configuration
{
@Override
public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
throw new RuntimeException("UGI is not using its own security conf!");
}
}
/** configure ugi */
@BeforeClass
public static void setup() {
javax.security.auth.login.Configuration.setConfiguration(
new DummyLoginConfiguration());
// doesn't matter what it is, but getGroups needs it set...
// use HADOOP_HOME environment variable to prevent interfering with logic
// that finds winutils.exe
String home = System.getenv("HADOOP_HOME");
System.setProperty("hadoop.home.dir", (home != null ? home : "."));
// fake the realm is kerberos is enabled
System.setProperty("java.security.krb5.kdc", "");
System.setProperty("java.security.krb5.realm", "DEFAULT.REALM");
}
@Before
public void setupUgi() {
conf = new Configuration();
UserGroupInformation.reset();
UserGroupInformation.setConfiguration(conf);
}
@After
public void resetUgi() {
UserGroupInformation.setLoginUser(null);
}
  /** SIMPLE login is supported and should succeed. */
  @Test (timeout = 30000)
  public void testSimpleLogin() throws IOException {
    tryLoginAuthenticationMethod(AuthenticationMethod.SIMPLE, true);
  }
  /** TOKEN is not a login method; attempting it must fail. */
  @Test (timeout = 30000)
  public void testTokenLogin() throws IOException {
    tryLoginAuthenticationMethod(AuthenticationMethod.TOKEN, false);
  }
  /** PROXY is not a login method; attempting it must fail. */
  @Test (timeout = 30000)
  public void testProxyLogin() throws IOException {
    tryLoginAuthenticationMethod(AuthenticationMethod.PROXY, false);
  }
private void tryLoginAuthenticationMethod(AuthenticationMethod method,
boolean expectSuccess)
throws IOException {
SecurityUtil.setAuthenticationMethod(method, conf);
UserGroupInformation.setConfiguration(conf); // pick up changed auth
UserGroupInformation ugi = null;
Exception ex = null;
try {
ugi = UserGroupInformation.getLoginUser();
} catch (Exception e) {
ex = e;
}
if (expectSuccess) {
assertNotNull(ugi);
assertEquals(method, ugi.getAuthenticationMethod());
} else {
assertNotNull(ex);
assertEquals(UnsupportedOperationException.class, ex.getClass());
assertEquals(method + " login authentication is not supported",
ex.getMessage());
}
}
@Test (timeout = 30000)
public void testGetRealAuthenticationMethod() {
UserGroupInformation ugi = UserGroupInformation.createRemoteUser("user1");
ugi.setAuthenticationMethod(AuthenticationMethod.SIMPLE);
assertEquals(AuthenticationMethod.SIMPLE, ugi.getAuthenticationMethod());
assertEquals(AuthenticationMethod.SIMPLE, ugi.getRealAuthenticationMethod());
ugi = UserGroupInformation.createProxyUser("user2", ugi);
assertEquals(AuthenticationMethod.PROXY, ugi.getAuthenticationMethod());
assertEquals(AuthenticationMethod.SIMPLE, ugi.getRealAuthenticationMethod());
}
@Test (timeout = 30000)
public void testCreateRemoteUser() {
UserGroupInformation ugi = UserGroupInformation.createRemoteUser("user1");
assertEquals(AuthenticationMethod.SIMPLE, ugi.getAuthenticationMethod());
assertTrue (ugi.toString().contains("(auth:SIMPLE)"));
ugi = UserGroupInformation.createRemoteUser("user1",
AuthMethod.KERBEROS);
assertEquals(AuthenticationMethod.KERBEROS, ugi.getAuthenticationMethod());
assertTrue (ugi.toString().contains("(auth:KERBEROS)"));
}
/** Test login method */
@Test (timeout = 30000)
public void testLogin() throws Exception {
conf.set(HADOOP_USER_GROUP_METRICS_PERCENTILES_INTERVALS,
String.valueOf(PERCENTILES_INTERVAL));
UserGroupInformation.setConfiguration(conf);
// login from unix
UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
assertEquals(UserGroupInformation.getCurrentUser(),
UserGroupInformation.getLoginUser());
assertTrue(ugi.getGroupNames().length >= 1);
verifyGroupMetrics(1);
// ensure that doAs works correctly
UserGroupInformation userGroupInfo =
UserGroupInformation.createUserForTesting(USER_NAME, GROUP_NAMES);
UserGroupInformation curUGI =
userGroupInfo.doAs(new PrivilegedExceptionAction<UserGroupInformation>(){
@Override
public UserGroupInformation run() throws IOException {
return UserGroupInformation.getCurrentUser();
}});
// make sure in the scope of the doAs, the right user is current
assertEquals(curUGI, userGroupInfo);
// make sure it is not the same as the login user
assertFalse(curUGI.equals(UserGroupInformation.getLoginUser()));
}
/**
* given user name - get all the groups.
* Needs to happen before creating the test users
*/
@Test (timeout = 30000)
public void testGetServerSideGroups() throws IOException,
InterruptedException {
// get the user name
Process pp = Runtime.getRuntime().exec("whoami");
BufferedReader br = new BufferedReader
(new InputStreamReader(pp.getInputStream()));
String userName = br.readLine().trim();
// If on windows domain, token format is DOMAIN\\user and we want to
// extract only the user name
if(Shell.WINDOWS) {
int sp = userName.lastIndexOf('\\');
if (sp != -1) {
userName = userName.substring(sp + 1);
}
// user names are case insensitive on Windows. Make consistent
userName = userName.toLowerCase();
}
// get the groups
pp = Runtime.getRuntime().exec(Shell.WINDOWS ?
Shell.WINUTILS + " groups -F" : "id -Gn");
br = new BufferedReader(new InputStreamReader(pp.getInputStream()));
String line = br.readLine();
System.out.println(userName + ":" + line);
Set<String> groups = new LinkedHashSet<String> ();
String[] tokens = line.split(Shell.TOKEN_SEPARATOR_REGEX);
for(String s: tokens) {
groups.add(s);
}
final UserGroupInformation login = UserGroupInformation.getCurrentUser();
String loginUserName = login.getShortUserName();
if(Shell.WINDOWS) {
// user names are case insensitive on Windows. Make consistent
loginUserName = loginUserName.toLowerCase();
}
assertEquals(userName, loginUserName);
String[] gi = login.getGroupNames();
assertEquals(groups.size(), gi.length);
for(int i=0; i < gi.length; i++) {
assertTrue(groups.contains(gi[i]));
}
final UserGroupInformation fakeUser =
UserGroupInformation.createRemoteUser("foo.bar");
fakeUser.doAs(new PrivilegedExceptionAction<Object>(){
@Override
public Object run() throws IOException {
UserGroupInformation current = UserGroupInformation.getCurrentUser();
assertFalse(current.equals(login));
assertEquals(current, fakeUser);
assertEquals(0, current.getGroupNames().length);
return null;
}});
}
/** test constructor */
@Test (timeout = 30000)
public void testConstructor() throws Exception {
// security off, so default should just return simple name
testConstructorSuccess("user1", "user1");
testConstructorSuccess("user2@DEFAULT.REALM", "user2");
testConstructorSuccess("user3/cron@DEFAULT.REALM", "user3");
testConstructorSuccess("user4@OTHER.REALM", "user4");
testConstructorSuccess("user5/cron@OTHER.REALM", "user5");
// failure test
testConstructorFailures(null);
testConstructorFailures("");
}
/** test constructor */
@Test (timeout = 30000)
public void testConstructorWithRules() throws Exception {
// security off, but use rules if explicitly set
conf.set(HADOOP_SECURITY_AUTH_TO_LOCAL,
"RULE:[1:$1@$0](.*@OTHER.REALM)s/(.*)@.*/other-$1/");
UserGroupInformation.setConfiguration(conf);
testConstructorSuccess("user1", "user1");
testConstructorSuccess("user4@OTHER.REALM", "other-user4");
// failure test
testConstructorFailures("user2@DEFAULT.REALM");
testConstructorFailures("user3/cron@DEFAULT.REALM");
testConstructorFailures("user5/cron@OTHER.REALM");
testConstructorFailures(null);
testConstructorFailures("");
}
/** test constructor */
@Test (timeout = 30000)
public void testConstructorWithKerberos() throws Exception {
// security on, default is remove default realm
SecurityUtil.setAuthenticationMethod(AuthenticationMethod.KERBEROS, conf);
UserGroupInformation.setConfiguration(conf);
testConstructorSuccess("user1", "user1");
testConstructorSuccess("user2@DEFAULT.REALM", "user2");
testConstructorSuccess("user3/cron@DEFAULT.REALM", "user3");
// failure test
testConstructorFailures("user4@OTHER.REALM");
testConstructorFailures("user5/cron@OTHER.REALM");
testConstructorFailures(null);
testConstructorFailures("");
}
/** test constructor */
@Test (timeout = 30000)
public void testConstructorWithKerberosRules() throws Exception {
// security on, explicit rules
SecurityUtil.setAuthenticationMethod(AuthenticationMethod.KERBEROS, conf);
conf.set(HADOOP_SECURITY_AUTH_TO_LOCAL,
"RULE:[2:$1@$0](.*@OTHER.REALM)s/(.*)@.*/other-$1/" +
"RULE:[1:$1@$0](.*@OTHER.REALM)s/(.*)@.*/other-$1/" +
"DEFAULT");
UserGroupInformation.setConfiguration(conf);
testConstructorSuccess("user1", "user1");
testConstructorSuccess("user2@DEFAULT.REALM", "user2");
testConstructorSuccess("user3/cron@DEFAULT.REALM", "user3");
testConstructorSuccess("user4@OTHER.REALM", "other-user4");
testConstructorSuccess("user5/cron@OTHER.REALM", "other-user5");
// failure test
testConstructorFailures(null);
testConstructorFailures("");
}
private void testConstructorSuccess(String principal, String shortName) {
UserGroupInformation ugi =
UserGroupInformation.createUserForTesting(principal, GROUP_NAMES);
// make sure the short and full user names are correct
assertEquals(principal, ugi.getUserName());
assertEquals(shortName, ugi.getShortUserName());
}
private void testConstructorFailures(String userName) {
try {
UserGroupInformation.createRemoteUser(userName);
fail("user:"+userName+" wasn't invalid");
} catch (IllegalArgumentException e) {
String expect = (userName == null || userName.isEmpty())
? "Null user" : "Illegal principal name "+userName;
assertEquals(expect, e.getMessage());
}
}
  /**
   * Verifies precedence between explicitly-set KerberosName rules, implicit
   * initialization, and rules supplied via configuration: explicit rules
   * survive implicit init, but setConfiguration always overrides.
   */
  @Test (timeout = 30000)
  public void testSetConfigWithRules() {
    String[] rules = { "RULE:[1:TEST1]", "RULE:[1:TEST2]", "RULE:[1:TEST3]" };
    // explicitly set a rule
    UserGroupInformation.reset();
    assertFalse(KerberosName.hasRulesBeenSet());
    KerberosName.setRules(rules[0]);
    assertTrue(KerberosName.hasRulesBeenSet());
    assertEquals(rules[0], KerberosName.getRules());
    // implicit init should honor rules already being set
    UserGroupInformation.createUserForTesting("someone", new String[0]);
    assertEquals(rules[0], KerberosName.getRules());
    // set conf, should override
    conf.set(HADOOP_SECURITY_AUTH_TO_LOCAL, rules[1]);
    UserGroupInformation.setConfiguration(conf);
    assertEquals(rules[1], KerberosName.getRules());
    // set conf, should again override
    conf.set(HADOOP_SECURITY_AUTH_TO_LOCAL, rules[2]);
    UserGroupInformation.setConfiguration(conf);
    assertEquals(rules[2], KerberosName.getRules());
    // implicit init should honor rules already being set
    UserGroupInformation.createUserForTesting("someone", new String[0]);
    assertEquals(rules[2], KerberosName.getRules());
  }
  /**
   * Verifies that implicit UGI initialization sets KerberosName rules when
   * none exist, but does not clobber rules that were already set.
   */
  @Test (timeout = 30000)
  public void testEnsureInitWithRules() throws IOException {
    String rules = "RULE:[1:RULE1]";
    // trigger implicit init, rules should init
    UserGroupInformation.reset();
    assertFalse(KerberosName.hasRulesBeenSet());
    UserGroupInformation.createUserForTesting("someone", new String[0]);
    assertTrue(KerberosName.hasRulesBeenSet());
    // set a rule, trigger implicit init, rule should not change
    UserGroupInformation.reset();
    KerberosName.setRules(rules);
    assertTrue(KerberosName.hasRulesBeenSet());
    assertEquals(rules, KerberosName.getRules());
    UserGroupInformation.createUserForTesting("someone", new String[0]);
    assertEquals(rules, KerberosName.getRules());
  }
  /**
   * UGI equality is Subject-identity based: two UGIs created for the same
   * user have distinct Subjects and are unequal; two UGIs sharing one
   * Subject are equal with matching hash codes.
   */
  @Test (timeout = 30000)
  public void testEquals() throws Exception {
    UserGroupInformation uugi =
        UserGroupInformation.createUserForTesting(USER_NAME, GROUP_NAMES);
    assertEquals(uugi, uugi);
    // The subjects should be different, so this should fail
    UserGroupInformation ugi2 =
        UserGroupInformation.createUserForTesting(USER_NAME, GROUP_NAMES);
    assertFalse(uugi.equals(ugi2));
    assertFalse(uugi.hashCode() == ugi2.hashCode());
    // two ugi that have the same subject need to be equal
    UserGroupInformation ugi3 = new UserGroupInformation(uugi.getSubject());
    assertEquals(uugi, ugi3);
    assertEquals(uugi.hashCode(), ugi3.hashCode());
  }
  /**
   * Equality for proxy users follows the same Subject-identity rule: a proxy
   * UGI equals another UGI wrapping its Subject, but not a plain remote UGI
   * for the same user name.
   */
  @Test (timeout = 30000)
  public void testEqualsWithRealUser() throws Exception {
    UserGroupInformation realUgi1 = UserGroupInformation.createUserForTesting(
        "RealUser", GROUP_NAMES);
    UserGroupInformation proxyUgi1 = UserGroupInformation.createProxyUser(
        USER_NAME, realUgi1);
    UserGroupInformation proxyUgi2 =
        new UserGroupInformation( proxyUgi1.getSubject());
    UserGroupInformation remoteUgi = UserGroupInformation.createRemoteUser(USER_NAME);
    // Same Subject => equal; different Subject => not equal.
    assertEquals(proxyUgi1, proxyUgi2);
    assertFalse(remoteUgi.equals(proxyUgi1));
  }
@Test (timeout = 30000)
public void testGettingGroups() throws Exception {
UserGroupInformation uugi =
UserGroupInformation.createUserForTesting(USER_NAME, GROUP_NAMES);
assertEquals(USER_NAME, uugi.getUserName());
assertArrayEquals(new String[]{GROUP1_NAME, GROUP2_NAME, GROUP3_NAME},
uugi.getGroupNames());
}
  @SuppressWarnings("unchecked") // from Mockito mocks
  /**
   * Exercises addToken keying semantics: tokens are stored by service name,
   * so a token with the same service (including null) replaces the previous
   * one, and changing a mock's service makes it a distinct entry.
   */
  @Test (timeout = 30000)
  public <T extends TokenIdentifier> void testAddToken() throws Exception {
    UserGroupInformation ugi =
        UserGroupInformation.createRemoteUser("someone");
    Token<T> t1 = mock(Token.class);
    Token<T> t2 = mock(Token.class);
    Token<T> t3 = mock(Token.class);
    // add token to ugi
    ugi.addToken(t1);
    checkTokens(ugi, t1);
    // replace token t1 with t2 - with same key (null)
    ugi.addToken(t2);
    checkTokens(ugi, t2);
    // change t1 service and add token
    when(t1.getService()).thenReturn(new Text("t1"));
    ugi.addToken(t1);
    checkTokens(ugi, t1, t2);
    // overwrite t1 token with t3 - same key (!null)
    when(t3.getService()).thenReturn(new Text("t1"));
    ugi.addToken(t3);
    checkTokens(ugi, t2, t3);
    // just try to re-add with new name
    when(t1.getService()).thenReturn(new Text("t1.1"));
    ugi.addToken(t1);
    checkTokens(ugi, t1, t2, t3);
    // just try to re-add with new name again
    ugi.addToken(t1);
    checkTokens(ugi, t1, t2, t3);
  }
  @SuppressWarnings("unchecked") // from Mockito mocks
  /**
   * Verifies that getCredentials() returns a copy: mutating the returned
   * Credentials (overwriting a service entry) must not affect the UGI.
   */
  @Test (timeout = 30000)
  public <T extends TokenIdentifier> void testGetCreds() throws Exception {
    UserGroupInformation ugi =
        UserGroupInformation.createRemoteUser("someone");
    Text service = new Text("service");
    Token<T> t1 = mock(Token.class);
    when(t1.getService()).thenReturn(service);
    Token<T> t2 = mock(Token.class);
    when(t2.getService()).thenReturn(new Text("service2"));
    Token<T> t3 = mock(Token.class);
    when(t3.getService()).thenReturn(service);
    // add token to ugi
    ugi.addToken(t1);
    ugi.addToken(t2);
    checkTokens(ugi, t1, t2);
    Credentials creds = ugi.getCredentials();
    // t3 shares t1's service, so it overwrites t1 in the returned copy only.
    creds.addToken(t3.getService(), t3);
    assertSame(t3, creds.getToken(service));
    // check that ugi wasn't modified
    checkTokens(ugi, t1, t2);
  }
  @SuppressWarnings("unchecked") // from Mockito mocks
  /**
   * Verifies that addCredentials merges both tokens and secret keys into the
   * UGI, and that the stored secret key is the same array instance.
   */
  @Test (timeout = 30000)
  public <T extends TokenIdentifier> void testAddCreds() throws Exception {
    UserGroupInformation ugi =
        UserGroupInformation.createRemoteUser("someone");
    Text service = new Text("service");
    Token<T> t1 = mock(Token.class);
    when(t1.getService()).thenReturn(service);
    Token<T> t2 = mock(Token.class);
    when(t2.getService()).thenReturn(new Text("service2"));
    byte[] secret = new byte[]{};
    Text secretKey = new Text("sshhh");
    // fill credentials
    Credentials creds = new Credentials();
    creds.addToken(t1.getService(), t1);
    creds.addToken(t2.getService(), t2);
    creds.addSecretKey(secretKey, secret);
    // add creds to ugi, and check ugi
    ugi.addCredentials(creds);
    checkTokens(ugi, t1, t2);
    assertSame(secret, ugi.getCredentials().getSecretKey(secretKey));
  }
@Test (timeout = 30000)
public <T extends TokenIdentifier> void testGetCredsNotSame()
throws Exception {
UserGroupInformation ugi =
UserGroupInformation.createRemoteUser("someone");
Credentials creds = ugi.getCredentials();
// should always get a new copy
assertNotSame(creds, ugi.getCredentials());
}
private void checkTokens(UserGroupInformation ugi, Token<?> ... tokens) {
// check the ugi's token collection
Collection<Token<?>> ugiTokens = ugi.getTokens();
for (Token<?> t : tokens) {
assertTrue(ugiTokens.contains(t));
}
assertEquals(tokens.length, ugiTokens.size());
// check the ugi's credentials
Credentials ugiCreds = ugi.getCredentials();
for (Token<?> t : tokens) {
assertSame(t, ugiCreds.getToken(t.getService()));
}
assertEquals(tokens.length, ugiCreds.numberOfTokens());
}
  @SuppressWarnings("unchecked") // from Mockito mocks
  /**
   * Verifies that addToken(alias, token) stores the same token instance under
   * multiple service aliases without disturbing earlier entries.
   */
  @Test (timeout = 30000)
  public <T extends TokenIdentifier> void testAddNamedToken() throws Exception {
    UserGroupInformation ugi =
        UserGroupInformation.createRemoteUser("someone");
    Token<T> t1 = mock(Token.class);
    Text service1 = new Text("t1");
    Text service2 = new Text("t2");
    when(t1.getService()).thenReturn(service1);
    // add token
    ugi.addToken(service1, t1);
    assertSame(t1, ugi.getCredentials().getToken(service1));
    // add token with another name
    ugi.addToken(service2, t1);
    assertSame(t1, ugi.getCredentials().getToken(service1));
    assertSame(t1, ugi.getCredentials().getToken(service2));
  }
  @SuppressWarnings("unchecked") // from Mockito mocks
  /**
   * End-to-end token handling: tokens and secret keys added to a UGI are
   * visible via getTokens()/getCredentials(), the token view is
   * unmodifiable, and tokens propagate into code run under doAs().
   */
  @Test (timeout = 30000)
  public <T extends TokenIdentifier> void testUGITokens() throws Exception {
    UserGroupInformation ugi =
        UserGroupInformation.createUserForTesting("TheDoctor",
            new String [] { "TheTARDIS"});
    Token<T> t1 = mock(Token.class);
    when(t1.getService()).thenReturn(new Text("t1"));
    Token<T> t2 = mock(Token.class);
    when(t2.getService()).thenReturn(new Text("t2"));
    Credentials creds = new Credentials();
    byte[] secretKey = new byte[]{};
    Text secretName = new Text("shhh");
    creds.addSecretKey(secretName, secretKey);
    ugi.addToken(t1);
    ugi.addToken(t2);
    ugi.addCredentials(creds);
    Collection<Token<? extends TokenIdentifier>> z = ugi.getTokens();
    assertTrue(z.contains(t1));
    assertTrue(z.contains(t2));
    assertEquals(2, z.size());
    Credentials ugiCreds = ugi.getCredentials();
    assertSame(secretKey, ugiCreds.getSecretKey(secretName));
    assertEquals(1, ugiCreds.numberOfSecretKeys());
    // The token collection must be a read-only view.
    try {
      z.remove(t1);
      fail("Shouldn't be able to modify token collection from UGI");
    } catch(UnsupportedOperationException uoe) {
      // Can't modify tokens
    }
    // ensure that the tokens are passed through doAs
    Collection<Token<? extends TokenIdentifier>> otherSet =
        ugi.doAs(new PrivilegedExceptionAction<Collection<Token<?>>>(){
          @Override
          public Collection<Token<?>> run() throws IOException {
            return UserGroupInformation.getCurrentUser().getTokens();
          }
        });
    assertTrue(otherSet.contains(t1));
    assertTrue(otherSet.contains(t2));
  }
  /**
   * Verifies that token identifiers added to a UGI are retrievable and are
   * also visible to code running under doAs().
   */
  @Test (timeout = 30000)
  public void testTokenIdentifiers() throws Exception {
    UserGroupInformation ugi = UserGroupInformation.createUserForTesting(
        "TheDoctor", new String[] { "TheTARDIS" });
    TokenIdentifier t1 = mock(TokenIdentifier.class);
    TokenIdentifier t2 = mock(TokenIdentifier.class);
    ugi.addTokenIdentifier(t1);
    ugi.addTokenIdentifier(t2);
    Collection<TokenIdentifier> z = ugi.getTokenIdentifiers();
    assertTrue(z.contains(t1));
    assertTrue(z.contains(t2));
    assertEquals(2, z.size());
    // ensure that the token identifiers are passed through doAs
    Collection<TokenIdentifier> otherSet = ugi
        .doAs(new PrivilegedExceptionAction<Collection<TokenIdentifier>>() {
          @Override
          public Collection<TokenIdentifier> run() throws IOException {
            return UserGroupInformation.getCurrentUser().getTokenIdentifiers();
          }
        });
    assertTrue(otherSet.contains(t1));
    assertTrue(otherSet.contains(t2));
    assertEquals(2, otherSet.size());
  }
  /**
   * Round-trips every AuthenticationMethod that has an underlying SASL auth
   * method through setAuthenticationMethod/getAuthenticationMethod.
   */
  @Test (timeout = 30000)
  public void testTestAuthMethod() throws Exception {
    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
    // verify the reverse mappings works
    for (AuthenticationMethod am : AuthenticationMethod.values()) {
      // Only methods with a SASL mapping can be set via the AuthMethod form.
      if (am.getAuthMethod() != null) {
        ugi.setAuthenticationMethod(am.getAuthMethod());
        assertEquals(am, ugi.getAuthenticationMethod());
      }
    }
  }
  /**
   * Verifies that an authentication method set on a UGI is observed both
   * directly and from code running under that UGI's doAs().
   */
  @Test (timeout = 30000)
  public void testUGIAuthMethod() throws Exception {
    final UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
    final AuthenticationMethod am = AuthenticationMethod.KERBEROS;
    ugi.setAuthenticationMethod(am);
    Assert.assertEquals(am, ugi.getAuthenticationMethod());
    ugi.doAs(new PrivilegedExceptionAction<Object>() {
      @Override
      public Object run() throws IOException {
        Assert.assertEquals(am, UserGroupInformation.getCurrentUser()
            .getAuthenticationMethod());
        return null;
      }
    });
  }
  /**
   * Verifies authentication-method semantics for proxy users: the proxy UGI
   * reports PROXY, the real (underlying) method is reachable via
   * getRealAuthenticationMethod(), the same holds inside doAs(), and UGI
   * equality over a shared Subject works with and without an auth method set.
   */
  @Test (timeout = 30000)
  public void testUGIAuthMethodInRealUser() throws Exception {
    final UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
    UserGroupInformation proxyUgi = UserGroupInformation.createProxyUser(
        "proxy", ugi);
    final AuthenticationMethod am = AuthenticationMethod.KERBEROS;
    ugi.setAuthenticationMethod(am);
    Assert.assertEquals(am, ugi.getAuthenticationMethod());
    Assert.assertEquals(AuthenticationMethod.PROXY,
                        proxyUgi.getAuthenticationMethod());
    Assert.assertEquals(am, UserGroupInformation
        .getRealAuthenticationMethod(proxyUgi));
    proxyUgi.doAs(new PrivilegedExceptionAction<Object>() {
      @Override
      public Object run() throws IOException {
        Assert.assertEquals(AuthenticationMethod.PROXY, UserGroupInformation
            .getCurrentUser().getAuthenticationMethod());
        Assert.assertEquals(am, UserGroupInformation.getCurrentUser()
            .getRealUser().getAuthenticationMethod());
        return null;
      }
    });
    // Two UGIs over the same Subject compare equal once both carry PROXY.
    UserGroupInformation proxyUgi2 =
        new UserGroupInformation(proxyUgi.getSubject());
    proxyUgi2.setAuthenticationMethod(AuthenticationMethod.PROXY);
    Assert.assertEquals(proxyUgi, proxyUgi2);
    // Equality should work if authMethod is null
    UserGroupInformation realugi = UserGroupInformation.getCurrentUser();
    UserGroupInformation proxyUgi3 = UserGroupInformation.createProxyUser(
        "proxyAnother", realugi);
    UserGroupInformation proxyUgi4 =
        new UserGroupInformation(proxyUgi3.getSubject());
    Assert.assertEquals(proxyUgi3, proxyUgi4);
  }
  /**
   * Two UGIs wrapping the same Subject must see the identical LoginContext
   * instance stored on the Subject's User principal.
   */
  @Test (timeout = 30000)
  public void testLoginObjectInSubject() throws Exception {
    UserGroupInformation loginUgi = UserGroupInformation.getLoginUser();
    UserGroupInformation anotherUgi = new UserGroupInformation(loginUgi
        .getSubject());
    LoginContext login1 = loginUgi.getSubject().getPrincipals(User.class)
        .iterator().next().getLogin();
    LoginContext login2 = anotherUgi.getSubject().getPrincipals(User.class)
        .iterator().next().getLogin();
    //login1 and login2 must be same instances
    Assert.assertTrue(login1 == login2);
  }
  /**
   * Logging out and back in via the login user's LoginContext must preserve
   * the identical User principal instance on the Subject.
   */
  @Test (timeout = 30000)
  public void testLoginModuleCommit() throws Exception {
    UserGroupInformation loginUgi = UserGroupInformation.getLoginUser();
    User user1 = loginUgi.getSubject().getPrincipals(User.class).iterator()
        .next();
    LoginContext login = user1.getLogin();
    // Re-run the full logout/login cycle against the same context.
    login.logout();
    login.login();
    User user2 = loginUgi.getSubject().getPrincipals(User.class).iterator()
        .next();
    // user1 and user2 must be same instances.
    Assert.assertTrue(user1 == user2);
  }
public static void verifyLoginMetrics(long success, int failure)
throws IOException {
// Ensure metrics related to kerberos login is updated.
MetricsRecordBuilder rb = getMetrics("UgiMetrics");
if (success > 0) {
assertCounter("LoginSuccessNumOps", success, rb);
assertGaugeGt("LoginSuccessAvgTime", 0, rb);
}
if (failure > 0) {
assertCounter("LoginFailureNumPos", failure, rb);
assertGaugeGt("LoginFailureAvgTime", 0, rb);
}
}
  /**
   * Asserts that the "UgiMetrics" source recorded at least {@code groups}
   * group lookups, that the average lookup time is sane, and that the 1s
   * percentile quantiles were published after the rollover interval.
   *
   * @param groups minimum expected GetGroups operation count (0 skips checks)
   * @throws InterruptedException if interrupted while waiting for rollover
   */
  private static void verifyGroupMetrics(
      long groups) throws InterruptedException {
    MetricsRecordBuilder rb = getMetrics("UgiMetrics");
    if (groups > 0) {
      assertCounterGt("GetGroupsNumOps", groups-1, rb);
      double avg = getDoubleGauge("GetGroupsAvgTime", rb);
      assertTrue(avg >= 0.0);
      // Sleep for an interval+slop to let the percentiles rollover
      Thread.sleep((PERCENTILES_INTERVAL+1)*1000);
      // Check that the percentiles were updated
      assertQuantileGauges("GetGroups1s", rb);
    }
  }
  /**
   * Test for the case that UserGroupInformation.getCurrentUser()
   * is called when the AccessControlContext has a Subject associated
   * with it, but that Subject was not created by Hadoop (ie it has no
   * associated User principal). getCurrentUser() must still return a
   * non-null UGI rather than failing on the foreign Subject.
   */
  @Test (timeout = 30000)
  public void testUGIUnderNonHadoopContext() throws Exception {
    Subject nonHadoopSubject = new Subject();
    // Run inside a JAAS context owned by a Subject Hadoop didn't create.
    Subject.doAs(nonHadoopSubject, new PrivilegedExceptionAction<Void>() {
      @Override
      public Void run() throws IOException {
        UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
        assertNotNull(ugi);
        return null;
      }
    });
  }
  /**
   * Builds a Subject carrying only a KerberosPrincipal and verifies that
   * getUGIFromSubject() derives a UGI whose user name is the principal
   * qualified with the default realm.
   */
  @Test (timeout = 30000)
  public void testGetUGIFromSubject() throws Exception {
    KerberosPrincipal p = new KerberosPrincipal("guest");
    Subject subject = new Subject();
    subject.getPrincipals().add(p);
    UserGroupInformation ugi = UserGroupInformation.getUGIFromSubject(subject);
    assertNotNull(ugi);
    assertEquals("guest@DEFAULT.REALM", ugi.getUserName());
  }
/** Test hasSufficientTimeElapsed method */
@Test (timeout = 30000)
public void testHasSufficientTimeElapsed() throws Exception {
// Make hasSufficientTimeElapsed public
Method method = UserGroupInformation.class
.getDeclaredMethod("hasSufficientTimeElapsed", long.class);
method.setAccessible(true);
UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
User user = ugi.getSubject().getPrincipals(User.class).iterator().next();
long now = System.currentTimeMillis();
// Using default relogin time (1 minute)
user.setLastLogin(now - 2 * 60 * 1000); // 2 minutes before "now"
assertTrue((Boolean)method.invoke(ugi, now));
user.setLastLogin(now - 30 * 1000); // 30 seconds before "now"
assertFalse((Boolean)method.invoke(ugi, now));
// Using relogin time of 10 minutes
Configuration conf2 = new Configuration(conf);
conf2.setLong(
CommonConfigurationKeysPublic.HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN,
10 * 60);
UserGroupInformation.setConfiguration(conf2);
user.setLastLogin(now - 15 * 60 * 1000); // 15 minutes before "now"
assertTrue((Boolean)method.invoke(ugi, now));
user.setLastLogin(now - 6 * 60 * 1000); // 6 minutes before "now"
assertFalse((Boolean)method.invoke(ugi, now));
// Restore original conf to UGI
UserGroupInformation.setConfiguration(conf);
// Restore hasSufficientTimElapsed back to private
method.setAccessible(false);
}
@Test(timeout=1000)
public void testSetLoginUser() throws IOException {
UserGroupInformation ugi = UserGroupInformation.createRemoteUser("test-user");
UserGroupInformation.setLoginUser(ugi);
assertEquals(ugi, UserGroupInformation.getLoginUser());
}
  /**
   * In some scenario, such as HA, delegation tokens are associated with a
   * logical name. The tokens are cloned and are associated with the
   * physical address of the server where the service is provided.
   * This test ensures cloned delegated tokens are locally used
   * and are not returned in {@link UserGroupInformation#getCredentials()}
   */
  @Test
  public void testPrivateTokenExclusion() throws Exception {
    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
    TestTokenIdentifier tokenId = new TestTokenIdentifier();
    Token<TestTokenIdentifier> token = new Token<TestTokenIdentifier>(
            tokenId.getBytes(), "password".getBytes(),
            tokenId.getKind(), null);
    ugi.addToken(new Text("regular-token"), token);
    // Now add cloned private token
    ugi.addToken(new Text("private-token"), new Token.PrivateToken<TestTokenIdentifier>(token));
    ugi.addToken(new Text("private-token1"), new Token.PrivateToken<TestTokenIdentifier>(token));
    // Ensure only non-private tokens are returned
    // Both PrivateToken clones must be filtered out, leaving the original.
    Collection<Token<? extends TokenIdentifier>> tokens = ugi.getCredentials().getAllTokens();
    assertEquals(1, tokens.size());
  }
  /**
   * This test checks a race condition between getting and adding tokens for
   * the current user. Calling UserGroupInformation.getCurrentUser() returns
   * a new object each time, so simply making these methods synchronized was not
   * enough to prevent race conditions and causing a
   * ConcurrentModificationException. These methods are synchronized on the
   * Subject, which is the same object between UserGroupInformation instances.
   * This test tries to cause a CME, by exposing the race condition. Previously
   * this test would fail every time; now it does not.
   */
  @Test
  public void testTokenRaceCondition() throws Exception {
    UserGroupInformation userGroupInfo =
        UserGroupInformation.createUserForTesting(USER_NAME, GROUP_NAMES);
    userGroupInfo.doAs(new PrivilegedExceptionAction<Void>(){
      @Override
      public Void run() throws Exception {
        // make sure it is not the same as the login user because we use the
        // same UGI object for every instantiation of the login user and you
        // won't run into the race condition otherwise
        assertNotEquals(UserGroupInformation.getLoginUser(),
                        UserGroupInformation.getCurrentUser());
        // Background thread hammers getCredentials() while we add tokens.
        GetTokenThread thread = new GetTokenThread();
        try {
          thread.start();
          for (int i = 0; i < 100; i++) {
            @SuppressWarnings("unchecked")
            Token<? extends TokenIdentifier> t = mock(Token.class);
            when(t.getService()).thenReturn(new Text("t" + i));
            UserGroupInformation.getCurrentUser().addToken(t);
            // Fail fast if the reader thread observed a CME.
            assertNull("ConcurrentModificationException encountered",
                thread.cme);
          }
        } catch (ConcurrentModificationException cme) {
          cme.printStackTrace();
          fail("ConcurrentModificationException encountered");
        } finally {
          thread.runThread = false;
          thread.join(5 * 1000);
        }
        return null;
      }});
  }
static class GetTokenThread extends Thread {
boolean runThread = true;
volatile ConcurrentModificationException cme = null;
@Override
public void run() {
while(runThread) {
try {
UserGroupInformation.getCurrentUser().getCredentials();
} catch (ConcurrentModificationException cme) {
this.cme = cme;
cme.printStackTrace();
runThread = false;
} catch (IOException ex) {
ex.printStackTrace();
}
}
}
}
}
|
googleapis/google-cloud-java | 36,045 | java-dialogflow/proto-google-cloud-dialogflow-v2/src/main/java/com/google/cloud/dialogflow/v2/MessageAnnotation.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/v2/participant.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dialogflow.v2;
/**
*
*
* <pre>
* Represents the result of annotation for the message.
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.v2.MessageAnnotation}
*/
public final class MessageAnnotation extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dialogflow.v2.MessageAnnotation)
MessageAnnotationOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use MessageAnnotation.newBuilder() to construct.
  private MessageAnnotation(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg ctor for the default instance: parts starts out empty.
  private MessageAnnotation() {
    parts_ = java.util.Collections.emptyList();
  }
  // Invoked reflectively by the protobuf runtime to allocate instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new MessageAnnotation();
  }
  // Returns the descriptor for this message type from participant.proto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.dialogflow.v2.ParticipantProto
        .internal_static_google_cloud_dialogflow_v2_MessageAnnotation_descriptor;
  }
  // Wires the generated field accessors to the descriptor.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.dialogflow.v2.ParticipantProto
        .internal_static_google_cloud_dialogflow_v2_MessageAnnotation_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.dialogflow.v2.MessageAnnotation.class,
            com.google.cloud.dialogflow.v2.MessageAnnotation.Builder.class);
  }
  public static final int PARTS_FIELD_NUMBER = 1;
  // Backing storage for `repeated AnnotatedMessagePart parts = 1`.
  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.dialogflow.v2.AnnotatedMessagePart> parts_;
  /**
   *
   *
   * <pre>
   * The collection of annotated message parts ordered by their
   * position in the message. You can recover the annotated message by
   * concatenating [AnnotatedMessagePart.text].
   * </pre>
   *
   * <code>repeated .google.cloud.dialogflow.v2.AnnotatedMessagePart parts = 1;</code>
   *
   * @return the list of annotated parts (unmodifiable on built messages)
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.dialogflow.v2.AnnotatedMessagePart> getPartsList() {
    return parts_;
  }
  /**
   *
   *
   * <pre>
   * The collection of annotated message parts ordered by their
   * position in the message. You can recover the annotated message by
   * concatenating [AnnotatedMessagePart.text].
   * </pre>
   *
   * <code>repeated .google.cloud.dialogflow.v2.AnnotatedMessagePart parts = 1;</code>
   *
   * @return the same list, typed as OrBuilder views
   */
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.dialogflow.v2.AnnotatedMessagePartOrBuilder>
      getPartsOrBuilderList() {
    return parts_;
  }
  /**
   *
   *
   * <pre>
   * The collection of annotated message parts ordered by their
   * position in the message. You can recover the annotated message by
   * concatenating [AnnotatedMessagePart.text].
   * </pre>
   *
   * <code>repeated .google.cloud.dialogflow.v2.AnnotatedMessagePart parts = 1;</code>
   *
   * @return the number of annotated parts
   */
  @java.lang.Override
  public int getPartsCount() {
    return parts_.size();
  }
  /**
   *
   *
   * <pre>
   * The collection of annotated message parts ordered by their
   * position in the message. You can recover the annotated message by
   * concatenating [AnnotatedMessagePart.text].
   * </pre>
   *
   * <code>repeated .google.cloud.dialogflow.v2.AnnotatedMessagePart parts = 1;</code>
   *
   * @param index zero-based index into the parts list
   * @return the part at {@code index}
   */
  @java.lang.Override
  public com.google.cloud.dialogflow.v2.AnnotatedMessagePart getParts(int index) {
    return parts_.get(index);
  }
  /**
   *
   *
   * <pre>
   * The collection of annotated message parts ordered by their
   * position in the message. You can recover the annotated message by
   * concatenating [AnnotatedMessagePart.text].
   * </pre>
   *
   * <code>repeated .google.cloud.dialogflow.v2.AnnotatedMessagePart parts = 1;</code>
   *
   * @param index zero-based index into the parts list
   * @return the part at {@code index}, as an OrBuilder view
   */
  @java.lang.Override
  public com.google.cloud.dialogflow.v2.AnnotatedMessagePartOrBuilder getPartsOrBuilder(int index) {
    return parts_.get(index);
  }
  public static final int CONTAIN_ENTITIES_FIELD_NUMBER = 2;
  // Backing storage for `bool contain_entities = 2`; proto3 default is false.
  private boolean containEntities_ = false;
  /**
   *
   *
   * <pre>
   * Indicates whether the text message contains entities.
   * </pre>
   *
   * <code>bool contain_entities = 2;</code>
   *
   * @return The containEntities.
   */
  @java.lang.Override
  public boolean getContainEntities() {
    return containEntities_;
  }
  // Memoized result of isInitialized(): -1 unknown, 0 false, 1 true.
  private byte memoizedIsInitialized = -1;
  // No required fields in this proto3 message, so always initialized.
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes fields in field-number order: parts (1), contain_entities (2).
  // Default-valued scalars are skipped per proto3 wire rules.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < parts_.size(); i++) {
      output.writeMessage(1, parts_.get(i));
    }
    if (containEntities_ != false) {
      output.writeBool(2, containEntities_);
    }
    getUnknownFields().writeTo(output);
  }
  // Computes (and memoizes in memoizedSize) the serialized byte length,
  // mirroring writeTo exactly.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < parts_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, parts_.get(i));
    }
    if (containEntities_ != false) {
      size += com.google.protobuf.CodedOutputStream.computeBoolSize(2, containEntities_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality over all fields plus unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.dialogflow.v2.MessageAnnotation)) {
      return super.equals(obj);
    }
    com.google.cloud.dialogflow.v2.MessageAnnotation other =
        (com.google.cloud.dialogflow.v2.MessageAnnotation) obj;
    if (!getPartsList().equals(other.getPartsList())) return false;
    if (getContainEntities() != other.getContainEntities()) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash over descriptor + present fields; memoized in memoizedHashCode.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getPartsCount() > 0) {
      hash = (37 * hash) + PARTS_FIELD_NUMBER;
      hash = (53 * hash) + getPartsList().hashCode();
    }
    hash = (37 * hash) + CONTAIN_ENTITIES_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getContainEntities());
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parseFrom/parseDelimitedFrom overloads, all delegating
  // to PARSER or the GeneratedMessageV3 stream helpers.
  public static com.google.cloud.dialogflow.v2.MessageAnnotation parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.dialogflow.v2.MessageAnnotation parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.dialogflow.v2.MessageAnnotation parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.dialogflow.v2.MessageAnnotation parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.dialogflow.v2.MessageAnnotation parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.dialogflow.v2.MessageAnnotation parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.dialogflow.v2.MessageAnnotation parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.dialogflow.v2.MessageAnnotation parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a varint length prefix before the message bytes.
  public static com.google.cloud.dialogflow.v2.MessageAnnotation parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.dialogflow.v2.MessageAnnotation parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.dialogflow.v2.MessageAnnotation parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.dialogflow.v2.MessageAnnotation parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory methods; all builders ultimately derive from the
  // default instance's state.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  // Builder pre-populated from an existing message.
  public static Builder newBuilder(com.google.cloud.dialogflow.v2.MessageAnnotation prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Represents the result of annotation for the message.
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.v2.MessageAnnotation}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.v2.MessageAnnotation)
com.google.cloud.dialogflow.v2.MessageAnnotationOrBuilder {
    // Descriptor for the MessageAnnotation type this builder produces.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.dialogflow.v2.ParticipantProto
          .internal_static_google_cloud_dialogflow_v2_MessageAnnotation_descriptor;
    }
    // Wires the generated field accessors to the descriptor.
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.dialogflow.v2.ParticipantProto
          .internal_static_google_cloud_dialogflow_v2_MessageAnnotation_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.dialogflow.v2.MessageAnnotation.class,
              com.google.cloud.dialogflow.v2.MessageAnnotation.Builder.class);
    }
    // Construct using com.google.cloud.dialogflow.v2.MessageAnnotation.newBuilder()
    private Builder() {}
    // Parent-aware ctor used for nested-builder change notification.
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    // Resets all fields to their defaults: empty parts, contain_entities
    // false, and clears the presence bits in bitField0_.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (partsBuilder_ == null) {
        parts_ = java.util.Collections.emptyList();
      } else {
        // A nested builder owns the list; drop our reference and clear it.
        parts_ = null;
        partsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      containEntities_ = false;
      return this;
    }
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.dialogflow.v2.ParticipantProto
.internal_static_google_cloud_dialogflow_v2_MessageAnnotation_descriptor;
}
@java.lang.Override
public com.google.cloud.dialogflow.v2.MessageAnnotation getDefaultInstanceForType() {
return com.google.cloud.dialogflow.v2.MessageAnnotation.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.dialogflow.v2.MessageAnnotation build() {
com.google.cloud.dialogflow.v2.MessageAnnotation result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.dialogflow.v2.MessageAnnotation buildPartial() {
com.google.cloud.dialogflow.v2.MessageAnnotation result =
new com.google.cloud.dialogflow.v2.MessageAnnotation(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.dialogflow.v2.MessageAnnotation result) {
if (partsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
parts_ = java.util.Collections.unmodifiableList(parts_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.parts_ = parts_;
} else {
result.parts_ = partsBuilder_.build();
}
}
private void buildPartial0(com.google.cloud.dialogflow.v2.MessageAnnotation result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.containEntities_ = containEntities_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.dialogflow.v2.MessageAnnotation) {
return mergeFrom((com.google.cloud.dialogflow.v2.MessageAnnotation) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.dialogflow.v2.MessageAnnotation other) {
if (other == com.google.cloud.dialogflow.v2.MessageAnnotation.getDefaultInstance())
return this;
if (partsBuilder_ == null) {
if (!other.parts_.isEmpty()) {
if (parts_.isEmpty()) {
parts_ = other.parts_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensurePartsIsMutable();
parts_.addAll(other.parts_);
}
onChanged();
}
} else {
if (!other.parts_.isEmpty()) {
if (partsBuilder_.isEmpty()) {
partsBuilder_.dispose();
partsBuilder_ = null;
parts_ = other.parts_;
bitField0_ = (bitField0_ & ~0x00000001);
partsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getPartsFieldBuilder()
: null;
} else {
partsBuilder_.addAllMessages(other.parts_);
}
}
}
if (other.getContainEntities() != false) {
setContainEntities(other.getContainEntities());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.dialogflow.v2.AnnotatedMessagePart m =
input.readMessage(
com.google.cloud.dialogflow.v2.AnnotatedMessagePart.parser(),
extensionRegistry);
if (partsBuilder_ == null) {
ensurePartsIsMutable();
parts_.add(m);
} else {
partsBuilder_.addMessage(m);
}
break;
} // case 10
case 16:
{
containEntities_ = input.readBool();
bitField0_ |= 0x00000002;
break;
} // case 16
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.dialogflow.v2.AnnotatedMessagePart> parts_ =
java.util.Collections.emptyList();
private void ensurePartsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
parts_ =
new java.util.ArrayList<com.google.cloud.dialogflow.v2.AnnotatedMessagePart>(parts_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.dialogflow.v2.AnnotatedMessagePart,
com.google.cloud.dialogflow.v2.AnnotatedMessagePart.Builder,
com.google.cloud.dialogflow.v2.AnnotatedMessagePartOrBuilder>
partsBuilder_;
/**
*
*
* <pre>
* The collection of annotated message parts ordered by their
* position in the message. You can recover the annotated message by
* concatenating [AnnotatedMessagePart.text].
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2.AnnotatedMessagePart parts = 1;</code>
*/
public java.util.List<com.google.cloud.dialogflow.v2.AnnotatedMessagePart> getPartsList() {
if (partsBuilder_ == null) {
return java.util.Collections.unmodifiableList(parts_);
} else {
return partsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The collection of annotated message parts ordered by their
* position in the message. You can recover the annotated message by
* concatenating [AnnotatedMessagePart.text].
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2.AnnotatedMessagePart parts = 1;</code>
*/
public int getPartsCount() {
if (partsBuilder_ == null) {
return parts_.size();
} else {
return partsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The collection of annotated message parts ordered by their
* position in the message. You can recover the annotated message by
* concatenating [AnnotatedMessagePart.text].
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2.AnnotatedMessagePart parts = 1;</code>
*/
public com.google.cloud.dialogflow.v2.AnnotatedMessagePart getParts(int index) {
if (partsBuilder_ == null) {
return parts_.get(index);
} else {
return partsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The collection of annotated message parts ordered by their
* position in the message. You can recover the annotated message by
* concatenating [AnnotatedMessagePart.text].
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2.AnnotatedMessagePart parts = 1;</code>
*/
public Builder setParts(int index, com.google.cloud.dialogflow.v2.AnnotatedMessagePart value) {
if (partsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensurePartsIsMutable();
parts_.set(index, value);
onChanged();
} else {
partsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The collection of annotated message parts ordered by their
* position in the message. You can recover the annotated message by
* concatenating [AnnotatedMessagePart.text].
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2.AnnotatedMessagePart parts = 1;</code>
*/
public Builder setParts(
int index, com.google.cloud.dialogflow.v2.AnnotatedMessagePart.Builder builderForValue) {
if (partsBuilder_ == null) {
ensurePartsIsMutable();
parts_.set(index, builderForValue.build());
onChanged();
} else {
partsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The collection of annotated message parts ordered by their
* position in the message. You can recover the annotated message by
* concatenating [AnnotatedMessagePart.text].
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2.AnnotatedMessagePart parts = 1;</code>
*/
public Builder addParts(com.google.cloud.dialogflow.v2.AnnotatedMessagePart value) {
if (partsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensurePartsIsMutable();
parts_.add(value);
onChanged();
} else {
partsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The collection of annotated message parts ordered by their
* position in the message. You can recover the annotated message by
* concatenating [AnnotatedMessagePart.text].
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2.AnnotatedMessagePart parts = 1;</code>
*/
public Builder addParts(int index, com.google.cloud.dialogflow.v2.AnnotatedMessagePart value) {
if (partsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensurePartsIsMutable();
parts_.add(index, value);
onChanged();
} else {
partsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The collection of annotated message parts ordered by their
* position in the message. You can recover the annotated message by
* concatenating [AnnotatedMessagePart.text].
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2.AnnotatedMessagePart parts = 1;</code>
*/
public Builder addParts(
com.google.cloud.dialogflow.v2.AnnotatedMessagePart.Builder builderForValue) {
if (partsBuilder_ == null) {
ensurePartsIsMutable();
parts_.add(builderForValue.build());
onChanged();
} else {
partsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The collection of annotated message parts ordered by their
* position in the message. You can recover the annotated message by
* concatenating [AnnotatedMessagePart.text].
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2.AnnotatedMessagePart parts = 1;</code>
*/
public Builder addParts(
int index, com.google.cloud.dialogflow.v2.AnnotatedMessagePart.Builder builderForValue) {
if (partsBuilder_ == null) {
ensurePartsIsMutable();
parts_.add(index, builderForValue.build());
onChanged();
} else {
partsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The collection of annotated message parts ordered by their
* position in the message. You can recover the annotated message by
* concatenating [AnnotatedMessagePart.text].
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2.AnnotatedMessagePart parts = 1;</code>
*/
public Builder addAllParts(
java.lang.Iterable<? extends com.google.cloud.dialogflow.v2.AnnotatedMessagePart> values) {
if (partsBuilder_ == null) {
ensurePartsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, parts_);
onChanged();
} else {
partsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The collection of annotated message parts ordered by their
* position in the message. You can recover the annotated message by
* concatenating [AnnotatedMessagePart.text].
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2.AnnotatedMessagePart parts = 1;</code>
*/
public Builder clearParts() {
if (partsBuilder_ == null) {
parts_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
partsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The collection of annotated message parts ordered by their
* position in the message. You can recover the annotated message by
* concatenating [AnnotatedMessagePart.text].
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2.AnnotatedMessagePart parts = 1;</code>
*/
public Builder removeParts(int index) {
if (partsBuilder_ == null) {
ensurePartsIsMutable();
parts_.remove(index);
onChanged();
} else {
partsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The collection of annotated message parts ordered by their
* position in the message. You can recover the annotated message by
* concatenating [AnnotatedMessagePart.text].
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2.AnnotatedMessagePart parts = 1;</code>
*/
public com.google.cloud.dialogflow.v2.AnnotatedMessagePart.Builder getPartsBuilder(int index) {
return getPartsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The collection of annotated message parts ordered by their
* position in the message. You can recover the annotated message by
* concatenating [AnnotatedMessagePart.text].
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2.AnnotatedMessagePart parts = 1;</code>
*/
public com.google.cloud.dialogflow.v2.AnnotatedMessagePartOrBuilder getPartsOrBuilder(
int index) {
if (partsBuilder_ == null) {
return parts_.get(index);
} else {
return partsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The collection of annotated message parts ordered by their
* position in the message. You can recover the annotated message by
* concatenating [AnnotatedMessagePart.text].
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2.AnnotatedMessagePart parts = 1;</code>
*/
public java.util.List<? extends com.google.cloud.dialogflow.v2.AnnotatedMessagePartOrBuilder>
getPartsOrBuilderList() {
if (partsBuilder_ != null) {
return partsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(parts_);
}
}
/**
*
*
* <pre>
* The collection of annotated message parts ordered by their
* position in the message. You can recover the annotated message by
* concatenating [AnnotatedMessagePart.text].
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2.AnnotatedMessagePart parts = 1;</code>
*/
public com.google.cloud.dialogflow.v2.AnnotatedMessagePart.Builder addPartsBuilder() {
return getPartsFieldBuilder()
.addBuilder(com.google.cloud.dialogflow.v2.AnnotatedMessagePart.getDefaultInstance());
}
/**
*
*
* <pre>
* The collection of annotated message parts ordered by their
* position in the message. You can recover the annotated message by
* concatenating [AnnotatedMessagePart.text].
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2.AnnotatedMessagePart parts = 1;</code>
*/
public com.google.cloud.dialogflow.v2.AnnotatedMessagePart.Builder addPartsBuilder(int index) {
return getPartsFieldBuilder()
.addBuilder(
index, com.google.cloud.dialogflow.v2.AnnotatedMessagePart.getDefaultInstance());
}
/**
*
*
* <pre>
* The collection of annotated message parts ordered by their
* position in the message. You can recover the annotated message by
* concatenating [AnnotatedMessagePart.text].
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2.AnnotatedMessagePart parts = 1;</code>
*/
public java.util.List<com.google.cloud.dialogflow.v2.AnnotatedMessagePart.Builder>
getPartsBuilderList() {
return getPartsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.dialogflow.v2.AnnotatedMessagePart,
com.google.cloud.dialogflow.v2.AnnotatedMessagePart.Builder,
com.google.cloud.dialogflow.v2.AnnotatedMessagePartOrBuilder>
getPartsFieldBuilder() {
if (partsBuilder_ == null) {
partsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.dialogflow.v2.AnnotatedMessagePart,
com.google.cloud.dialogflow.v2.AnnotatedMessagePart.Builder,
com.google.cloud.dialogflow.v2.AnnotatedMessagePartOrBuilder>(
parts_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
parts_ = null;
}
return partsBuilder_;
}
private boolean containEntities_;
/**
*
*
* <pre>
* Indicates whether the text message contains entities.
* </pre>
*
* <code>bool contain_entities = 2;</code>
*
* @return The containEntities.
*/
@java.lang.Override
public boolean getContainEntities() {
return containEntities_;
}
/**
*
*
* <pre>
* Indicates whether the text message contains entities.
* </pre>
*
* <code>bool contain_entities = 2;</code>
*
* @param value The containEntities to set.
* @return This builder for chaining.
*/
public Builder setContainEntities(boolean value) {
containEntities_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Indicates whether the text message contains entities.
* </pre>
*
* <code>bool contain_entities = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearContainEntities() {
bitField0_ = (bitField0_ & ~0x00000002);
containEntities_ = false;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.v2.MessageAnnotation)
}
  // @@protoc_insertion_point(class_scope:google.cloud.dialogflow.v2.MessageAnnotation)
  // Singleton default instance; also serves as the builder factory anchor.
  private static final com.google.cloud.dialogflow.v2.MessageAnnotation DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.dialogflow.v2.MessageAnnotation();
  }

  public static com.google.cloud.dialogflow.v2.MessageAnnotation getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser that delegates to the Builder's streaming mergeFrom; on failure it attaches
  // the partially built message to the thrown InvalidProtocolBufferException.
  private static final com.google.protobuf.Parser<MessageAnnotation> PARSER =
      new com.google.protobuf.AbstractParser<MessageAnnotation>() {
        @java.lang.Override
        public MessageAnnotation parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  // Static accessor for the shared parser instance.
  public static com.google.protobuf.Parser<MessageAnnotation> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<MessageAnnotation> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.dialogflow.v2.MessageAnnotation getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/metastore/v1beta/metastore.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.metastore.v1beta;
/**
*
*
* <pre>
* Request message for
* [DataprocMetastore.AlterMetadataResourceLocation][google.cloud.metastore.v1beta.DataprocMetastore.AlterMetadataResourceLocation].
* </pre>
*
* Protobuf type {@code google.cloud.metastore.v1beta.AlterMetadataResourceLocationRequest}
*/
public final class AlterMetadataResourceLocationRequest
extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.metastore.v1beta.AlterMetadataResourceLocationRequest)
AlterMetadataResourceLocationRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use AlterMetadataResourceLocationRequest.newBuilder() to construct.
  // Use AlterMetadataResourceLocationRequest.newBuilder() to construct.
  private AlterMetadataResourceLocationRequest(
      com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default-instance constructor: string fields start as empty strings, not null.
  private AlterMetadataResourceLocationRequest() {
    service_ = "";
    resourceName_ = "";
    locationUri_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new AlterMetadataResourceLocationRequest();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.metastore.v1beta.MetastoreProto
        .internal_static_google_cloud_metastore_v1beta_AlterMetadataResourceLocationRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.metastore.v1beta.MetastoreProto
        .internal_static_google_cloud_metastore_v1beta_AlterMetadataResourceLocationRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.metastore.v1beta.AlterMetadataResourceLocationRequest.class,
            com.google.cloud.metastore.v1beta.AlterMetadataResourceLocationRequest.Builder.class);
  }
  public static final int SERVICE_FIELD_NUMBER = 1;

  // Holds either a String or a ByteString; lazily converted and cached on first access.
  @SuppressWarnings("serial")
  private volatile java.lang.Object service_ = "";

  /**
   *
   *
   * <pre>
   * Required. The relative resource name of the metastore service to mutate
   * metadata, in the following format:
   *
   * `projects/{project_id}/locations/{location_id}/services/{service_id}`.
   * </pre>
   *
   * <code>
   * string service = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The service.
   */
  @java.lang.Override
  public java.lang.String getService() {
    java.lang.Object ref = service_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      // Decode once and cache the String form for subsequent calls.
      java.lang.String s = bs.toStringUtf8();
      service_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The relative resource name of the metastore service to mutate
   * metadata, in the following format:
   *
   * `projects/{project_id}/locations/{location_id}/services/{service_id}`.
   * </pre>
   *
   * <code>
   * string service = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for service.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getServiceBytes() {
    java.lang.Object ref = service_;
    if (ref instanceof java.lang.String) {
      // Encode once and cache the ByteString form for subsequent calls.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      service_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int RESOURCE_NAME_FIELD_NUMBER = 2;

  // Holds either a String or a ByteString; lazily converted and cached on first access.
  @SuppressWarnings("serial")
  private volatile java.lang.Object resourceName_ = "";

  /**
   *
   *
   * <pre>
   * Required. The relative metadata resource name in the following format.
   *
   * `databases/{database_id}`
   * or
   * `databases/{database_id}/tables/{table_id}`
   * or
   * `databases/{database_id}/tables/{table_id}/partitions/{partition_id}`
   * </pre>
   *
   * <code>string resource_name = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The resourceName.
   */
  @java.lang.Override
  public java.lang.String getResourceName() {
    java.lang.Object ref = resourceName_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      resourceName_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The relative metadata resource name in the following format.
   *
   * `databases/{database_id}`
   * or
   * `databases/{database_id}/tables/{table_id}`
   * or
   * `databases/{database_id}/tables/{table_id}/partitions/{partition_id}`
   * </pre>
   *
   * <code>string resource_name = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for resourceName.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getResourceNameBytes() {
    java.lang.Object ref = resourceName_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      resourceName_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int LOCATION_URI_FIELD_NUMBER = 3;

  // Holds either a String or a ByteString; lazily converted and cached on first access.
  @SuppressWarnings("serial")
  private volatile java.lang.Object locationUri_ = "";

  /**
   *
   *
   * <pre>
   * Required. The new location URI for the metadata resource.
   * </pre>
   *
   * <code>string location_uri = 3 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The locationUri.
   */
  @java.lang.Override
  public java.lang.String getLocationUri() {
    java.lang.Object ref = locationUri_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      locationUri_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The new location URI for the metadata resource.
   * </pre>
   *
   * <code>string location_uri = 3 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for locationUri.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getLocationUriBytes() {
    java.lang.Object ref = locationUri_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      locationUri_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes only non-empty string fields (proto3 default values are omitted on the wire).
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(service_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, service_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, resourceName_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(locationUri_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, locationUri_);
    }
    getUnknownFields().writeTo(output);
  }
  // Computes the wire size once and caches it in memoizedSize; mirrors writeTo's field set.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(service_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, service_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, resourceName_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(locationUri_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, locationUri_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality over all three string fields plus unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.metastore.v1beta.AlterMetadataResourceLocationRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.metastore.v1beta.AlterMetadataResourceLocationRequest other =
        (com.google.cloud.metastore.v1beta.AlterMetadataResourceLocationRequest) obj;
    if (!getService().equals(other.getService())) return false;
    if (!getResourceName().equals(other.getResourceName())) return false;
    if (!getLocationUri().equals(other.getLocationUri())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash over the same fields as equals(); memoized after first computation.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + SERVICE_FIELD_NUMBER;
    hash = (53 * hash) + getService().hashCode();
    hash = (37 * hash) + RESOURCE_NAME_FIELD_NUMBER;
    hash = (53 * hash) + getResourceName().hashCode();
    hash = (37 * hash) + LOCATION_URI_FIELD_NUMBER;
    hash = (53 * hash) + getLocationUri().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points, all delegating to PARSER. The stream variants
  // wrap IOExceptions via GeneratedMessageV3's helpers; the byte variants throw
  // InvalidProtocolBufferException directly.
  public static com.google.cloud.metastore.v1beta.AlterMetadataResourceLocationRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.metastore.v1beta.AlterMetadataResourceLocationRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.metastore.v1beta.AlterMetadataResourceLocationRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.metastore.v1beta.AlterMetadataResourceLocationRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.metastore.v1beta.AlterMetadataResourceLocationRequest parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.metastore.v1beta.AlterMetadataResourceLocationRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.metastore.v1beta.AlterMetadataResourceLocationRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.metastore.v1beta.AlterMetadataResourceLocationRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.metastore.v1beta.AlterMetadataResourceLocationRequest
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.metastore.v1beta.AlterMetadataResourceLocationRequest
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.metastore.v1beta.AlterMetadataResourceLocationRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.metastore.v1beta.AlterMetadataResourceLocationRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  // Fresh builder derived from the default instance.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
public static Builder newBuilder(
com.google.cloud.metastore.v1beta.AlterMetadataResourceLocationRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for
* [DataprocMetastore.AlterMetadataResourceLocation][google.cloud.metastore.v1beta.DataprocMetastore.AlterMetadataResourceLocation].
* </pre>
*
* Protobuf type {@code google.cloud.metastore.v1beta.AlterMetadataResourceLocationRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.metastore.v1beta.AlterMetadataResourceLocationRequest)
com.google.cloud.metastore.v1beta.AlterMetadataResourceLocationRequestOrBuilder {
    /** Returns the protobuf descriptor for {@code AlterMetadataResourceLocationRequest}. */
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.metastore.v1beta.MetastoreProto
          .internal_static_google_cloud_metastore_v1beta_AlterMetadataResourceLocationRequest_descriptor;
    }
    // Binds the generated field-accessor table to the message and builder classes so the
    // reflective accessors (getField/setField) work.
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.metastore.v1beta.MetastoreProto
          .internal_static_google_cloud_metastore_v1beta_AlterMetadataResourceLocationRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.metastore.v1beta.AlterMetadataResourceLocationRequest.class,
              com.google.cloud.metastore.v1beta.AlterMetadataResourceLocationRequest.Builder.class);
    }
// Construct using
// com.google.cloud.metastore.v1beta.AlterMetadataResourceLocationRequest.newBuilder()
    // Unparented builder; used by newBuilder().
    private Builder() {}
    // Parented builder; changes are propagated to the enclosing builder via onChanged().
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    /** Resets every field to its default value and clears all presence bits. */
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      service_ = "";
      resourceName_ = "";
      locationUri_ = "";
      return this;
    }
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.metastore.v1beta.MetastoreProto
.internal_static_google_cloud_metastore_v1beta_AlterMetadataResourceLocationRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.metastore.v1beta.AlterMetadataResourceLocationRequest
getDefaultInstanceForType() {
return com.google.cloud.metastore.v1beta.AlterMetadataResourceLocationRequest
.getDefaultInstance();
}
    // Builds the message, rejecting uninitialized results. For this message isInitialized()
    // always returns true (no required fields), so the exception path is unreachable in
    // practice but kept for the Message contract.
    @java.lang.Override
    public com.google.cloud.metastore.v1beta.AlterMetadataResourceLocationRequest build() {
      com.google.cloud.metastore.v1beta.AlterMetadataResourceLocationRequest result =
          buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    /** Builds the message from the current builder state without an initialization check. */
    @java.lang.Override
    public com.google.cloud.metastore.v1beta.AlterMetadataResourceLocationRequest buildPartial() {
      com.google.cloud.metastore.v1beta.AlterMetadataResourceLocationRequest result =
          new com.google.cloud.metastore.v1beta.AlterMetadataResourceLocationRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies only the fields whose presence bit is set in bitField0_ onto the result.
    private void buildPartial0(
        com.google.cloud.metastore.v1beta.AlterMetadataResourceLocationRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.service_ = service_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.resourceName_ = resourceName_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.locationUri_ = locationUri_;
      }
    }
    // The overrides below only narrow the return type to Builder; all behavior is inherited
    // from GeneratedMessageV3.Builder.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    // Dispatches to the strongly-typed merge when possible; otherwise falls back to the
    // reflective field-by-field merge in the superclass.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.metastore.v1beta.AlterMetadataResourceLocationRequest) {
        return mergeFrom(
            (com.google.cloud.metastore.v1beta.AlterMetadataResourceLocationRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    /**
     * Merges {@code other} into this builder. Per proto3 merge semantics, only non-empty
     * string fields of {@code other} overwrite this builder's values; unknown fields are
     * merged as well.
     */
    public Builder mergeFrom(
        com.google.cloud.metastore.v1beta.AlterMetadataResourceLocationRequest other) {
      if (other
          == com.google.cloud.metastore.v1beta.AlterMetadataResourceLocationRequest
              .getDefaultInstance()) return this;
      if (!other.getService().isEmpty()) {
        service_ = other.service_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getResourceName().isEmpty()) {
        resourceName_ = other.resourceName_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (!other.getLocationUri().isEmpty()) {
        locationUri_ = other.locationUri_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    // Always initialized: this proto3 message has no required fields.
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    /**
     * Merges wire-format data from {@code input} into this builder. Tags 10/18/26 are the
     * length-delimited encodings of fields 1 (service), 2 (resource_name) and 3
     * (location_uri); anything else is preserved as an unknown field. IO failures are
     * rethrown as the underlying IOException; the builder keeps whatever was parsed before
     * the failure (onChanged() runs in the finally block).
     */
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of input.
              done = true;
              break;
            case 10:
              {
                service_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                resourceName_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                locationUri_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private java.lang.Object service_ = "";
/**
*
*
* <pre>
* Required. The relative resource name of the metastore service to mutate
* metadata, in the following format:
*
* `projects/{project_id}/locations/{location_id}/services/{service_id}`.
* </pre>
*
* <code>
* string service = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The service.
*/
public java.lang.String getService() {
java.lang.Object ref = service_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
service_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The relative resource name of the metastore service to mutate
* metadata, in the following format:
*
* `projects/{project_id}/locations/{location_id}/services/{service_id}`.
* </pre>
*
* <code>
* string service = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for service.
*/
public com.google.protobuf.ByteString getServiceBytes() {
java.lang.Object ref = service_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
service_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The relative resource name of the metastore service to mutate
* metadata, in the following format:
*
* `projects/{project_id}/locations/{location_id}/services/{service_id}`.
* </pre>
*
* <code>
* string service = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The service to set.
* @return This builder for chaining.
*/
public Builder setService(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
service_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The relative resource name of the metastore service to mutate
* metadata, in the following format:
*
* `projects/{project_id}/locations/{location_id}/services/{service_id}`.
* </pre>
*
* <code>
* string service = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearService() {
service_ = getDefaultInstance().getService();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The relative resource name of the metastore service to mutate
* metadata, in the following format:
*
* `projects/{project_id}/locations/{location_id}/services/{service_id}`.
* </pre>
*
* <code>
* string service = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for service to set.
* @return This builder for chaining.
*/
public Builder setServiceBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
service_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object resourceName_ = "";
/**
*
*
* <pre>
* Required. The relative metadata resource name in the following format.
*
* `databases/{database_id}`
* or
* `databases/{database_id}/tables/{table_id}`
* or
* `databases/{database_id}/tables/{table_id}/partitions/{partition_id}`
* </pre>
*
* <code>string resource_name = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The resourceName.
*/
public java.lang.String getResourceName() {
java.lang.Object ref = resourceName_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
resourceName_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The relative metadata resource name in the following format.
*
* `databases/{database_id}`
* or
* `databases/{database_id}/tables/{table_id}`
* or
* `databases/{database_id}/tables/{table_id}/partitions/{partition_id}`
* </pre>
*
* <code>string resource_name = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for resourceName.
*/
public com.google.protobuf.ByteString getResourceNameBytes() {
java.lang.Object ref = resourceName_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
resourceName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The relative metadata resource name in the following format.
*
* `databases/{database_id}`
* or
* `databases/{database_id}/tables/{table_id}`
* or
* `databases/{database_id}/tables/{table_id}/partitions/{partition_id}`
* </pre>
*
* <code>string resource_name = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The resourceName to set.
* @return This builder for chaining.
*/
public Builder setResourceName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
resourceName_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The relative metadata resource name in the following format.
*
* `databases/{database_id}`
* or
* `databases/{database_id}/tables/{table_id}`
* or
* `databases/{database_id}/tables/{table_id}/partitions/{partition_id}`
* </pre>
*
* <code>string resource_name = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearResourceName() {
resourceName_ = getDefaultInstance().getResourceName();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The relative metadata resource name in the following format.
*
* `databases/{database_id}`
* or
* `databases/{database_id}/tables/{table_id}`
* or
* `databases/{database_id}/tables/{table_id}/partitions/{partition_id}`
* </pre>
*
* <code>string resource_name = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for resourceName to set.
* @return This builder for chaining.
*/
public Builder setResourceNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
resourceName_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private java.lang.Object locationUri_ = "";
/**
*
*
* <pre>
* Required. The new location URI for the metadata resource.
* </pre>
*
* <code>string location_uri = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The locationUri.
*/
public java.lang.String getLocationUri() {
java.lang.Object ref = locationUri_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
locationUri_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The new location URI for the metadata resource.
* </pre>
*
* <code>string location_uri = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for locationUri.
*/
public com.google.protobuf.ByteString getLocationUriBytes() {
java.lang.Object ref = locationUri_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
locationUri_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The new location URI for the metadata resource.
* </pre>
*
* <code>string location_uri = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The locationUri to set.
* @return This builder for chaining.
*/
public Builder setLocationUri(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
locationUri_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The new location URI for the metadata resource.
* </pre>
*
* <code>string location_uri = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearLocationUri() {
locationUri_ = getDefaultInstance().getLocationUri();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The new location URI for the metadata resource.
* </pre>
*
* <code>string location_uri = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for locationUri to set.
* @return This builder for chaining.
*/
public Builder setLocationUriBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
locationUri_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.metastore.v1beta.AlterMetadataResourceLocationRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.metastore.v1beta.AlterMetadataResourceLocationRequest)
  // Singleton default (all-fields-empty) instance; also serves as the identity for
  // toBuilder()/mergeFrom short-circuits.
  private static final com.google.cloud.metastore.v1beta.AlterMetadataResourceLocationRequest
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.metastore.v1beta.AlterMetadataResourceLocationRequest();
  }
  /** Returns the shared immutable default instance. */
  public static com.google.cloud.metastore.v1beta.AlterMetadataResourceLocationRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser used by all the static parseFrom overloads. Parse failures carry the partially
  // built message so callers can inspect what was read before the error.
  private static final com.google.protobuf.Parser<AlterMetadataResourceLocationRequest> PARSER =
      new com.google.protobuf.AbstractParser<AlterMetadataResourceLocationRequest>() {
        @java.lang.Override
        public AlterMetadataResourceLocationRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain IO errors so the parser contract (IPBE only) holds.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  /** Returns the shared parser for this message type. */
  public static com.google.protobuf.Parser<AlterMetadataResourceLocationRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<AlterMetadataResourceLocationRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.metastore.v1beta.AlterMetadataResourceLocationRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googlesamples/glass-enterprise-samples | 36,135 | WebRTCSample/app/src/org/appspot/apprtcstandalone/BaseCallActivity.java | package org.appspot.apprtcstandalone;
import android.annotation.TargetApi;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.media.projection.MediaProjection;
import android.media.projection.MediaProjectionManager;
import android.net.Uri;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.util.DisplayMetrics;
import android.util.Log;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import android.view.WindowManager.LayoutParams;
import android.widget.Toast;
import java.io.IOException;
import java.lang.RuntimeException;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import javax.annotation.Nullable;
import org.appspot.apprtcstandalone.AppRTCAudioManager.AudioDevice;
import org.appspot.apprtcstandalone.AppRTCAudioManager.AudioManagerEvents;
import org.appspot.apprtcstandalone.AppRTCClient.RoomConnectionParameters;
import org.appspot.apprtcstandalone.AppRTCClient.SignalingParameters;
import org.appspot.apprtcstandalone.PeerConnectionClient.DataChannelParameters;
import org.appspot.apprtcstandalone.PeerConnectionClient.PeerConnectionParameters;
import org.webrtc.Camera1Enumerator;
import org.webrtc.Camera2Enumerator;
import org.webrtc.CameraEnumerator;
import org.webrtc.EglBase;
import org.webrtc.FileVideoCapturer;
import org.webrtc.IceCandidate;
import org.webrtc.Logging;
import org.webrtc.PeerConnectionFactory;
import org.webrtc.RendererCommon.ScalingType;
import org.webrtc.ScreenCapturerAndroid;
import org.webrtc.SessionDescription;
import org.webrtc.SurfaceViewRenderer;
import org.webrtc.VideoCapturer;
import org.webrtc.VideoFileRenderer;
import org.webrtc.VideoFrame;
import org.webrtc.VideoSink;
public abstract class BaseCallActivity extends Activity implements AppRTCClient.SignalingEvents,
PeerConnectionClient.PeerConnectionEvents,
GlassCallFragment.OnCallEvents {
private static final String TAG = "CallRTCClient";
public static final String EXTRA_ROOMID = "org.appspot.apprtc.ROOMID";
public static final String EXTRA_URLPARAMETERS = "org.appspot.apprtc.URLPARAMETERS";
public static final String EXTRA_LOOPBACK = "org.appspot.apprtc.LOOPBACK";
public static final String EXTRA_VIDEO_CALL = "org.appspot.apprtc.VIDEO_CALL";
public static final String EXTRA_SCREENCAPTURE = "org.appspot.apprtc.SCREENCAPTURE";
public static final String EXTRA_CAMERA2 = "org.appspot.apprtc.CAMERA2";
public static final String EXTRA_VIDEO_WIDTH = "org.appspot.apprtc.VIDEO_WIDTH";
public static final String EXTRA_VIDEO_HEIGHT = "org.appspot.apprtc.VIDEO_HEIGHT";
public static final String EXTRA_VIDEO_FPS = "org.appspot.apprtc.VIDEO_FPS";
public static final String EXTRA_VIDEO_CAPTUREQUALITYSLIDER_ENABLED =
"org.appsopt.apprtc.VIDEO_CAPTUREQUALITYSLIDER";
public static final String EXTRA_VIDEO_BITRATE = "org.appspot.apprtc.VIDEO_BITRATE";
public static final String EXTRA_VIDEOCODEC = "org.appspot.apprtc.VIDEOCODEC";
public static final String EXTRA_HWCODEC_ENABLED = "org.appspot.apprtc.HWCODEC";
public static final String EXTRA_CAPTURETOTEXTURE_ENABLED = "org.appspot.apprtc.CAPTURETOTEXTURE";
public static final String EXTRA_FLEXFEC_ENABLED = "org.appspot.apprtc.FLEXFEC";
public static final String EXTRA_AUDIO_BITRATE = "org.appspot.apprtc.AUDIO_BITRATE";
public static final String EXTRA_AUDIOCODEC = "org.appspot.apprtc.AUDIOCODEC";
public static final String EXTRA_NOAUDIOPROCESSING_ENABLED =
"org.appspot.apprtc.NOAUDIOPROCESSING";
public static final String EXTRA_AECDUMP_ENABLED = "org.appspot.apprtc.AECDUMP";
public static final String EXTRA_SAVE_INPUT_AUDIO_TO_FILE_ENABLED =
"org.appspot.apprtc.SAVE_INPUT_AUDIO_TO_FILE";
public static final String EXTRA_OPENSLES_ENABLED = "org.appspot.apprtc.OPENSLES";
public static final String EXTRA_DISABLE_BUILT_IN_AEC = "org.appspot.apprtc.DISABLE_BUILT_IN_AEC";
public static final String EXTRA_DISABLE_BUILT_IN_AGC = "org.appspot.apprtc.DISABLE_BUILT_IN_AGC";
public static final String EXTRA_DISABLE_BUILT_IN_NS = "org.appspot.apprtc.DISABLE_BUILT_IN_NS";
public static final String EXTRA_DISABLE_WEBRTC_AGC_AND_HPF =
"org.appspot.apprtc.DISABLE_WEBRTC_GAIN_CONTROL";
public static final String EXTRA_DISPLAY_HUD = "org.appspot.apprtc.DISPLAY_HUD";
public static final String EXTRA_TRACING = "org.appspot.apprtc.TRACING";
public static final String EXTRA_CMDLINE = "org.appspot.apprtc.CMDLINE";
public static final String EXTRA_RUNTIME = "org.appspot.apprtc.RUNTIME";
public static final String EXTRA_VIDEO_FILE_AS_CAMERA = "org.appspot.apprtc.VIDEO_FILE_AS_CAMERA";
public static final String EXTRA_SAVE_REMOTE_VIDEO_TO_FILE =
"org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE";
public static final String EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_WIDTH =
"org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE_WIDTH";
public static final String EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT =
"org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT";
public static final String EXTRA_USE_VALUES_FROM_INTENT =
"org.appspot.apprtc.USE_VALUES_FROM_INTENT";
public static final String EXTRA_DATA_CHANNEL_ENABLED = "org.appspot.apprtc.DATA_CHANNEL_ENABLED";
public static final String EXTRA_ORDERED = "org.appspot.apprtc.ORDERED";
public static final String EXTRA_MAX_RETRANSMITS_MS = "org.appspot.apprtc.MAX_RETRANSMITS_MS";
public static final String EXTRA_MAX_RETRANSMITS = "org.appspot.apprtc.MAX_RETRANSMITS";
public static final String EXTRA_PROTOCOL = "org.appspot.apprtc.PROTOCOL";
public static final String EXTRA_NEGOTIATED = "org.appspot.apprtc.NEGOTIATED";
public static final String EXTRA_ID = "org.appspot.apprtc.ID";
public static final String EXTRA_ENABLE_RTCEVENTLOG = "org.appspot.apprtc.ENABLE_RTCEVENTLOG";
public static final String EXTRA_USE_LEGACY_AUDIO_DEVICE =
"org.appspot.apprtc.USE_LEGACY_AUDIO_DEVICE";
private static final int CAPTURE_PERMISSION_REQUEST_CODE = 1;
// List of mandatory application permissions.
private static final String[] MANDATORY_PERMISSIONS = {"android.permission.MODIFY_AUDIO_SETTINGS",
"android.permission.RECORD_AUDIO", "android.permission.INTERNET"};
// Peer connection statistics callback period in ms.
private static final int STAT_CALLBACK_PERIOD = 1000;
private static class ProxyVideoSink implements VideoSink {
private VideoSink target;
@Override
synchronized public void onFrame(VideoFrame frame) {
if (target == null) {
Logging.d(TAG, "Dropping frame in proxy because target is null.");
return;
}
target.onFrame(frame);
}
synchronized public void setTarget(VideoSink target) {
this.target = target;
}
}
private final BaseCallActivity.ProxyVideoSink remoteProxyRenderer = new BaseCallActivity.ProxyVideoSink();
private final BaseCallActivity.ProxyVideoSink localProxyVideoSink = new BaseCallActivity.ProxyVideoSink();
@Nullable
private PeerConnectionClient peerConnectionClient = null;
@Nullable
private AppRTCClient appRtcClient;
@Nullable
private SignalingParameters signalingParameters;
@Nullable
private AppRTCAudioManager audioManager = null;
@Nullable
private SurfaceViewRenderer pipRenderer;
@Nullable
protected SurfaceViewRenderer fullscreenRenderer;
@Nullable
private VideoFileRenderer videoFileRenderer;
private final List<VideoSink> remoteSinks = new ArrayList<>();
private Toast logToast;
private boolean commandLineRun;
private boolean activityRunning;
private RoomConnectionParameters roomConnectionParameters;
@Nullable
private PeerConnectionParameters peerConnectionParameters;
protected boolean iceConnected;
protected boolean isError;
private long callStartedTimeMs = 0;
private boolean micEnabled = true;
private boolean screencaptureEnabled = false;
private static Intent mediaProjectionPermissionResultData;
private static int mediaProjectionPermissionResultCode;
// True if local view is in the fullscreen renderer.
private boolean isSwappedFeeds;
  /**
   * Builds the whole call screen and kicks off the connection: window flags, video
   * renderers, permission checks, intent-parameter extraction, signaling-client selection
   * and peer-connection setup. Initialization order matters here (window flags before
   * setContentView; renderers before setSwappedFeeds).
   */
  @Override
  // TODO(bugs.webrtc.org/8580): LayoutParams.FLAG_TURN_SCREEN_ON and
  // LayoutParams.FLAG_SHOW_WHEN_LOCKED are deprecated.
  @SuppressWarnings("deprecation")
  public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    Thread.setDefaultUncaughtExceptionHandler(new UnhandledExceptionHandler(this));
    // Set window styles for fullscreen-window size. Needs to be done before
    // adding content.
    requestWindowFeature(Window.FEATURE_NO_TITLE);
    getWindow().addFlags(LayoutParams.FLAG_FULLSCREEN | LayoutParams.FLAG_KEEP_SCREEN_ON
        | LayoutParams.FLAG_SHOW_WHEN_LOCKED | LayoutParams.FLAG_TURN_SCREEN_ON);
    getWindow().getDecorView().setSystemUiVisibility(getSystemUiVisibility());
    setContentView(R.layout.activity_call);
    iceConnected = false;
    signalingParameters = null;
    // Create UI controls.
    pipRenderer = findViewById(R.id.pip_video_view);
    fullscreenRenderer = findViewById(R.id.fullscreen_video_view);
    // Swap feeds on pip view click.
    pipRenderer.setOnClickListener(new View.OnClickListener() {
      @Override
      public void onClick(View view) {
        setSwappedFeeds(!isSwappedFeeds);
      }
    });
    remoteSinks.add(remoteProxyRenderer);
    final Intent intent = getIntent();
    final EglBase eglBase = EglBase.create();
    // Create video renderers.
    pipRenderer.init(eglBase.getEglBaseContext(), null);
    pipRenderer.setScalingType(ScalingType.SCALE_ASPECT_FIT);
    String saveRemoteVideoToFile = intent.getStringExtra(EXTRA_SAVE_REMOTE_VIDEO_TO_FILE);
    // When saveRemoteVideoToFile is set we save the video from the remote to a file.
    if (saveRemoteVideoToFile != null) {
      int videoOutWidth = intent.getIntExtra(EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_WIDTH, 0);
      int videoOutHeight = intent.getIntExtra(EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT, 0);
      try {
        videoFileRenderer = new VideoFileRenderer(
            saveRemoteVideoToFile, videoOutWidth, videoOutHeight, eglBase.getEglBaseContext());
        remoteSinks.add(videoFileRenderer);
      } catch (IOException e) {
        throw new RuntimeException(
            "Failed to open video file for output: " + saveRemoteVideoToFile, e);
      }
    }
    fullscreenRenderer.init(eglBase.getEglBaseContext(), null);
    fullscreenRenderer.setScalingType(ScalingType.SCALE_ASPECT_FILL);
    pipRenderer.setZOrderMediaOverlay(true);
    pipRenderer.setEnableHardwareScaler(true /* enabled */);
    fullscreenRenderer.setEnableHardwareScaler(false /* enabled */);
    // Start with local feed in fullscreen and swap it to the pip when the call is connected.
    setSwappedFeeds(true /* isSwappedFeeds */);
    // Check for mandatory permissions.
    for (String permission : MANDATORY_PERMISSIONS) {
      if (checkCallingOrSelfPermission(permission) != PackageManager.PERMISSION_GRANTED) {
        logAndToast("Permission " + permission + " is not granted");
        setResult(RESULT_CANCELED);
        finish();
        return;
      }
    }
    // Abort early if the launching intent is missing the room URL or room ID.
    Uri roomUri = intent.getData();
    if (roomUri == null) {
      logAndToast(getString(R.string.missing_url));
      Log.e(TAG, "Didn't get any URL in intent!");
      setResult(RESULT_CANCELED);
      finish();
      return;
    }
    // Get Intent parameters.
    String roomId = intent.getStringExtra(EXTRA_ROOMID);
    Log.d(TAG, "Room ID: " + roomId);
    if (roomId == null || roomId.length() == 0) {
      logAndToast(getString(R.string.missing_url));
      Log.e(TAG, "Incorrect room ID in intent!");
      setResult(RESULT_CANCELED);
      finish();
      return;
    }
    boolean loopback = intent.getBooleanExtra(EXTRA_LOOPBACK, false);
    boolean tracing = intent.getBooleanExtra(EXTRA_TRACING, false);
    int videoWidth = intent.getIntExtra(EXTRA_VIDEO_WIDTH, 0);
    int videoHeight = intent.getIntExtra(EXTRA_VIDEO_HEIGHT, 0);
    screencaptureEnabled = intent.getBooleanExtra(EXTRA_SCREENCAPTURE, false);
    // If capturing format is not specified for screencapture, use screen resolution.
    if (screencaptureEnabled && videoWidth == 0 && videoHeight == 0) {
      DisplayMetrics displayMetrics = getDisplayMetrics();
      videoWidth = displayMetrics.widthPixels;
      videoHeight = displayMetrics.heightPixels;
    }
    DataChannelParameters dataChannelParameters = null;
    if (intent.getBooleanExtra(EXTRA_DATA_CHANNEL_ENABLED, false)) {
      dataChannelParameters = new DataChannelParameters(intent.getBooleanExtra(EXTRA_ORDERED, true),
          intent.getIntExtra(EXTRA_MAX_RETRANSMITS_MS, -1),
          intent.getIntExtra(EXTRA_MAX_RETRANSMITS, -1), intent.getStringExtra(EXTRA_PROTOCOL),
          intent.getBooleanExtra(EXTRA_NEGOTIATED, false), intent.getIntExtra(EXTRA_ID, -1));
    }
    // Bundle every EXTRA_* knob into a single immutable parameter object.
    peerConnectionParameters =
        new PeerConnectionParameters(intent.getBooleanExtra(EXTRA_VIDEO_CALL, true), loopback,
            tracing, videoWidth, videoHeight, intent.getIntExtra(EXTRA_VIDEO_FPS, 0),
            intent.getIntExtra(EXTRA_VIDEO_BITRATE, 0), intent.getStringExtra(EXTRA_VIDEOCODEC),
            intent.getBooleanExtra(EXTRA_HWCODEC_ENABLED, true),
            intent.getBooleanExtra(EXTRA_FLEXFEC_ENABLED, false),
            intent.getIntExtra(EXTRA_AUDIO_BITRATE, 0), intent.getStringExtra(EXTRA_AUDIOCODEC),
            intent.getBooleanExtra(EXTRA_NOAUDIOPROCESSING_ENABLED, false),
            intent.getBooleanExtra(EXTRA_AECDUMP_ENABLED, false),
            intent.getBooleanExtra(EXTRA_SAVE_INPUT_AUDIO_TO_FILE_ENABLED, false),
            intent.getBooleanExtra(EXTRA_OPENSLES_ENABLED, false),
            intent.getBooleanExtra(EXTRA_DISABLE_BUILT_IN_AEC, false),
            intent.getBooleanExtra(EXTRA_DISABLE_BUILT_IN_AGC, false),
            intent.getBooleanExtra(EXTRA_DISABLE_BUILT_IN_NS, false),
            intent.getBooleanExtra(EXTRA_DISABLE_WEBRTC_AGC_AND_HPF, false),
            intent.getBooleanExtra(EXTRA_ENABLE_RTCEVENTLOG, false),
            intent.getBooleanExtra(EXTRA_USE_LEGACY_AUDIO_DEVICE, false), dataChannelParameters);
    commandLineRun = intent.getBooleanExtra(EXTRA_CMDLINE, false);
    int runTimeMs = intent.getIntExtra(EXTRA_RUNTIME, 0);
    Log.d(TAG, "VIDEO_FILE: '" + intent.getStringExtra(EXTRA_VIDEO_FILE_AS_CAMERA) + "'");
    // Create connection client. Use DirectRTCClient if room name is an IP otherwise use the
    // standard WebSocketRTCClient.
    if (loopback || !DirectRTCClient.IP_PATTERN.matcher(roomId).matches()) {
      appRtcClient = new WebSocketRTCClient(this);
    } else {
      Log.i(TAG, "Using DirectRTCClient because room name looks like an IP.");
      appRtcClient = new DirectRTCClient(this);
    }
    // Create connection parameters.
    String urlParameters = intent.getStringExtra(EXTRA_URLPARAMETERS);
    roomConnectionParameters =
        new RoomConnectionParameters(roomUri.toString(), roomId, loopback, urlParameters);
    // For command line execution run connection for <runTimeMs> and exit.
    if (commandLineRun && runTimeMs > 0) {
      (new Handler()).postDelayed(new Runnable() {
        @Override
        public void run() {
          disconnect();
        }
      }, runTimeMs);
    }
    // Create peer connection client.
    peerConnectionClient = new PeerConnectionClient(
        getApplicationContext(), eglBase, peerConnectionParameters, BaseCallActivity.this);
    PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
    if (loopback) {
      options.networkIgnoreMask = 0;
    }
    peerConnectionClient.createPeerConnectionFactory(options);
    // Screen capture must first obtain a MediaProjection grant via onActivityResult.
    if (screencaptureEnabled) {
      startScreenCapture();
    } else {
      startCall();
    }
  }
@TargetApi(17)
private DisplayMetrics getDisplayMetrics() {
DisplayMetrics displayMetrics = new DisplayMetrics();
WindowManager windowManager =
(WindowManager) getApplication().getSystemService(Context.WINDOW_SERVICE);
windowManager.getDefaultDisplay().getRealMetrics(displayMetrics);
return displayMetrics;
}
@TargetApi(19)
private static int getSystemUiVisibility() {
int flags = View.SYSTEM_UI_FLAG_HIDE_NAVIGATION | View.SYSTEM_UI_FLAG_FULLSCREEN;
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
flags |= View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY;
}
return flags;
}
@TargetApi(21)
private void startScreenCapture() {
MediaProjectionManager mediaProjectionManager =
(MediaProjectionManager) getApplication().getSystemService(
Context.MEDIA_PROJECTION_SERVICE);
startActivityForResult(
mediaProjectionManager.createScreenCaptureIntent(), CAPTURE_PERMISSION_REQUEST_CODE);
}
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
if (requestCode != CAPTURE_PERMISSION_REQUEST_CODE)
return;
mediaProjectionPermissionResultCode = resultCode;
mediaProjectionPermissionResultData = data;
startCall();
}
private boolean useCamera2() {
return Camera2Enumerator.isSupported(this) && getIntent().getBooleanExtra(EXTRA_CAMERA2, true);
}
private boolean captureToTexture() {
return getIntent().getBooleanExtra(EXTRA_CAPTURETOTEXTURE_ENABLED, false);
}
private @Nullable VideoCapturer createCameraCapturer(CameraEnumerator enumerator) {
final String[] deviceNames = enumerator.getDeviceNames();
// First, try to find front facing camera
Logging.d(TAG, "Looking for front facing cameras.");
for (String deviceName : deviceNames) {
if (enumerator.isFrontFacing(deviceName)) {
Logging.d(TAG, "Creating front facing camera capturer.");
VideoCapturer videoCapturer = enumerator.createCapturer(deviceName, null);
if (videoCapturer != null) {
return videoCapturer;
}
}
}
// Front facing camera not found, try something else
Logging.d(TAG, "Looking for other cameras.");
for (String deviceName : deviceNames) {
if (!enumerator.isFrontFacing(deviceName)) {
Logging.d(TAG, "Creating other camera capturer.");
VideoCapturer videoCapturer = enumerator.createCapturer(deviceName, null);
if (videoCapturer != null) {
return videoCapturer;
}
}
}
return null;
}
  @TargetApi(21)
  // Builds a screen capturer from the MediaProjection permission result stored by
  // onActivityResult(). Returns null (and reports an error) if permission was denied.
  private @Nullable VideoCapturer createScreenCapturer() {
    if (mediaProjectionPermissionResultCode != Activity.RESULT_OK) {
      reportError("User didn't give permission to capture the screen.");
      return null;
    }
    return new ScreenCapturerAndroid(
        mediaProjectionPermissionResultData, new MediaProjection.Callback() {
          @Override
          public void onStop() {
            // The user can revoke the projection at any time; treat it as a fatal call error.
            reportError("User revoked permission to capture the screen.");
          }
        });
  }
// Activity interfaces
@Override
public void onStop() {
super.onStop();
activityRunning = false;
// Don't stop the video when using screencapture to allow user to show other apps to the remote
// end.
if (peerConnectionClient != null && !screencaptureEnabled) {
peerConnectionClient.stopVideoSource();
}
}
@Override
public void onStart() {
super.onStart();
activityRunning = true;
// Video is not paused for screencapture. See onPause.
if (peerConnectionClient != null && !screencaptureEnabled) {
peerConnectionClient.startVideoSource();
}
}
  @Override
  protected void onDestroy() {
    // Drop the default uncaught-exception handler (presumably installed earlier in the
    // call setup — not visible in this chunk) so it cannot outlive the activity.
    Thread.setDefaultUncaughtExceptionHandler(null);
    disconnect();
    if (logToast != null) {
      logToast.cancel();
    }
    activityRunning = false;
    super.onDestroy();
  }
// CallFragment.OnCallEvents interface implementation.
@Override
public void onCallHangUp() {
disconnect();
}
@Override
public void onCameraSwitch() {
if (peerConnectionClient != null) {
peerConnectionClient.switchCamera();
}
}
@Override
public void onVideoScalingSwitch(ScalingType scalingType) {
fullscreenRenderer.setScalingType(scalingType);
}
@Override
public void onCaptureFormatChange(int width, int height, int framerate) {
if (peerConnectionClient != null) {
peerConnectionClient.changeCaptureFormat(width, height, framerate);
}
}
@Override
public boolean onToggleMic() {
if (peerConnectionClient != null) {
micEnabled = !micEnabled;
peerConnectionClient.setAudioEnabled(micEnabled);
}
return micEnabled;
}
  // Connects to the signaling room and starts the audio manager. Assumes the
  // room/connection parameters were prepared earlier (in onCreate, outside this chunk).
  private void startCall() {
    if (appRtcClient == null) {
      Log.e(TAG, "AppRTC client is not allocated for a call.");
      return;
    }
    // Remember when the call started so later events can log their delay.
    callStartedTimeMs = System.currentTimeMillis();
    // Start room connection.
    logAndToast(getString(R.string.connecting_to, roomConnectionParameters.roomUrl));
    appRtcClient.connectToRoom(roomConnectionParameters);
    // Create an audio manager that will take care of audio routing,
    // audio modes, audio device enumeration etc.
    audioManager = AppRTCAudioManager.create(getApplicationContext());
    // Store existing audio settings and change audio mode to
    // MODE_IN_COMMUNICATION for best possible VoIP performance.
    Log.d(TAG, "Starting the audio manager...");
    audioManager.start(new AudioManagerEvents() {
      // This method will be called each time the number of available audio
      // devices has changed.
      @Override
      public void onAudioDeviceChanged(
          AudioDevice audioDevice, Set<AudioDevice> availableAudioDevices) {
        onAudioManagerDevicesChanged(audioDevice, availableAudioDevices);
      }
    });
  }
  // Should be called from UI thread. Enables statistics and routes the video feeds
  // once ICE has connected.
  private void callConnected() {
    final long delta = System.currentTimeMillis() - callStartedTimeMs;
    Log.i(TAG, "Call connected: delay=" + delta + "ms");
    if (peerConnectionClient == null || isError) {
      Log.w(TAG, "Call is connected in closed or error state");
      return;
    }
    // Enable statistics callback.
    peerConnectionClient.enableStatsEvents(true, STAT_CALLBACK_PERIOD);
    // Un-swapped: local video in the PiP view, remote video fullscreen.
    setSwappedFeeds(false /* isSwappedFeeds */);
  }
  // This method is called when the audio manager reports audio device change,
  // e.g. from wired headset to speakerphone. Currently log-only.
  private void onAudioManagerDevicesChanged(
      final AudioDevice device, final Set<AudioDevice> availableDevices) {
    Log.d(TAG, "onAudioManagerDevicesChanged: " + availableDevices + ", "
            + "selected: " + device);
    // TODO(henrika): add callback handler.
  }
  // Disconnect from remote resources, dispose of local resources, and exit.
  // The teardown order matters: sinks are detached before their renderers are released,
  // and renderers are released before the peer connection is closed.
  protected void disconnect() {
    activityRunning = false;
    // Detach the proxy sinks first so no frames reach renderers being released below.
    remoteProxyRenderer.setTarget(null);
    localProxyVideoSink.setTarget(null);
    // Leave the signaling room, if still connected.
    if (appRtcClient != null) {
      appRtcClient.disconnectFromRoom();
      appRtcClient = null;
    }
    // Release the video renderers (picture-in-picture, file dump, fullscreen).
    if (pipRenderer != null) {
      pipRenderer.release();
      pipRenderer = null;
    }
    if (videoFileRenderer != null) {
      videoFileRenderer.release();
      videoFileRenderer = null;
    }
    if (fullscreenRenderer != null) {
      fullscreenRenderer.release();
      fullscreenRenderer = null;
    }
    // Close the peer connection after the renderers are gone.
    if (peerConnectionClient != null) {
      peerConnectionClient.close();
      peerConnectionClient = null;
    }
    // Stop the audio manager that was started in startCall().
    if (audioManager != null) {
      audioManager.stop();
      audioManager = null;
    }
    // Report success only when ICE connected and no error was recorded.
    if (iceConnected && !isError) {
      setResult(RESULT_OK);
    } else {
      setResult(RESULT_CANCELED);
    }
    finish();
  }
  // Shows an error dialog and disconnects, or disconnects immediately when there is
  // no foreground UI (command-line run or backgrounded activity).
  private void disconnectWithErrorMessage(final String errorMessage) {
    if (commandLineRun || !activityRunning) {
      Log.e(TAG, "Critical error: " + errorMessage);
      disconnect();
    } else {
      // Let the user acknowledge the error before tearing the call down.
      new AlertDialog.Builder(this)
          .setTitle(getText(R.string.channel_error_title))
          .setMessage(errorMessage)
          .setCancelable(false)
          .setNeutralButton(R.string.ok,
              new DialogInterface.OnClickListener() {
                @Override
                public void onClick(DialogInterface dialog, int id) {
                  dialog.cancel();
                  disconnect();
                }
              })
          .create()
          .show();
    }
  }
// Log |msg| and Toast about it.
private void logAndToast(String msg) {
Log.d(TAG, msg);
if (logToast != null) {
logToast.cancel();
}
logToast = Toast.makeText(this, msg, Toast.LENGTH_SHORT);
logToast.show();
}
private void reportError(final String description) {
runOnUiThread(new Runnable() {
@Override
public void run() {
if (!isError) {
isError = true;
disconnectWithErrorMessage(description);
}
}
});
}
private @Nullable VideoCapturer createVideoCapturer() {
final VideoCapturer videoCapturer;
String videoFileAsCamera = getIntent().getStringExtra(EXTRA_VIDEO_FILE_AS_CAMERA);
if (videoFileAsCamera != null) {
try {
videoCapturer = new FileVideoCapturer(videoFileAsCamera);
} catch (IOException e) {
reportError("Failed to open video file for emulated camera");
return null;
}
} else if (screencaptureEnabled) {
return createScreenCapturer();
} else if (useCamera2()) {
if (!captureToTexture()) {
reportError(getString(R.string.camera2_texture_only_error));
return null;
}
Logging.d(TAG, "Creating capturer using camera2 API.");
videoCapturer = createCameraCapturer(new Camera2Enumerator(this));
} else {
Logging.d(TAG, "Creating capturer using camera1 API.");
videoCapturer = createCameraCapturer(new Camera1Enumerator(captureToTexture()));
}
if (videoCapturer == null) {
reportError("Failed to open camera");
return null;
}
return videoCapturer;
}
  // Routes the local and remote video feeds to the fullscreen/PiP renderers.
  // isSwappedFeeds == false: local in PiP, remote fullscreen (the normal layout).
  private void setSwappedFeeds(boolean isSwappedFeeds) {
    Logging.d(TAG, "setSwappedFeeds: " + isSwappedFeeds);
    this.isSwappedFeeds = isSwappedFeeds;
    localProxyVideoSink.setTarget(isSwappedFeeds ? fullscreenRenderer : pipRenderer);
    remoteProxyRenderer.setTarget(isSwappedFeeds ? pipRenderer : fullscreenRenderer);
    // Mirror whichever renderer currently shows the local (camera) feed.
    fullscreenRenderer.setMirror(isSwappedFeeds);
    pipRenderer.setMirror(!isSwappedFeeds);
  }
// -----Implementation of AppRTCClient.AppRTCSignalingEvents ---------------
// All callbacks are invoked from websocket signaling looper thread and
// are routed to UI thread.
  // UI-thread part of onConnectedToRoom(): creates the peer connection and either
  // sends an offer (initiator) or answers the remote offer.
  private void onConnectedToRoomInternal(final SignalingParameters params) {
    final long delta = System.currentTimeMillis() - callStartedTimeMs;
    signalingParameters = params;
    logAndToast("Creating peer connection, delay=" + delta + "ms");
    VideoCapturer videoCapturer = null;
    if (peerConnectionParameters.videoCallEnabled) {
      videoCapturer = createVideoCapturer();
    }
    peerConnectionClient.createPeerConnection(
        localProxyVideoSink, remoteSinks, videoCapturer, signalingParameters);
    if (signalingParameters.initiator) {
      logAndToast("Creating OFFER...");
      // Create offer. Offer SDP will be sent to answering client in
      // PeerConnectionEvents.onLocalDescription event.
      peerConnectionClient.createOffer();
    } else {
      if (params.offerSdp != null) {
        peerConnectionClient.setRemoteDescription(params.offerSdp);
        logAndToast("Creating ANSWER...");
        // Create answer. Answer SDP will be sent to offering client in
        // PeerConnectionEvents.onLocalDescription event.
        peerConnectionClient.createAnswer();
      }
      if (params.iceCandidates != null) {
        // Add remote ICE candidates from room.
        for (IceCandidate iceCandidate : params.iceCandidates) {
          peerConnectionClient.addRemoteIceCandidate(iceCandidate);
        }
      }
    }
  }
@Override
public void onConnectedToRoom(final SignalingParameters params) {
runOnUiThread(new Runnable() {
@Override
public void run() {
onConnectedToRoomInternal(params);
}
});
}
@Override
public void onRemoteDescription(final SessionDescription sdp) {
final long delta = System.currentTimeMillis() - callStartedTimeMs;
runOnUiThread(new Runnable() {
@Override
public void run() {
if (peerConnectionClient == null) {
Log.e(TAG, "Received remote SDP for non-initilized peer connection.");
return;
}
logAndToast("Received remote " + sdp.type + ", delay=" + delta + "ms");
peerConnectionClient.setRemoteDescription(sdp);
if (!signalingParameters.initiator) {
logAndToast("Creating ANSWER...");
// Create answer. Answer SDP will be sent to offering client in
// PeerConnectionEvents.onLocalDescription event.
peerConnectionClient.createAnswer();
}
}
});
}
@Override
public void onRemoteIceCandidate(final IceCandidate candidate) {
runOnUiThread(new Runnable() {
@Override
public void run() {
if (peerConnectionClient == null) {
Log.e(TAG, "Received ICE candidate for a non-initialized peer connection.");
return;
}
peerConnectionClient.addRemoteIceCandidate(candidate);
}
});
}
  @Override
  public void onRemoteIceCandidatesRemoved(final IceCandidate[] candidates) {
    // Routed to the UI thread; the peer connection may already be gone (see disconnect()).
    runOnUiThread(new Runnable() {
      @Override
      public void run() {
        if (peerConnectionClient == null) {
          Log.e(TAG, "Received ICE candidate removals for a non-initialized peer connection.");
          return;
        }
        peerConnectionClient.removeRemoteIceCandidates(candidates);
      }
    });
  }
@Override
public void onChannelClose() {
runOnUiThread(new Runnable() {
@Override
public void run() {
logAndToast("Remote end hung up; dropping PeerConnection");
disconnect();
}
});
}
  @Override
  public void onChannelError(final String description) {
    // Signaling-channel failures are handled like any other fatal call error.
    reportError(description);
  }
// -----Implementation of PeerConnectionClient.PeerConnectionEvents.---------
// Send local peer connection SDP and ICE candidates to remote party.
// All callbacks are invoked from peer connection client looper thread and
// are routed to UI thread.
@Override
public void onLocalDescription(final SessionDescription sdp) {
final long delta = System.currentTimeMillis() - callStartedTimeMs;
runOnUiThread(new Runnable() {
@Override
public void run() {
if (appRtcClient != null) {
logAndToast("Sending " + sdp.type + ", delay=" + delta + "ms");
if (signalingParameters.initiator) {
appRtcClient.sendOfferSdp(sdp);
} else {
appRtcClient.sendAnswerSdp(sdp);
}
}
if (peerConnectionParameters.videoMaxBitrate > 0) {
Log.d(TAG, "Set video maximum bitrate: " + peerConnectionParameters.videoMaxBitrate);
peerConnectionClient.setVideoMaxBitrate(peerConnectionParameters.videoMaxBitrate);
}
}
});
}
@Override
public void onIceCandidate(final IceCandidate candidate) {
runOnUiThread(new Runnable() {
@Override
public void run() {
if (appRtcClient != null) {
appRtcClient.sendLocalIceCandidate(candidate);
}
}
});
}
  @Override
  public void onIceCandidatesRemoved(final IceCandidate[] candidates) {
    // Forward local candidate removals to the signaling channel, if still connected.
    runOnUiThread(new Runnable() {
      @Override
      public void run() {
        if (appRtcClient != null) {
          appRtcClient.sendLocalIceCandidateRemovals(candidates);
        }
      }
    });
  }
  @Override
  public void onIceConnected() {
    final long delta = System.currentTimeMillis() - callStartedTimeMs;
    runOnUiThread(new Runnable() {
      @Override
      public void run() {
        logAndToast("ICE connected, delay=" + delta + "ms");
        iceConnected = true;
        // Media can now flow: enable stats and route the video feeds.
        callConnected();
      }
    });
  }
  @Override
  public void onIceDisconnected() {
    // An ICE disconnect ends the call rather than attempting a reconnect.
    runOnUiThread(new Runnable() {
      @Override
      public void run() {
        logAndToast("ICE disconnected");
        iceConnected = false;
        disconnect();
      }
    });
  }
  // Intentionally empty: this activity performs its cleanup in disconnect().
  @Override
  public void onPeerConnectionClosed() {}
  @Override
  public void onPeerConnectionError(final String description) {
    // Peer-connection failures are handled like any other fatal call error.
    reportError(description);
  }
}
|
openjdk/jdk8 | 35,841 | hotspot/test/compiler/6340864/TestLongVect.java | /*
* Copyright (c) 2012, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*
*/
/**
* @test
* @bug 6340864
* @summary Implement vectorization optimizations in hotspot-server
*
* @run main/othervm/timeout=400 -Xbatch -Xmx64m TestLongVect
*/
public class TestLongVect {
  // Number of elements in each test array.
  private static final int ARRLEN = 997;
  // Iteration count for the warmup and timing loops.
  private static final int ITERS = 11000;
  // Base value for array initialization; close to Long.MAX_VALUE, presumably so
  // additions wrap around — not stated here.
  private static final long ADD_INIT = Long.MAX_VALUE-500;
  // Operand mask for the and/or/xor tests.
  private static final long BIT_MASK = 0xEC80F731EC80F731L;
  // Scalar operand for the add/sub/mul/div and in-range shift tests.
  private static final int VALUE = 31;
  // Out-of-range shift distance (equal to the bit width of long).
  private static final int SHIFT = 64;
public static void main(String args[]) {
System.out.println("Testing Long vectors");
int errn = test();
if (errn > 0) {
System.err.println("FAILED: " + errn + " errors");
System.exit(97);
}
System.out.println("PASSED");
}
static int test() {
long[] a0 = new long[ARRLEN];
long[] a1 = new long[ARRLEN];
long[] a2 = new long[ARRLEN];
long[] a3 = new long[ARRLEN];
long[] a4 = new long[ARRLEN];
// Initialize
long gold_sum = 0;
for (int i=0; i<ARRLEN; i++) {
long val = (long)(ADD_INIT+i);
gold_sum += val;
a1[i] = val;
a2[i] = (long)VALUE;
a3[i] = (long)-VALUE;
a4[i] = (long)BIT_MASK;
}
System.out.println("Warmup");
for (int i=0; i<ITERS; i++) {
test_sum(a1);
test_addc(a0, a1);
test_addv(a0, a1, (long)VALUE);
test_adda(a0, a1, a2);
test_subc(a0, a1);
test_subv(a0, a1, (long)VALUE);
test_suba(a0, a1, a2);
test_mulc(a0, a1);
test_mulv(a0, a1, (long)VALUE);
test_mula(a0, a1, a2);
test_divc(a0, a1);
test_divv(a0, a1, (long)VALUE);
test_diva(a0, a1, a2);
test_mulc_n(a0, a1);
test_mulv(a0, a1, (long)-VALUE);
test_mula(a0, a1, a3);
test_divc_n(a0, a1);
test_divv(a0, a1, (long)-VALUE);
test_diva(a0, a1, a3);
test_andc(a0, a1);
test_andv(a0, a1, (long)BIT_MASK);
test_anda(a0, a1, a4);
test_orc(a0, a1);
test_orv(a0, a1, (long)BIT_MASK);
test_ora(a0, a1, a4);
test_xorc(a0, a1);
test_xorv(a0, a1, (long)BIT_MASK);
test_xora(a0, a1, a4);
test_sllc(a0, a1);
test_sllv(a0, a1, VALUE);
test_srlc(a0, a1);
test_srlv(a0, a1, VALUE);
test_srac(a0, a1);
test_srav(a0, a1, VALUE);
test_sllc_n(a0, a1);
test_sllv(a0, a1, -VALUE);
test_srlc_n(a0, a1);
test_srlv(a0, a1, -VALUE);
test_srac_n(a0, a1);
test_srav(a0, a1, -VALUE);
test_sllc_o(a0, a1);
test_sllv(a0, a1, SHIFT);
test_srlc_o(a0, a1);
test_srlv(a0, a1, SHIFT);
test_srac_o(a0, a1);
test_srav(a0, a1, SHIFT);
test_sllc_on(a0, a1);
test_sllv(a0, a1, -SHIFT);
test_srlc_on(a0, a1);
test_srlv(a0, a1, -SHIFT);
test_srac_on(a0, a1);
test_srav(a0, a1, -SHIFT);
test_sllc_add(a0, a1);
test_sllv_add(a0, a1, ADD_INIT);
test_srlc_add(a0, a1);
test_srlv_add(a0, a1, ADD_INIT);
test_srac_add(a0, a1);
test_srav_add(a0, a1, ADD_INIT);
test_sllc_and(a0, a1);
test_sllv_and(a0, a1, BIT_MASK);
test_srlc_and(a0, a1);
test_srlv_and(a0, a1, BIT_MASK);
test_srac_and(a0, a1);
test_srav_and(a0, a1, BIT_MASK);
}
// Test and verify results
System.out.println("Verification");
int errn = 0;
{
long sum = test_sum(a1);
if (sum != gold_sum) {
System.err.println("test_sum: " + sum + " != " + gold_sum);
errn++;
}
test_addc(a0, a1);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_addc: ", i, a0[i], (long)((long)(ADD_INIT+i)+VALUE));
}
test_addv(a0, a1, (long)VALUE);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_addv: ", i, a0[i], (long)((long)(ADD_INIT+i)+VALUE));
}
test_adda(a0, a1, a2);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_adda: ", i, a0[i], (long)((long)(ADD_INIT+i)+VALUE));
}
test_subc(a0, a1);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_subc: ", i, a0[i], (long)((long)(ADD_INIT+i)-VALUE));
}
test_subv(a0, a1, (long)VALUE);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_subv: ", i, a0[i], (long)((long)(ADD_INIT+i)-VALUE));
}
test_suba(a0, a1, a2);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_suba: ", i, a0[i], (long)((long)(ADD_INIT+i)-VALUE));
}
test_mulc(a0, a1);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_mulc: ", i, a0[i], (long)((long)(ADD_INIT+i)*VALUE));
}
test_mulv(a0, a1, (long)VALUE);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_mulv: ", i, a0[i], (long)((long)(ADD_INIT+i)*VALUE));
}
test_mula(a0, a1, a2);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_mula: ", i, a0[i], (long)((long)(ADD_INIT+i)*VALUE));
}
test_divc(a0, a1);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_divc: ", i, a0[i], (long)((long)(ADD_INIT+i)/VALUE));
}
test_divv(a0, a1, (long)VALUE);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_divv: ", i, a0[i], (long)((long)(ADD_INIT+i)/VALUE));
}
test_diva(a0, a1, a2);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_diva: ", i, a0[i], (long)((long)(ADD_INIT+i)/VALUE));
}
test_mulc_n(a0, a1);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_mulc_n: ", i, a0[i], (long)((long)(ADD_INIT+i)*(-VALUE)));
}
test_mulv(a0, a1, (long)-VALUE);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_mulv_n: ", i, a0[i], (long)((long)(ADD_INIT+i)*(-VALUE)));
}
test_mula(a0, a1, a3);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_mula_n: ", i, a0[i], (long)((long)(ADD_INIT+i)*(-VALUE)));
}
test_divc_n(a0, a1);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_divc_n: ", i, a0[i], (long)((long)(ADD_INIT+i)/(-VALUE)));
}
test_divv(a0, a1, (long)-VALUE);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_divv_n: ", i, a0[i], (long)((long)(ADD_INIT+i)/(-VALUE)));
}
test_diva(a0, a1, a3);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_diva_n: ", i, a0[i], (long)((long)(ADD_INIT+i)/(-VALUE)));
}
test_andc(a0, a1);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_andc: ", i, a0[i], (long)((long)(ADD_INIT+i)&BIT_MASK));
}
test_andv(a0, a1, (long)BIT_MASK);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_andv: ", i, a0[i], (long)((long)(ADD_INIT+i)&BIT_MASK));
}
test_anda(a0, a1, a4);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_anda: ", i, a0[i], (long)((long)(ADD_INIT+i)&BIT_MASK));
}
test_orc(a0, a1);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_orc: ", i, a0[i], (long)((long)(ADD_INIT+i)|BIT_MASK));
}
test_orv(a0, a1, (long)BIT_MASK);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_orv: ", i, a0[i], (long)((long)(ADD_INIT+i)|BIT_MASK));
}
test_ora(a0, a1, a4);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_ora: ", i, a0[i], (long)((long)(ADD_INIT+i)|BIT_MASK));
}
test_xorc(a0, a1);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_xorc: ", i, a0[i], (long)((long)(ADD_INIT+i)^BIT_MASK));
}
test_xorv(a0, a1, (long)BIT_MASK);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_xorv: ", i, a0[i], (long)((long)(ADD_INIT+i)^BIT_MASK));
}
test_xora(a0, a1, a4);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_xora: ", i, a0[i], (long)((long)(ADD_INIT+i)^BIT_MASK));
}
test_sllc(a0, a1);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_sllc: ", i, a0[i], (long)((long)(ADD_INIT+i)<<VALUE));
}
test_sllv(a0, a1, VALUE);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_sllv: ", i, a0[i], (long)((long)(ADD_INIT+i)<<VALUE));
}
test_srlc(a0, a1);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_srlc: ", i, a0[i], (long)((long)(ADD_INIT+i)>>>VALUE));
}
test_srlv(a0, a1, VALUE);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_srlv: ", i, a0[i], (long)((long)(ADD_INIT+i)>>>VALUE));
}
test_srac(a0, a1);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_srac: ", i, a0[i], (long)((long)(ADD_INIT+i)>>VALUE));
}
test_srav(a0, a1, VALUE);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_srav: ", i, a0[i], (long)((long)(ADD_INIT+i)>>VALUE));
}
test_sllc_n(a0, a1);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_sllc_n: ", i, a0[i], (long)((long)(ADD_INIT+i)<<(-VALUE)));
}
test_sllv(a0, a1, -VALUE);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_sllv_n: ", i, a0[i], (long)((long)(ADD_INIT+i)<<(-VALUE)));
}
test_srlc_n(a0, a1);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_srlc_n: ", i, a0[i], (long)((long)(ADD_INIT+i)>>>(-VALUE)));
}
test_srlv(a0, a1, -VALUE);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_srlv_n: ", i, a0[i], (long)((long)(ADD_INIT+i)>>>(-VALUE)));
}
test_srac_n(a0, a1);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_srac_n: ", i, a0[i], (long)((long)(ADD_INIT+i)>>(-VALUE)));
}
test_srav(a0, a1, -VALUE);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_srav_n: ", i, a0[i], (long)((long)(ADD_INIT+i)>>(-VALUE)));
}
test_sllc_o(a0, a1);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_sllc_o: ", i, a0[i], (long)((long)(ADD_INIT+i)<<SHIFT));
}
test_sllv(a0, a1, SHIFT);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_sllv_o: ", i, a0[i], (long)((long)(ADD_INIT+i)<<SHIFT));
}
test_srlc_o(a0, a1);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_srlc_o: ", i, a0[i], (long)((long)(ADD_INIT+i)>>>SHIFT));
}
test_srlv(a0, a1, SHIFT);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_srlv_o: ", i, a0[i], (long)((long)(ADD_INIT+i)>>>SHIFT));
}
test_srac_o(a0, a1);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_srac_o: ", i, a0[i], (long)((long)(ADD_INIT+i)>>SHIFT));
}
test_srav(a0, a1, SHIFT);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_srav_o: ", i, a0[i], (long)((long)(ADD_INIT+i)>>SHIFT));
}
test_sllc_on(a0, a1);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_sllc_on: ", i, a0[i], (long)((long)(ADD_INIT+i)<<(-SHIFT)));
}
test_sllv(a0, a1, -SHIFT);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_sllv_on: ", i, a0[i], (long)((long)(ADD_INIT+i)<<(-SHIFT)));
}
test_srlc_on(a0, a1);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_srlc_on: ", i, a0[i], (long)((long)(ADD_INIT+i)>>>(-SHIFT)));
}
test_srlv(a0, a1, -SHIFT);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_srlv_on: ", i, a0[i], (long)((long)(ADD_INIT+i)>>>(-SHIFT)));
}
test_srac_on(a0, a1);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_srac_on: ", i, a0[i], (long)((long)(ADD_INIT+i)>>(-SHIFT)));
}
test_srav(a0, a1, -SHIFT);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_srav_on: ", i, a0[i], (long)((long)(ADD_INIT+i)>>(-SHIFT)));
}
test_sllc_add(a0, a1);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_sllc_add: ", i, a0[i], (long)(((long)(ADD_INIT+i) + ADD_INIT)<<VALUE));
}
test_sllv_add(a0, a1, ADD_INIT);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_sllv_add: ", i, a0[i], (long)(((long)(ADD_INIT+i) + ADD_INIT)<<VALUE));
}
test_srlc_add(a0, a1);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_srlc_add: ", i, a0[i], (long)(((long)(ADD_INIT+i) + ADD_INIT)>>>VALUE));
}
test_srlv_add(a0, a1, ADD_INIT);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_srlv_add: ", i, a0[i], (long)(((long)(ADD_INIT+i) + ADD_INIT)>>>VALUE));
}
test_srac_add(a0, a1);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_srac_add: ", i, a0[i], (long)(((long)(ADD_INIT+i) + ADD_INIT)>>VALUE));
}
test_srav_add(a0, a1, ADD_INIT);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_srav_add: ", i, a0[i], (long)(((long)(ADD_INIT+i) + ADD_INIT)>>VALUE));
}
test_sllc_and(a0, a1);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_sllc_and: ", i, a0[i], (long)(((long)(ADD_INIT+i) & BIT_MASK)<<VALUE));
}
test_sllv_and(a0, a1, BIT_MASK);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_sllv_and: ", i, a0[i], (long)(((long)(ADD_INIT+i) & BIT_MASK)<<VALUE));
}
test_srlc_and(a0, a1);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_srlc_and: ", i, a0[i], (long)(((long)(ADD_INIT+i) & BIT_MASK)>>>VALUE));
}
test_srlv_and(a0, a1, BIT_MASK);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_srlv_and: ", i, a0[i], (long)(((long)(ADD_INIT+i) & BIT_MASK)>>>VALUE));
}
test_srac_and(a0, a1);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_srac_and: ", i, a0[i], (long)(((long)(ADD_INIT+i) & BIT_MASK)>>VALUE));
}
test_srav_and(a0, a1, BIT_MASK);
for (int i=0; i<ARRLEN; i++) {
errn += verify("test_srav_and: ", i, a0[i], (long)(((long)(ADD_INIT+i) & BIT_MASK)>>VALUE));
}
}
if (errn > 0)
return errn;
System.out.println("Time");
long start, end;
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_sum(a1);
}
end = System.currentTimeMillis();
System.out.println("test_sum: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_addc(a0, a1);
}
end = System.currentTimeMillis();
System.out.println("test_addc: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_addv(a0, a1, (long)VALUE);
}
end = System.currentTimeMillis();
System.out.println("test_addv: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_adda(a0, a1, a2);
}
end = System.currentTimeMillis();
System.out.println("test_adda: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_subc(a0, a1);
}
end = System.currentTimeMillis();
System.out.println("test_subc: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_subv(a0, a1, (long)VALUE);
}
end = System.currentTimeMillis();
System.out.println("test_subv: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_suba(a0, a1, a2);
}
end = System.currentTimeMillis();
System.out.println("test_suba: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_mulc(a0, a1);
}
end = System.currentTimeMillis();
System.out.println("test_mulc: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_mulv(a0, a1, (long)VALUE);
}
end = System.currentTimeMillis();
System.out.println("test_mulv: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_mula(a0, a1, a2);
}
end = System.currentTimeMillis();
System.out.println("test_mula: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_divc(a0, a1);
}
end = System.currentTimeMillis();
System.out.println("test_divc: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_divv(a0, a1, (long)VALUE);
}
end = System.currentTimeMillis();
System.out.println("test_divv: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_diva(a0, a1, a2);
}
end = System.currentTimeMillis();
System.out.println("test_diva: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_mulc_n(a0, a1);
}
end = System.currentTimeMillis();
System.out.println("test_mulc_n: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_mulv(a0, a1, (long)-VALUE);
}
end = System.currentTimeMillis();
System.out.println("test_mulv_n: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_mula(a0, a1, a3);
}
end = System.currentTimeMillis();
System.out.println("test_mula_n: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_divc_n(a0, a1);
}
end = System.currentTimeMillis();
System.out.println("test_divc_n: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_divv(a0, a1, (long)-VALUE);
}
end = System.currentTimeMillis();
System.out.println("test_divv_n: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_diva(a0, a1, a3);
}
end = System.currentTimeMillis();
System.out.println("test_diva_n: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_andc(a0, a1);
}
end = System.currentTimeMillis();
System.out.println("test_andc: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_andv(a0, a1, (long)BIT_MASK);
}
end = System.currentTimeMillis();
System.out.println("test_andv: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_anda(a0, a1, a4);
}
end = System.currentTimeMillis();
System.out.println("test_anda: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_orc(a0, a1);
}
end = System.currentTimeMillis();
System.out.println("test_orc: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_orv(a0, a1, (long)BIT_MASK);
}
end = System.currentTimeMillis();
System.out.println("test_orv: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_ora(a0, a1, a4);
}
end = System.currentTimeMillis();
System.out.println("test_ora: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_xorc(a0, a1);
}
end = System.currentTimeMillis();
System.out.println("test_xorc: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_xorv(a0, a1, (long)BIT_MASK);
}
end = System.currentTimeMillis();
System.out.println("test_xorv: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_xora(a0, a1, a4);
}
end = System.currentTimeMillis();
System.out.println("test_xora: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_sllc(a0, a1);
}
end = System.currentTimeMillis();
System.out.println("test_sllc: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_sllv(a0, a1, VALUE);
}
end = System.currentTimeMillis();
System.out.println("test_sllv: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_srlc(a0, a1);
}
end = System.currentTimeMillis();
System.out.println("test_srlc: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_srlv(a0, a1, VALUE);
}
end = System.currentTimeMillis();
System.out.println("test_srlv: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_srac(a0, a1);
}
end = System.currentTimeMillis();
System.out.println("test_srac: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_srav(a0, a1, VALUE);
}
end = System.currentTimeMillis();
System.out.println("test_srav: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_sllc_n(a0, a1);
}
end = System.currentTimeMillis();
System.out.println("test_sllc_n: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_sllv(a0, a1, -VALUE);
}
end = System.currentTimeMillis();
System.out.println("test_sllv_n: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_srlc_n(a0, a1);
}
end = System.currentTimeMillis();
System.out.println("test_srlc_n: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_srlv(a0, a1, -VALUE);
}
end = System.currentTimeMillis();
System.out.println("test_srlv_n: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_srac_n(a0, a1);
}
end = System.currentTimeMillis();
System.out.println("test_srac_n: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_srav(a0, a1, -VALUE);
}
end = System.currentTimeMillis();
System.out.println("test_srav_n: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_sllc_o(a0, a1);
}
end = System.currentTimeMillis();
System.out.println("test_sllc_o: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_sllv(a0, a1, SHIFT);
}
end = System.currentTimeMillis();
System.out.println("test_sllv_o: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_srlc_o(a0, a1);
}
end = System.currentTimeMillis();
System.out.println("test_srlc_o: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_srlv(a0, a1, SHIFT);
}
end = System.currentTimeMillis();
System.out.println("test_srlv_o: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_srac_o(a0, a1);
}
end = System.currentTimeMillis();
System.out.println("test_srac_o: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_srav(a0, a1, SHIFT);
}
end = System.currentTimeMillis();
System.out.println("test_srav_o: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_sllc_on(a0, a1);
}
end = System.currentTimeMillis();
System.out.println("test_sllc_on: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_sllv(a0, a1, -SHIFT);
}
end = System.currentTimeMillis();
System.out.println("test_sllv_on: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_srlc_on(a0, a1);
}
end = System.currentTimeMillis();
System.out.println("test_srlc_on: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_srlv(a0, a1, -SHIFT);
}
end = System.currentTimeMillis();
System.out.println("test_srlv_on: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_srac_on(a0, a1);
}
end = System.currentTimeMillis();
System.out.println("test_srac_on: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_srav(a0, a1, -SHIFT);
}
end = System.currentTimeMillis();
System.out.println("test_srav_on: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_sllc_add(a0, a1);
}
end = System.currentTimeMillis();
System.out.println("test_sllc_add: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_sllv_add(a0, a1, ADD_INIT);
}
end = System.currentTimeMillis();
System.out.println("test_sllv_add: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_srlc_add(a0, a1);
}
end = System.currentTimeMillis();
System.out.println("test_srlc_add: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_srlv_add(a0, a1, ADD_INIT);
}
end = System.currentTimeMillis();
System.out.println("test_srlv_add: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_srac_add(a0, a1);
}
end = System.currentTimeMillis();
System.out.println("test_srac_add: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_srav_add(a0, a1, ADD_INIT);
}
end = System.currentTimeMillis();
System.out.println("test_srav_add: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_sllc_and(a0, a1);
}
end = System.currentTimeMillis();
System.out.println("test_sllc_and: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_sllv_and(a0, a1, BIT_MASK);
}
end = System.currentTimeMillis();
System.out.println("test_sllv_and: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_srlc_and(a0, a1);
}
end = System.currentTimeMillis();
System.out.println("test_srlc_and: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_srlv_and(a0, a1, BIT_MASK);
}
end = System.currentTimeMillis();
System.out.println("test_srlv_and: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_srac_and(a0, a1);
}
end = System.currentTimeMillis();
System.out.println("test_srac_and: " + (end - start));
start = System.currentTimeMillis();
for (int i=0; i<ITERS; i++) {
test_srav_and(a0, a1, BIT_MASK);
}
end = System.currentTimeMillis();
System.out.println("test_srav_and: " + (end - start));
return errn;
}
static long test_sum(long[] a1) {
long sum = 0;
for (int i = 0; i < a1.length; i+=1) {
sum += a1[i];
}
return sum;
}
static void test_addc(long[] a0, long[] a1) {
for (int i = 0; i < a0.length; i+=1) {
a0[i] = (long)(a1[i]+VALUE);
}
}
static void test_addv(long[] a0, long[] a1, long b) {
for (int i = 0; i < a0.length; i+=1) {
a0[i] = (long)(a1[i]+b);
}
}
static void test_adda(long[] a0, long[] a1, long[] a2) {
for (int i = 0; i < a0.length; i+=1) {
a0[i] = (long)(a1[i]+a2[i]);
}
}
static void test_subc(long[] a0, long[] a1) {
for (int i = 0; i < a0.length; i+=1) {
a0[i] = (long)(a1[i]-VALUE);
}
}
static void test_subv(long[] a0, long[] a1, long b) {
for (int i = 0; i < a0.length; i+=1) {
a0[i] = (long)(a1[i]-b);
}
}
static void test_suba(long[] a0, long[] a1, long[] a2) {
for (int i = 0; i < a0.length; i+=1) {
a0[i] = (long)(a1[i]-a2[i]);
}
}
static void test_mulc(long[] a0, long[] a1) {
for (int i = 0; i < a0.length; i+=1) {
a0[i] = (long)(a1[i]*VALUE);
}
}
static void test_mulc_n(long[] a0, long[] a1) {
for (int i = 0; i < a0.length; i+=1) {
a0[i] = (long)(a1[i]*(-VALUE));
}
}
static void test_mulv(long[] a0, long[] a1, long b) {
for (int i = 0; i < a0.length; i+=1) {
a0[i] = (long)(a1[i]*b);
}
}
static void test_mula(long[] a0, long[] a1, long[] a2) {
for (int i = 0; i < a0.length; i+=1) {
a0[i] = (long)(a1[i]*a2[i]);
}
}
static void test_divc(long[] a0, long[] a1) {
for (int i = 0; i < a0.length; i+=1) {
a0[i] = (long)(a1[i]/VALUE);
}
}
static void test_divc_n(long[] a0, long[] a1) {
for (int i = 0; i < a0.length; i+=1) {
a0[i] = (long)(a1[i]/(-VALUE));
}
}
static void test_divv(long[] a0, long[] a1, long b) {
for (int i = 0; i < a0.length; i+=1) {
a0[i] = (long)(a1[i]/b);
}
}
static void test_diva(long[] a0, long[] a1, long[] a2) {
for (int i = 0; i < a0.length; i+=1) {
a0[i] = (long)(a1[i]/a2[i]);
}
}
static void test_andc(long[] a0, long[] a1) {
for (int i = 0; i < a0.length; i+=1) {
a0[i] = (long)(a1[i]&BIT_MASK);
}
}
static void test_andv(long[] a0, long[] a1, long b) {
for (int i = 0; i < a0.length; i+=1) {
a0[i] = (long)(a1[i]&b);
}
}
static void test_anda(long[] a0, long[] a1, long[] a2) {
for (int i = 0; i < a0.length; i+=1) {
a0[i] = (long)(a1[i]&a2[i]);
}
}
static void test_orc(long[] a0, long[] a1) {
for (int i = 0; i < a0.length; i+=1) {
a0[i] = (long)(a1[i]|BIT_MASK);
}
}
static void test_orv(long[] a0, long[] a1, long b) {
for (int i = 0; i < a0.length; i+=1) {
a0[i] = (long)(a1[i]|b);
}
}
static void test_ora(long[] a0, long[] a1, long[] a2) {
for (int i = 0; i < a0.length; i+=1) {
a0[i] = (long)(a1[i]|a2[i]);
}
}
static void test_xorc(long[] a0, long[] a1) {
for (int i = 0; i < a0.length; i+=1) {
a0[i] = (long)(a1[i]^BIT_MASK);
}
}
static void test_xorv(long[] a0, long[] a1, long b) {
for (int i = 0; i < a0.length; i+=1) {
a0[i] = (long)(a1[i]^b);
}
}
static void test_xora(long[] a0, long[] a1, long[] a2) {
for (int i = 0; i < a0.length; i+=1) {
a0[i] = (long)(a1[i]^a2[i]);
}
}
static void test_sllc(long[] a0, long[] a1) {
for (int i = 0; i < a0.length; i+=1) {
a0[i] = (long)(a1[i]<<VALUE);
}
}
static void test_sllc_n(long[] a0, long[] a1) {
for (int i = 0; i < a0.length; i+=1) {
a0[i] = (long)(a1[i]<<(-VALUE));
}
}
static void test_sllc_o(long[] a0, long[] a1) {
for (int i = 0; i < a0.length; i+=1) {
a0[i] = (long)(a1[i]<<SHIFT);
}
}
static void test_sllc_on(long[] a0, long[] a1) {
for (int i = 0; i < a0.length; i+=1) {
a0[i] = (long)(a1[i]<<(-SHIFT));
}
}
static void test_sllv(long[] a0, long[] a1, int b) {
for (int i = 0; i < a0.length; i+=1) {
a0[i] = (long)(a1[i]<<b);
}
}
static void test_sllc_add(long[] a0, long[] a1) {
for (int i = 0; i < a0.length; i+=1) {
a0[i] = (long)((a1[i] + ADD_INIT)<<VALUE);
}
}
static void test_sllv_add(long[] a0, long[] a1, long b) {
for (int i = 0; i < a0.length; i+=1) {
a0[i] = (long)((a1[i] + b)<<VALUE);
}
}
static void test_sllc_and(long[] a0, long[] a1) {
for (int i = 0; i < a0.length; i+=1) {
a0[i] = (long)((a1[i] & BIT_MASK)<<VALUE);
}
}
static void test_sllv_and(long[] a0, long[] a1, long b) {
for (int i = 0; i < a0.length; i+=1) {
a0[i] = (long)((a1[i] & b)<<VALUE);
}
}
static void test_srlc(long[] a0, long[] a1) {
for (int i = 0; i < a0.length; i+=1) {
a0[i] = (long)(a1[i]>>>VALUE);
}
}
static void test_srlc_n(long[] a0, long[] a1) {
for (int i = 0; i < a0.length; i+=1) {
a0[i] = (long)(a1[i]>>>(-VALUE));
}
}
static void test_srlc_o(long[] a0, long[] a1) {
for (int i = 0; i < a0.length; i+=1) {
a0[i] = (long)(a1[i]>>>SHIFT);
}
}
static void test_srlc_on(long[] a0, long[] a1) {
for (int i = 0; i < a0.length; i+=1) {
a0[i] = (long)(a1[i]>>>(-SHIFT));
}
}
static void test_srlv(long[] a0, long[] a1, int b) {
for (int i = 0; i < a0.length; i+=1) {
a0[i] = (long)(a1[i]>>>b);
}
}
static void test_srlc_add(long[] a0, long[] a1) {
for (int i = 0; i < a0.length; i+=1) {
a0[i] = (long)((a1[i] + ADD_INIT)>>>VALUE);
}
}
static void test_srlv_add(long[] a0, long[] a1, long b) {
for (int i = 0; i < a0.length; i+=1) {
a0[i] = (long)((a1[i] + b)>>>VALUE);
}
}
static void test_srlc_and(long[] a0, long[] a1) {
for (int i = 0; i < a0.length; i+=1) {
a0[i] = (long)((a1[i] & BIT_MASK)>>>VALUE);
}
}
static void test_srlv_and(long[] a0, long[] a1, long b) {
for (int i = 0; i < a0.length; i+=1) {
a0[i] = (long)((a1[i] & b)>>>VALUE);
}
}
static void test_srac(long[] a0, long[] a1) {
for (int i = 0; i < a0.length; i+=1) {
a0[i] = (long)(a1[i]>>VALUE);
}
}
static void test_srac_n(long[] a0, long[] a1) {
for (int i = 0; i < a0.length; i+=1) {
a0[i] = (long)(a1[i]>>(-VALUE));
}
}
static void test_srac_o(long[] a0, long[] a1) {
for (int i = 0; i < a0.length; i+=1) {
a0[i] = (long)(a1[i]>>SHIFT);
}
}
static void test_srac_on(long[] a0, long[] a1) {
for (int i = 0; i < a0.length; i+=1) {
a0[i] = (long)(a1[i]>>(-SHIFT));
}
}
static void test_srav(long[] a0, long[] a1, int b) {
for (int i = 0; i < a0.length; i+=1) {
a0[i] = (long)(a1[i]>>b);
}
}
static void test_srac_add(long[] a0, long[] a1) {
for (int i = 0; i < a0.length; i+=1) {
a0[i] = (long)((a1[i] + ADD_INIT)>>VALUE);
}
}
static void test_srav_add(long[] a0, long[] a1, long b) {
for (int i = 0; i < a0.length; i+=1) {
a0[i] = (long)((a1[i] + b)>>VALUE);
}
}
static void test_srac_and(long[] a0, long[] a1) {
for (int i = 0; i < a0.length; i+=1) {
a0[i] = (long)((a1[i] & BIT_MASK)>>VALUE);
}
}
static void test_srav_and(long[] a0, long[] a1, long b) {
for (int i = 0; i < a0.length; i+=1) {
a0[i] = (long)((a1[i] & b)>>VALUE);
}
}
static int verify(String text, int i, long elem, long val) {
if (elem != val) {
System.err.println(text + "[" + i + "] = " + elem + " != " + val);
return 1;
}
return 0;
}
}
|
google/agera | 36,197 | extensions/rvdatabinding/src/test/java/com/google/android/agera/rvdatabinding/DataBindingRepositoryPresentersTest.java | /*
* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.agera.rvdatabinding;
import static android.databinding.DataBinderMapper.setDataBinding;
import static com.google.android.agera.Result.failure;
import static com.google.android.agera.Result.present;
import static com.google.android.agera.Result.success;
import static com.google.android.agera.rvdatabinding.DataBindingRepositoryPresenters.dataBindingRepositoryPresenterOf;
import static com.google.android.agera.rvdatabinding.RecycleConfig.CLEAR_ALL;
import static com.google.android.agera.rvdatabinding.RecycleConfig.CLEAR_COLLECTION;
import static com.google.android.agera.rvdatabinding.RecycleConfig.CLEAR_HANDLERS;
import static com.google.android.agera.rvdatabinding.RecycleConfig.CLEAR_ITEM;
import static com.google.android.agera.rvdatabinding.RecycleConfig.DO_NOTHING;
import static com.google.android.agera.rvdatabinding.test.VerifyingWrappers.verifyingWrapper;
import static com.google.android.agera.rvdatabinding.test.matchers.HasPrivateConstructor.hasPrivateConstructor;
import static java.lang.String.valueOf;
import static java.util.Arrays.asList;
import static java.util.Collections.singletonList;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;
import static org.mockito.MockitoAnnotations.initMocks;
import android.databinding.ViewDataBinding;
import android.support.annotation.LayoutRes;
import android.support.annotation.NonNull;
import android.support.v7.util.DiffUtil;
import android.support.v7.util.ListUpdateCallback;
import android.support.v7.widget.RecyclerView.ViewHolder;
import android.view.View;
import com.google.android.agera.Function;
import com.google.android.agera.Functions;
import com.google.android.agera.Result;
import com.google.android.agera.rvadapter.RepositoryPresenter;
import com.google.android.agera.rvdatabinding.test.DiffingLogic;
import java.util.List;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.robolectric.RobolectricTestRunner;
import org.robolectric.annotation.Config;
@RunWith(RobolectricTestRunner.class)
@Config(manifest = Config.NONE)
public class DataBindingRepositoryPresentersTest {
  // Fixture data shared across the test methods.
  private static final String STRING = "string";
  private static final String FIRST_STRING_CHARACTER = "s";
  private static final String SECOND_STRING = "string2";
  private static final Result<String> STRING_RESULT = present(STRING);
  private static final List<String> STRING_LIST = asList(STRING, SECOND_STRING);
  private static final Result<List<String>> STRING_LIST_RESULT = success(STRING_LIST);
  private static final Result<String> FAILURE = failure();
  private static final Result<List<String>> LIST_FAILURE = failure();
  // Opaque handler objects bound via handler(id, handler).
  private static final Object HANDLER = new Object();
  private static final Object SECOND_HANDLER = new Object();
  // Layout resources and data-binding variable ids used with setVariable.
  @LayoutRes
  private static final int LAYOUT_ID = 1;
  private static final int DYNAMIC_LAYOUT_ID = 2;
  private static final int ITEM_ID = 3;
  private static final int HANDLER_ID = 4;
  private static final int SECOND_HANDLER_ID = 5;
  private static final int COLLECTION_ID = 6;
  private static final long STABLE_ID = 2;
  // Mocks initialized by initMocks(this) in setUp().
  @Mock
  private Function<String, Integer> layoutForItem;
  @Mock
  private Function<String, Integer> itemIdForItem;
  @Mock
  private ViewDataBinding viewDataBinding;
  @Mock
  private View view;
  @Mock
  private ListUpdateCallback listUpdateCallback;
  private ViewHolder viewHolder;
@Before
public void setUp() {
initMocks(this);
viewHolder = new ViewHolder(view) {};
setDataBinding(viewDataBinding, LAYOUT_ID);
setDataBinding(viewDataBinding, DYNAMIC_LAYOUT_ID);
when(view.getTag()).thenReturn("string");
when(layoutForItem.apply(SECOND_STRING)).thenReturn(DYNAMIC_LAYOUT_ID);
when(itemIdForItem.apply(SECOND_STRING)).thenReturn(ITEM_ID);
}
  /** Binding a present Result sets the item and both handler variables, then flushes bindings. */
  @Test
  public void shouldBindRepositoryPresenterOfResult() {
    final RepositoryPresenter<Result<String>> resultRepositoryPresenter =
        dataBindingRepositoryPresenterOf(String.class)
            .layout(LAYOUT_ID)
            .itemId(ITEM_ID)
            .handler(HANDLER_ID, HANDLER)
            .handler(SECOND_HANDLER_ID, SECOND_HANDLER)
            .forResult();
    resultRepositoryPresenter.bind(STRING_RESULT, 0, viewHolder);
    // The item id is tagged on the view so recycling can later clear it.
    verify(view).setTag(R.id.agera__rvdatabinding__item_id, ITEM_ID);
    verify(viewDataBinding).setVariable(ITEM_ID, STRING);
    verify(viewDataBinding).setVariable(HANDLER_ID, HANDLER);
    verify(viewDataBinding).setVariable(SECOND_HANDLER_ID, SECOND_HANDLER);
    verify(viewDataBinding).executePendingBindings();
    verifyNoMoreInteractions(viewDataBinding);
  }
  /** Without an itemId, bind only executes pending bindings — no variables are set. */
  @Test
  public void shouldBindRepositoryPresenterWithoutItem() {
    final RepositoryPresenter<String> repositoryPresenter =
        dataBindingRepositoryPresenterOf(String.class)
            .layout(LAYOUT_ID)
            .forItem();
    repositoryPresenter.bind(STRING, 0, viewHolder);
    verify(viewDataBinding).executePendingBindings();
    verifyNoMoreInteractions(viewDataBinding);
  }
  /** forCollection binds the converted element (first char of the string) as the item variable. */
  @Test
  public void shouldBindRepositoryPresenterOfCollection() {
    final RepositoryPresenter<String> repositoryPresenter =
        dataBindingRepositoryPresenterOf(String.class)
            .layout(LAYOUT_ID)
            .itemId(ITEM_ID)
            .forCollection(new Function<String, List<String>>() {
              @NonNull
              @Override
              public List<String> apply(@NonNull final String input) {
                return singletonList(valueOf(input.charAt(0)));
              }
            });
    repositoryPresenter.bind(STRING, 0, viewHolder);
    verify(viewDataBinding).setVariable(ITEM_ID, FIRST_STRING_CHARACTER);
    verify(viewDataBinding).executePendingBindings();
    verifyNoMoreInteractions(viewDataBinding);
  }
  /** With a collectionId, bind also exposes the whole source collection value as a variable. */
  @Test
  public void shouldBindRepositoryPresenterCollectionOfCollection() {
    final RepositoryPresenter<String> repositoryPresenter =
        dataBindingRepositoryPresenterOf(String.class)
            .layout(LAYOUT_ID)
            .itemId(ITEM_ID)
            .collectionId(COLLECTION_ID)
            .forCollection(new StringToFirstCharStringList());
    repositoryPresenter.bind(STRING, 0, viewHolder);
    verify(viewDataBinding).setVariable(ITEM_ID, FIRST_STRING_CHARACTER)
;
    verify(viewDataBinding).setVariable(COLLECTION_ID, STRING);
    verify(viewDataBinding).executePendingBindings();
    verifyNoMoreInteractions(viewDataBinding);
  }
  /** Recycling with CLEAR_ALL but no itemId/handlers only flushes pending bindings. */
  @Test
  public void shouldHandleRecycleOfRepositoryPresenterWithoutItemId() {
    final RepositoryPresenter<String> repositoryPresenter =
        dataBindingRepositoryPresenterOf(String.class)
            .layout(LAYOUT_ID)
            .onRecycle(CLEAR_ALL)
            .forItem();
    repositoryPresenter.recycle(viewHolder);
    verify(viewDataBinding).executePendingBindings();
    verifyNoMoreInteractions(viewDataBinding);
  }
  /** DO_NOTHING recycle config must not touch the binding at all. */
  @Test
  public void shouldNotRecycleRepositoryPresenterOfResultWithNoRecycling() {
    final RepositoryPresenter<Result<String>> resultRepositoryPresenter =
        dataBindingRepositoryPresenterOf(String.class)
            .layout(LAYOUT_ID)
            .itemId(ITEM_ID)
            .handler(HANDLER_ID, HANDLER)
            .handler(SECOND_HANDLER_ID, SECOND_HANDLER)
            .onRecycle(DO_NOTHING)
            .forResult();
    resultRepositoryPresenter.recycle(viewHolder);
    verifyNoMoreInteractions(viewDataBinding);
  }
  /** CLEAR_ITEM recycling nulls only the item variable (id read back from the view tag). */
  @Test
  public void shouldRecycleRepositoryPresenterOfResultWithItemRecycling() {
    when(view.getTag(R.id.agera__rvdatabinding__item_id)).thenReturn(ITEM_ID);
    final RepositoryPresenter<Result<String>> resultRepositoryPresenter =
        dataBindingRepositoryPresenterOf(String.class)
            .layout(LAYOUT_ID)
            .itemId(ITEM_ID)
            .handler(HANDLER_ID, HANDLER)
            .handler(SECOND_HANDLER_ID, SECOND_HANDLER)
            .onRecycle(CLEAR_ITEM)
            .forResult();
    resultRepositoryPresenter.recycle(viewHolder);
    verify(viewDataBinding).setVariable(ITEM_ID, null);
    verify(viewDataBinding).executePendingBindings();
    verifyNoMoreInteractions(viewDataBinding);
  }
  /** CLEAR_ALL recycling nulls the item and every registered handler variable. */
  @Test
  public void shouldRecycleRepositoryPresenterOfResultWithAllRecycling() {
    when(view.getTag(R.id.agera__rvdatabinding__item_id)).thenReturn(ITEM_ID);
    final RepositoryPresenter<Result<String>> resultRepositoryPresenter =
        dataBindingRepositoryPresenterOf(String.class)
            .layout(LAYOUT_ID)
            .itemId(ITEM_ID)
            .handler(HANDLER_ID, HANDLER)
            .handler(SECOND_HANDLER_ID, SECOND_HANDLER)
            .onRecycle(CLEAR_ALL)
            .forResult();
    resultRepositoryPresenter.recycle(viewHolder);
    verify(viewDataBinding).setVariable(ITEM_ID, null);
    verify(viewDataBinding).setVariable(HANDLER_ID, null);
    verify(viewDataBinding).setVariable(SECOND_HANDLER_ID, null);
    verify(viewDataBinding).executePendingBindings();
    verifyNoMoreInteractions(viewDataBinding);
  }
  /** CLEAR_HANDLERS recycling nulls only the handler variables, leaving the item bound. */
  @Test
  public void shouldRecycleRepositoryPresenterOfResultWithHandlerRecycling() {
    when(view.getTag(R.id.agera__rvdatabinding__item_id)).thenReturn(ITEM_ID);
    final RepositoryPresenter<Result<String>> resultRepositoryPresenter =
        dataBindingRepositoryPresenterOf(String.class)
            .layout(LAYOUT_ID)
            .itemId(ITEM_ID)
            .handler(HANDLER_ID, HANDLER)
            .handler(SECOND_HANDLER_ID, SECOND_HANDLER)
            .onRecycle(CLEAR_HANDLERS)
            .forResult();
    resultRepositoryPresenter.recycle(viewHolder);
    verify(viewDataBinding).setVariable(HANDLER_ID, null);
    verify(viewDataBinding).setVariable(SECOND_HANDLER_ID, null);
    verify(viewDataBinding).executePendingBindings();
    verifyNoMoreInteractions(viewDataBinding);
  }
  /** DO_NOTHING recycle config on a collection presenter leaves the binding untouched. */
  @Test
  public void shouldNotRecycleRepositoryPresenterOfCollectionWithNoRecycling() {
    final RepositoryPresenter<String> resultRepositoryPresenter =
        dataBindingRepositoryPresenterOf(String.class)
            .layout(LAYOUT_ID)
            .itemId(ITEM_ID)
            .handler(HANDLER_ID, HANDLER)
            .handler(SECOND_HANDLER_ID, SECOND_HANDLER)
            .onRecycle(DO_NOTHING)
            .collectionId(COLLECTION_ID)
            .forCollection(new StringToFirstCharStringList());
    resultRepositoryPresenter.recycle(viewHolder);
    verifyNoMoreInteractions(viewDataBinding);
  }
  /** CLEAR_ITEM on a collection presenter nulls only the item variable, not the collection. */
  @Test
  public void shouldRecycleRepositoryPresenterOfCollectionWithItemRecycling() {
    when(view.getTag(R.id.agera__rvdatabinding__item_id)).thenReturn(ITEM_ID);
    final RepositoryPresenter<String> resultRepositoryPresenter =
        dataBindingRepositoryPresenterOf(String.class)
            .layout(LAYOUT_ID)
            .itemId(ITEM_ID)
            .handler(HANDLER_ID, HANDLER)
            .handler(SECOND_HANDLER_ID, SECOND_HANDLER)
            .onRecycle(CLEAR_ITEM)
            .collectionId(COLLECTION_ID)
            .forCollection(new StringToFirstCharStringList());
    resultRepositoryPresenter.recycle(viewHolder);
    verify(viewDataBinding).setVariable(ITEM_ID, null);
    verify(viewDataBinding).executePendingBindings();
    verifyNoMoreInteractions(viewDataBinding);
  }
  /** CLEAR_ALL on a collection presenter nulls item, handlers, and the collection variable. */
  @Test
  public void shouldRecycleRepositoryPresenterOfCollectionWithAllRecycling() {
    when(view.getTag(R.id.agera__rvdatabinding__item_id)).thenReturn(ITEM_ID);
    when(view.getTag(R.id.agera__rvdatabinding__collection_id)).thenReturn(COLLECTION_ID);
    final RepositoryPresenter<String> resultRepositoryPresenter =
        dataBindingRepositoryPresenterOf(String.class)
            .layout(LAYOUT_ID)
            .itemId(ITEM_ID)
            .handler(HANDLER_ID, HANDLER)
            .handler(SECOND_HANDLER_ID, SECOND_HANDLER)
            .onRecycle(CLEAR_ALL)
            .collectionId(COLLECTION_ID)
            .forCollection(new StringToFirstCharStringList());
    resultRepositoryPresenter.recycle(viewHolder);
    verify(viewDataBinding).setVariable(ITEM_ID, null);
    verify(viewDataBinding).setVariable(HANDLER_ID, null);
    verify(viewDataBinding).setVariable(SECOND_HANDLER_ID, null);
    verify(viewDataBinding).setVariable(COLLECTION_ID, null);
    verify(viewDataBinding).executePendingBindings();
    verifyNoMoreInteractions(viewDataBinding);
  }
  /** CLEAR_COLLECTION nulls only the collection variable on recycle. */
  @Test
  public void shouldRecycleRepositoryPresenterOfCollectionWithCollectionRecycling() {
    when(view.getTag(R.id.agera__rvdatabinding__item_id)).thenReturn(ITEM_ID);
    when(view.getTag(R.id.agera__rvdatabinding__collection_id)).thenReturn(COLLECTION_ID);
    final RepositoryPresenter<String> resultRepositoryPresenter =
        dataBindingRepositoryPresenterOf(String.class)
            .layout(LAYOUT_ID)
            .itemId(ITEM_ID)
            .handler(HANDLER_ID, HANDLER)
            .handler(SECOND_HANDLER_ID, SECOND_HANDLER)
            .onRecycle(CLEAR_COLLECTION)
            .collectionId(COLLECTION_ID)
            .forCollection(new StringToFirstCharStringList())
;
    resultRepositoryPresenter.recycle(viewHolder);
    verify(viewDataBinding).setVariable(COLLECTION_ID, null);
    verify(viewDataBinding).executePendingBindings();
    verifyNoMoreInteractions(viewDataBinding);
  }
  /** CLEAR_HANDLERS on a collection presenter nulls only the handler variables. */
  @Test
  public void shouldRecycleRepositoryPresenterOfCollectionWithHandlerRecycling() {
    when(view.getTag(R.id.agera__rvdatabinding__item_id)).thenReturn(ITEM_ID);
    when(view.getTag(R.id.agera__rvdatabinding__collection_id)).thenReturn(COLLECTION_ID);
    final RepositoryPresenter<String> resultRepositoryPresenter =
        dataBindingRepositoryPresenterOf(String.class)
            .layout(LAYOUT_ID)
            .itemId(ITEM_ID)
            .handler(HANDLER_ID, HANDLER)
            .handler(SECOND_HANDLER_ID, SECOND_HANDLER)
            .onRecycle(CLEAR_HANDLERS)
            .collectionId(COLLECTION_ID)
            .forCollection(new StringToFirstCharStringList());
    resultRepositoryPresenter.recycle(viewHolder);
    verify(viewDataBinding).setVariable(HANDLER_ID, null);
    verify(viewDataBinding).setVariable(SECOND_HANDLER_ID, null);
    verify(viewDataBinding).executePendingBindings();
    verifyNoMoreInteractions(viewDataBinding);
  }
  /** Binding index 1 of a successful Result list binds the second element. */
  @Test
  public void shouldBindRepositoryPresenterOfResultList() {
    final RepositoryPresenter<Result<List<String>>> resultListRepositoryPresenter =
        dataBindingRepositoryPresenterOf(String.class)
            .layout(LAYOUT_ID)
            .itemId(ITEM_ID)
            .handler(HANDLER_ID, HANDLER)
            .forResultList();
    resultListRepositoryPresenter.bind(STRING_LIST_RESULT, 1, viewHolder);
    verify(view).setTag(R.id.agera__rvdatabinding__item_id, ITEM_ID);
    verify(viewDataBinding).setVariable(ITEM_ID, SECOND_STRING);
    verify(viewDataBinding).setVariable(HANDLER_ID, HANDLER);
    verify(viewDataBinding).executePendingBindings();
    verifyNoMoreInteractions(viewDataBinding);
  }
  /** DO_NOTHING recycle config on a Result-list presenter leaves the binding untouched. */
  @Test
  public void shouldNotRecycleRepositoryPresenterOfResultListWithNoRecycling() {
    final RepositoryPresenter<Result<List<String>>> resultRepositoryPresenter =
        dataBindingRepositoryPresenterOf(String.class)
            .layout(LAYOUT_ID)
            .itemId(ITEM_ID)
            .handler(HANDLER_ID, HANDLER)
            .handler(SECOND_HANDLER_ID, SECOND_HANDLER)
            .onRecycle(DO_NOTHING)
            .forResultList();
    resultRepositoryPresenter.recycle(viewHolder);
    verifyNoMoreInteractions(viewDataBinding);
  }
  /** CLEAR_ITEM on a Result-list presenter nulls only the item variable. */
  @Test
  public void shouldRecycleRepositoryPresenterOfResultListWithItemRecycling() {
    when(view.getTag(R.id.agera__rvdatabinding__item_id)).thenReturn(ITEM_ID);
    final RepositoryPresenter<Result<List<String>>> resultRepositoryPresenter =
        dataBindingRepositoryPresenterOf(String.class)
            .layout(LAYOUT_ID)
            .itemId(ITEM_ID)
            .handler(HANDLER_ID, HANDLER)
            .handler(SECOND_HANDLER_ID, SECOND_HANDLER)
            .onRecycle(CLEAR_ITEM)
            .forResultList();
    resultRepositoryPresenter.recycle(viewHolder);
    verify(viewDataBinding).setVariable(ITEM_ID, null);
    verify(viewDataBinding).executePendingBindings();
    verifyNoMoreInteractions(viewDataBinding);
  }
  /** CLEAR_ALL on a Result-list presenter nulls the item and all handler variables. */
  @Test
  public void shouldRecycleRepositoryPresenterOfResultListWithAllRecycling() {
    when(view.getTag(R.id.agera__rvdatabinding__item_id)).thenReturn(ITEM_ID);
    final RepositoryPresenter<Result<List<String>>> resultRepositoryPresenter =
        dataBindingRepositoryPresenterOf(String.class)
            .layout(LAYOUT_ID)
            .itemId(ITEM_ID)
            .handler(HANDLER_ID, HANDLER)
            .handler(SECOND_HANDLER_ID, SECOND_HANDLER)
            .onRecycle(CLEAR_ALL)
            .forResultList();
    resultRepositoryPresenter.recycle(viewHolder);
    verify(viewDataBinding).setVariable(ITEM_ID, null);
    verify(viewDataBinding).setVariable(HANDLER_ID, null);
    verify(viewDataBinding).setVariable(SECOND_HANDLER_ID, null);
    verify(viewDataBinding).executePendingBindings();
    verifyNoMoreInteractions(viewDataBinding);
  }
  /** CLEAR_HANDLERS on a Result-list presenter nulls only the handler variables. */
  @Test
  public void shouldRecycleRepositoryPresenterOfResultListWithHandlerRecycling() {
    when(view.getTag(R.id.agera__rvdatabinding__item_id)).thenReturn(ITEM_ID);
    final RepositoryPresenter<Result<List<String>>> resultRepositoryPresenter =
        dataBindingRepositoryPresenterOf(String.class)
            .layout(LAYOUT_ID)
            .itemId(ITEM_ID)
            .handler(HANDLER_ID, HANDLER)
            .handler(SECOND_HANDLER_ID, SECOND_HANDLER)
            .onRecycle(CLEAR_HANDLERS)
            .forResultList();
    resultRepositoryPresenter.recycle(viewHolder);
    verify(viewDataBinding).setVariable(HANDLER_ID, null);
    verify(viewDataBinding).setVariable(SECOND_HANDLER_ID, null);
    verify(viewDataBinding).executePendingBindings();
    verifyNoMoreInteractions(viewDataBinding);
  }
  /** CLEAR_ITEM on a single-item presenter nulls only the item variable. */
  @Test
  public void shouldRecycleRepositoryPresenterOfItemWithItemRecycling() {
    when(view.getTag(R.id.agera__rvdatabinding__item_id)).thenReturn(ITEM_ID);
    final RepositoryPresenter<String> resultRepositoryPresenter =
        dataBindingRepositoryPresenterOf(String.class)
            .layout(LAYOUT_ID)
            .itemId(ITEM_ID)
            .handler(HANDLER_ID, HANDLER)
            .handler(SECOND_HANDLER_ID, SECOND_HANDLER)
            .onRecycle(CLEAR_ITEM)
            .forItem();
    resultRepositoryPresenter.recycle(viewHolder);
    verify(viewDataBinding).setVariable(ITEM_ID, null);
    verify(viewDataBinding).executePendingBindings();
    verifyNoMoreInteractions(viewDataBinding);
  }
  /** CLEAR_ALL on a single-item presenter nulls the item and all handler variables. */
  @Test
  public void shouldRecycleRepositoryPresenterOfItemWithAllRecycling() {
    when(view.getTag(R.id.agera__rvdatabinding__item_id)).thenReturn(ITEM_ID);
    final RepositoryPresenter<String> resultRepositoryPresenter =
        dataBindingRepositoryPresenterOf(String.class)
            .layout(LAYOUT_ID)
            .itemId(ITEM_ID)
            .handler(HANDLER_ID, HANDLER)
            .handler(SECOND_HANDLER_ID, SECOND_HANDLER)
            .onRecycle(CLEAR_ALL)
            .forItem();
    resultRepositoryPresenter.recycle(viewHolder);
    verify(viewDataBinding).setVariable(ITEM_ID, null);
    verify(viewDataBinding).setVariable(HANDLER_ID, null);
    verify(viewDataBinding).setVariable(SECOND_HANDLER_ID, null);
    verify(viewDataBinding).executePendingBindings();
    verifyNoMoreInteractions(viewDataBinding);
  }
  /** CLEAR_HANDLERS on a single-item presenter nulls only the handler variables. */
  @Test
  public void shouldRecycleRepositoryPresenterOfItemWithHandlerRecycling() {
    when(view.getTag(R.id.agera__rvdatabinding__item_id)).thenReturn(ITEM_ID);
    final RepositoryPresenter<String> resultRepositoryPresenter =
        dataBindingRepositoryPresenterOf(String.class)
            .layout(LAYOUT_ID)
            .itemId(ITEM_ID)
            .handler(HANDLER_ID, HANDLER)
            .handler(SECOND_HANDLER_ID, SECOND_HANDLER)
            .onRecycle(CLEAR_HANDLERS)
            .forItem();
    resultRepositoryPresenter.recycle(viewHolder);
    verify(viewDataBinding).setVariable(HANDLER_ID, null);
    verify(viewDataBinding).setVariable(SECOND_HANDLER_ID, null);
    verify(viewDataBinding).executePendingBindings();
    verifyNoMoreInteractions(viewDataBinding);
  }
@Test
public void shouldBindRepositoryPresenterOfItem() {
final RepositoryPresenter<String> itemRepositoryPresenter =
dataBindingRepositoryPresenterOf(String.class)
.layout(LAYOUT_ID)
.itemId(ITEM_ID)
.forItem();
itemRepositoryPresenter.bind(STRING, 0, viewHolder);
}
@Test
public void shouldBindRepositoryPresenterOfList() {
final RepositoryPresenter<List<String>> listRepositoryPresenter =
dataBindingRepositoryPresenterOf(String.class)
.layout(LAYOUT_ID)
.itemId(ITEM_ID)
.forList();
listRepositoryPresenter.bind(STRING_LIST, 1, viewHolder);
verify(view).setTag(R.id.agera__rvdatabinding__item_id, ITEM_ID);
verify(viewDataBinding).setVariable(ITEM_ID, SECOND_STRING);
verify(viewDataBinding).executePendingBindings();
verifyNoMoreInteractions(viewDataBinding);
}
@Test
public void shouldNotRecycleRepositoryPresenterOfListWithNoRecycling() {
final RepositoryPresenter<List<String>> resultRepositoryPresenter =
dataBindingRepositoryPresenterOf(String.class)
.layout(LAYOUT_ID)
.itemId(ITEM_ID)
.handler(HANDLER_ID, HANDLER)
.handler(SECOND_HANDLER_ID, SECOND_HANDLER)
.onRecycle(DO_NOTHING)
.forList();
resultRepositoryPresenter.recycle(viewHolder);
verifyNoMoreInteractions(viewDataBinding);
}
@Test
public void shouldRecycleRepositoryPresenterOfListWithItemRecycling() {
when(view.getTag(R.id.agera__rvdatabinding__item_id)).thenReturn(ITEM_ID);
final RepositoryPresenter<List<String>> resultRepositoryPresenter =
dataBindingRepositoryPresenterOf(String.class)
.layout(LAYOUT_ID)
.itemId(ITEM_ID)
.handler(HANDLER_ID, HANDLER)
.handler(SECOND_HANDLER_ID, SECOND_HANDLER)
.onRecycle(CLEAR_ITEM)
.forList();
resultRepositoryPresenter.recycle(viewHolder);
verify(viewDataBinding).setVariable(ITEM_ID, null);
verify(viewDataBinding).executePendingBindings();
verifyNoMoreInteractions(viewDataBinding);
}
@Test
public void shouldRecycleRepositoryPresenterOfListWithAllRecycling() {
when(view.getTag(R.id.agera__rvdatabinding__item_id)).thenReturn(ITEM_ID);
final RepositoryPresenter<List<String>> resultRepositoryPresenter =
dataBindingRepositoryPresenterOf(String.class)
.layout(LAYOUT_ID)
.itemId(ITEM_ID)
.handler(HANDLER_ID, HANDLER)
.handler(SECOND_HANDLER_ID, SECOND_HANDLER)
.onRecycle(CLEAR_ALL)
.forList();
resultRepositoryPresenter.recycle(viewHolder);
verify(viewDataBinding).setVariable(ITEM_ID, null);
verify(viewDataBinding).setVariable(HANDLER_ID, null);
verify(viewDataBinding).setVariable(SECOND_HANDLER_ID, null);
verify(viewDataBinding).executePendingBindings();
verifyNoMoreInteractions(viewDataBinding);
}
@Test
public void shouldRecycleRepositoryPresenterOfListWithHandlerRecycling() {
when(view.getTag(R.id.agera__rvdatabinding__item_id)).thenReturn(ITEM_ID);
final RepositoryPresenter<List<String>> resultRepositoryPresenter =
dataBindingRepositoryPresenterOf(String.class)
.layout(LAYOUT_ID)
.itemId(ITEM_ID)
.handler(HANDLER_ID, HANDLER)
.handler(SECOND_HANDLER_ID, SECOND_HANDLER)
.onRecycle(CLEAR_HANDLERS)
.forList();
resultRepositoryPresenter.recycle(viewHolder);
verify(viewDataBinding).setVariable(HANDLER_ID, null);
verify(viewDataBinding).setVariable(SECOND_HANDLER_ID, null);
verify(viewDataBinding).executePendingBindings();
verifyNoMoreInteractions(viewDataBinding);
}
@Test
public void shouldReturnZeroForCountOfRepositoryPresenterOfFailedResult() {
final RepositoryPresenter<Result<String>> resultRepositoryPresenter =
dataBindingRepositoryPresenterOf(String.class)
.layout(LAYOUT_ID)
.itemId(ITEM_ID)
.handler(HANDLER_ID, HANDLER)
.forResult();
assertThat(resultRepositoryPresenter.getItemCount(FAILURE), is(0));
}
@Test
public void shouldReturnOneForCountOfRepositoryPresenterOfSuccessfulResult() {
final RepositoryPresenter<Result<String>> resultRepositoryPresenter =
dataBindingRepositoryPresenterOf(String.class)
.layout(LAYOUT_ID)
.itemId(ITEM_ID)
.forResult();
assertThat(resultRepositoryPresenter.getItemCount(STRING_RESULT), is(1));
}
@Test
public void shouldReturnListSizeForCountOfRepositoryPresenterOfList() {
final RepositoryPresenter<List<String>> listRepositoryPresenter =
dataBindingRepositoryPresenterOf(String.class)
.layout(LAYOUT_ID)
.itemId(ITEM_ID)
.forList();
assertThat(listRepositoryPresenter.getItemCount(STRING_LIST), is(STRING_LIST.size()));
}
@Test
public void shouldReturnZeroForCountOfRepositoryPresenterOfFailedResultList() {
final RepositoryPresenter<Result<List<String>>> resultListRepositoryPresenter =
dataBindingRepositoryPresenterOf(String.class)
.layout(LAYOUT_ID)
.itemId(ITEM_ID)
.forResultList();
assertThat(resultListRepositoryPresenter.getItemCount(LIST_FAILURE), is(0));
}
@Test
public void shouldReturnListSizeForCountOfRepositoryPresenterOfSuccessfulResultList() {
final RepositoryPresenter<Result<List<String>>> resultListRepositoryPresenter =
dataBindingRepositoryPresenterOf(String.class)
.layout(LAYOUT_ID)
.itemId(ITEM_ID)
.forResultList();
assertThat(resultListRepositoryPresenter.getItemCount(STRING_LIST_RESULT),
is(STRING_LIST.size()));
}
@Test
public void shouldGenerateLayoutForItemOfRepositoryPresenterOfResultList() {
final RepositoryPresenter<Result<List<String>>> resultListRepositoryPresenter =
dataBindingRepositoryPresenterOf(String.class)
.layoutForItem(layoutForItem)
.itemId(ITEM_ID)
.forResultList();
assertThat(resultListRepositoryPresenter.getLayoutResId(STRING_LIST_RESULT, 1),
is(DYNAMIC_LAYOUT_ID));
}
@Test
public void shouldGenerateItemIdForItemOfRepositoryPresenterOfResultList() {
final RepositoryPresenter<Result<List<String>>> resultListRepositoryPresenter =
dataBindingRepositoryPresenterOf(String.class)
.layout(LAYOUT_ID)
.itemIdForItem(itemIdForItem)
.forResultList();
resultListRepositoryPresenter.bind(STRING_LIST_RESULT, 1, viewHolder);
}
@Test
public void shouldReturnStableIdForRepositoryPresenterOfItem() {
final RepositoryPresenter<String> resultRepositoryPresenter =
dataBindingRepositoryPresenterOf(String.class)
.layout(LAYOUT_ID)
.itemId(ITEM_ID)
.stableIdForItem(Functions.<String, Long>staticFunction(STABLE_ID))
.forItem();
assertThat(resultRepositoryPresenter.getItemId(STRING, 0), is(STABLE_ID));
}
@Test
public void shouldReturnStableIdForRepositoryPresenterOfResult() {
final RepositoryPresenter<Result<String>> resultRepositoryPresenter =
dataBindingRepositoryPresenterOf(String.class)
.layout(LAYOUT_ID)
.itemId(ITEM_ID)
.stableIdForItem(Functions.<String, Long>staticFunction(STABLE_ID))
.forResult();
assertThat(resultRepositoryPresenter.getItemId(STRING_RESULT, 0), is(STABLE_ID));
}
@Test
public void shouldReturnStaticStableIdForRepositoryPresenterOfItem() {
final RepositoryPresenter<String> resultRepositoryPresenter =
dataBindingRepositoryPresenterOf(String.class)
.layout(LAYOUT_ID)
.itemId(ITEM_ID)
.stableId(STABLE_ID)
.forItem();
assertThat(resultRepositoryPresenter.getItemId(STRING, 0), is(STABLE_ID));
}
@Test
public void shouldReturnStaticStableIdForRepositoryPresenterOfResult() {
final RepositoryPresenter<Result<String>> resultRepositoryPresenter =
dataBindingRepositoryPresenterOf(String.class)
.layout(LAYOUT_ID)
.itemId(ITEM_ID)
.stableId(STABLE_ID)
.forResult();
assertThat(resultRepositoryPresenter.getItemId(STRING_RESULT, 0), is(STABLE_ID));
}
@Test
public void shouldReturnStableIdForRepositoryPresenterOfResultList() {
final RepositoryPresenter<Result<List<String>>> resultListRepositoryPresenter =
dataBindingRepositoryPresenterOf(String.class)
.layout(LAYOUT_ID)
.itemId(ITEM_ID)
.stableIdForItem(Functions.<String, Long>staticFunction(STABLE_ID))
.forResultList();
assertThat(resultListRepositoryPresenter.getItemId(STRING_LIST_RESULT, 0), is(STABLE_ID));
}
@Test
public void shouldReturnStableIdForRepositoryPresenterOfList() {
final RepositoryPresenter<List<String>> listRepositoryPresenter =
dataBindingRepositoryPresenterOf(String.class)
.layout(LAYOUT_ID)
.itemId(ITEM_ID)
.stableIdForItem(Functions.<String, Long>staticFunction(STABLE_ID))
.forList();
assertThat(listRepositoryPresenter.getItemId(STRING_LIST, 0), is(STABLE_ID));
}
@Test
public void shouldHandleRebindWithSameData() {
final RepositoryPresenter<String> repositoryPresenter =
dataBindingRepositoryPresenterOf(String.class)
.layout(LAYOUT_ID)
.itemId(ITEM_ID)
.forItem();
repositoryPresenter.bind(STRING, 0, viewHolder);
verify(viewDataBinding).setVariable(ITEM_ID, STRING);
verify(viewDataBinding).executePendingBindings();
verifyNoMoreInteractions(viewDataBinding);
reset(viewDataBinding);
repositoryPresenter.bind(STRING, 0, viewHolder);
verify(viewDataBinding).setVariable(ITEM_ID, STRING);
verify(viewDataBinding).executePendingBindings();
verifyNoMoreInteractions(viewDataBinding);
}
@Test
public void shouldHandleRebindWithNewData() {
final RepositoryPresenter<String> repositoryPresenter =
dataBindingRepositoryPresenterOf(String.class)
.layout(LAYOUT_ID)
.itemId(ITEM_ID)
.forItem();
repositoryPresenter.bind(STRING, 0, viewHolder);
verify(viewDataBinding).setVariable(ITEM_ID, STRING);
verify(viewDataBinding).executePendingBindings();
verifyNoMoreInteractions(viewDataBinding);
reset(viewDataBinding);
repositoryPresenter.bind(SECOND_STRING, 0, viewHolder);
verify(viewDataBinding).setVariable(ITEM_ID, SECOND_STRING);
verify(viewDataBinding).executePendingBindings();
verifyNoMoreInteractions(viewDataBinding);
}
@Test
public void shouldRefuseFineGrainedEventsWithoutDiffWith() {
final RepositoryPresenter<String> presenter =
dataBindingRepositoryPresenterOf(String.class)
.layout(LAYOUT_ID)
.itemId(ITEM_ID)
.forItem();
final boolean fineGrained = presenter.getUpdates("String1", "String2", listUpdateCallback);
assertThat(fineGrained, is(false));
}
@Test
public void shouldNotifyFineGrainedEventsWithDiffWith() {
final List<String> oldData = asList("A:1", "B:2", "C:3");
final List<String> newData = asList("B:2", "A:4", "C:5");
final DiffingLogic diffingLogic = new DiffingLogic(oldData, newData);
final RepositoryPresenter<List<String>> diffingPresenter =
dataBindingRepositoryPresenterOf(String.class)
.layout(LAYOUT_ID)
.itemId(ITEM_ID)
.diffWith(diffingLogic, false)
.forList();
final boolean fineGrained = diffingPresenter.getUpdates(oldData, newData, listUpdateCallback);
assertThat(fineGrained, is(true));
DiffUtil.calculateDiff(diffingLogic, false).dispatchUpdatesTo(
verifyingWrapper(listUpdateCallback));
verifyNoMoreInteractions(listUpdateCallback);
}
@Test
public void shouldNotifyFineGrainedEventsWithDiffWithMoveDetection() {
final List<String> oldData = asList("A:1", "B:2", "C:3", "D:0");
final List<String> newData = asList("B:2", "D:0", "A:4", "C:5");
final DiffingLogic diffingLogic = new DiffingLogic(oldData, newData);
final RepositoryPresenter<List<String>> diffingPresenter =
dataBindingRepositoryPresenterOf(String.class)
.layout(LAYOUT_ID)
.itemId(ITEM_ID)
.diffWith(diffingLogic, true)
.forCollection(Functions.<List<String>>identityFunction());
final boolean fineGrained = diffingPresenter.getUpdates(oldData, newData, listUpdateCallback);
assertThat(fineGrained, is(true));
DiffUtil.calculateDiff(diffingLogic, true).dispatchUpdatesTo(
verifyingWrapper(listUpdateCallback));
verifyNoMoreInteractions(listUpdateCallback);
}
@Test
public void shouldNotifySingleItemFineGrainedEventsWithDiff() {
final Result<String> withA = success("A");
final Result<String> withB = success("B");
final Result<String> without = failure();
final RepositoryPresenter<Result<String>> diffingPresenter =
dataBindingRepositoryPresenterOf(String.class)
.layout(LAYOUT_ID)
.itemId(ITEM_ID)
.diff()
.forResult();
boolean fineGrained = diffingPresenter.getUpdates(withA, withB, listUpdateCallback);
assertThat(fineGrained, is(true));
verify(listUpdateCallback).onChanged(0, 1, null);
verifyNoMoreInteractions(listUpdateCallback);
fineGrained = diffingPresenter.getUpdates(withA, without, listUpdateCallback);
assertThat(fineGrained, is(true));
verify(listUpdateCallback).onRemoved(0, 1);
verifyNoMoreInteractions(listUpdateCallback);
fineGrained = diffingPresenter.getUpdates(without, withB, listUpdateCallback);
assertThat(fineGrained, is(true));
verify(listUpdateCallback).onInserted(0, 1);
verifyNoMoreInteractions(listUpdateCallback);
}
@Test
public void shouldNotifyBlanketChangeEventForSameObjectForOldAndNewData() {
final List<String> oneList = asList("A:0", "B:1");
final DiffingLogic diffingLogic = new DiffingLogic(oneList, oneList);
final RepositoryPresenter<List<String>> diffingPresenter =
dataBindingRepositoryPresenterOf(String.class)
.layout(LAYOUT_ID)
.itemId(ITEM_ID)
.diffWith(diffingLogic, false)
.forList();
final boolean fineGrained = diffingPresenter.getUpdates(oneList, oneList, listUpdateCallback);
assertThat(fineGrained, is(true));
verify(listUpdateCallback).onChanged(0, oneList.size(), null);
verifyNoMoreInteractions(listUpdateCallback);
}
@Test
public void shouldHavePrivateConstructor() {
assertThat(DataBindingRepositoryPresenters.class, hasPrivateConstructor());
}
private static final class StringToFirstCharStringList implements Function<String, List<String>> {
@NonNull
@Override
public List<String> apply(@NonNull final String input) {
return singletonList(valueOf(input.charAt(0)));
}
}
} |
googleapis/google-cloud-java | 35,984 | java-dialogflow/proto-google-cloud-dialogflow-v2/src/main/java/com/google/cloud/dialogflow/v2/ListGeneratorsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/v2/generator.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dialogflow.v2;
/**
*
*
* <pre>
* Response of ListGenerators.
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.v2.ListGeneratorsResponse}
*/
public final class ListGeneratorsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dialogflow.v2.ListGeneratorsResponse)
ListGeneratorsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListGeneratorsResponse.newBuilder() to construct.
private ListGeneratorsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListGeneratorsResponse() {
generators_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListGeneratorsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.v2.GeneratorProto
.internal_static_google_cloud_dialogflow_v2_ListGeneratorsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.v2.GeneratorProto
.internal_static_google_cloud_dialogflow_v2_ListGeneratorsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.v2.ListGeneratorsResponse.class,
com.google.cloud.dialogflow.v2.ListGeneratorsResponse.Builder.class);
}
public static final int GENERATORS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.dialogflow.v2.Generator> generators_;
/**
*
*
* <pre>
* List of generators retrieved.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2.Generator generators = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.dialogflow.v2.Generator> getGeneratorsList() {
return generators_;
}
/**
*
*
* <pre>
* List of generators retrieved.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2.Generator generators = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.dialogflow.v2.GeneratorOrBuilder>
getGeneratorsOrBuilderList() {
return generators_;
}
/**
*
*
* <pre>
* List of generators retrieved.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2.Generator generators = 1;</code>
*/
@java.lang.Override
public int getGeneratorsCount() {
return generators_.size();
}
/**
*
*
* <pre>
* List of generators retrieved.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2.Generator generators = 1;</code>
*/
@java.lang.Override
public com.google.cloud.dialogflow.v2.Generator getGenerators(int index) {
return generators_.get(index);
}
/**
*
*
* <pre>
* List of generators retrieved.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2.Generator generators = 1;</code>
*/
@java.lang.Override
public com.google.cloud.dialogflow.v2.GeneratorOrBuilder getGeneratorsOrBuilder(int index) {
return generators_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no more
* results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no more
* results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < generators_.size(); i++) {
output.writeMessage(1, generators_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < generators_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, generators_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.dialogflow.v2.ListGeneratorsResponse)) {
return super.equals(obj);
}
com.google.cloud.dialogflow.v2.ListGeneratorsResponse other =
(com.google.cloud.dialogflow.v2.ListGeneratorsResponse) obj;
if (!getGeneratorsList().equals(other.getGeneratorsList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getGeneratorsCount() > 0) {
hash = (37 * hash) + GENERATORS_FIELD_NUMBER;
hash = (53 * hash) + getGeneratorsList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.dialogflow.v2.ListGeneratorsResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.v2.ListGeneratorsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2.ListGeneratorsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.v2.ListGeneratorsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2.ListGeneratorsResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.v2.ListGeneratorsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2.ListGeneratorsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.v2.ListGeneratorsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2.ListGeneratorsResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.v2.ListGeneratorsResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2.ListGeneratorsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.v2.ListGeneratorsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.dialogflow.v2.ListGeneratorsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response of ListGenerators.
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.v2.ListGeneratorsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.v2.ListGeneratorsResponse)
com.google.cloud.dialogflow.v2.ListGeneratorsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.v2.GeneratorProto
.internal_static_google_cloud_dialogflow_v2_ListGeneratorsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.v2.GeneratorProto
.internal_static_google_cloud_dialogflow_v2_ListGeneratorsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.v2.ListGeneratorsResponse.class,
com.google.cloud.dialogflow.v2.ListGeneratorsResponse.Builder.class);
}
// Construct using com.google.cloud.dialogflow.v2.ListGeneratorsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (generatorsBuilder_ == null) {
generators_ = java.util.Collections.emptyList();
} else {
generators_ = null;
generatorsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.dialogflow.v2.GeneratorProto
.internal_static_google_cloud_dialogflow_v2_ListGeneratorsResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.dialogflow.v2.ListGeneratorsResponse getDefaultInstanceForType() {
return com.google.cloud.dialogflow.v2.ListGeneratorsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.dialogflow.v2.ListGeneratorsResponse build() {
com.google.cloud.dialogflow.v2.ListGeneratorsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.dialogflow.v2.ListGeneratorsResponse buildPartial() {
com.google.cloud.dialogflow.v2.ListGeneratorsResponse result =
new com.google.cloud.dialogflow.v2.ListGeneratorsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.dialogflow.v2.ListGeneratorsResponse result) {
if (generatorsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
generators_ = java.util.Collections.unmodifiableList(generators_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.generators_ = generators_;
} else {
result.generators_ = generatorsBuilder_.build();
}
}
private void buildPartial0(com.google.cloud.dialogflow.v2.ListGeneratorsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.dialogflow.v2.ListGeneratorsResponse) {
return mergeFrom((com.google.cloud.dialogflow.v2.ListGeneratorsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.dialogflow.v2.ListGeneratorsResponse other) {
if (other == com.google.cloud.dialogflow.v2.ListGeneratorsResponse.getDefaultInstance())
return this;
if (generatorsBuilder_ == null) {
if (!other.generators_.isEmpty()) {
if (generators_.isEmpty()) {
generators_ = other.generators_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureGeneratorsIsMutable();
generators_.addAll(other.generators_);
}
onChanged();
}
} else {
if (!other.generators_.isEmpty()) {
if (generatorsBuilder_.isEmpty()) {
generatorsBuilder_.dispose();
generatorsBuilder_ = null;
generators_ = other.generators_;
bitField0_ = (bitField0_ & ~0x00000001);
generatorsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getGeneratorsFieldBuilder()
: null;
} else {
generatorsBuilder_.addAllMessages(other.generators_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.dialogflow.v2.Generator m =
input.readMessage(
com.google.cloud.dialogflow.v2.Generator.parser(), extensionRegistry);
if (generatorsBuilder_ == null) {
ensureGeneratorsIsMutable();
generators_.add(m);
} else {
generatorsBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.dialogflow.v2.Generator> generators_ =
java.util.Collections.emptyList();
private void ensureGeneratorsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
generators_ =
new java.util.ArrayList<com.google.cloud.dialogflow.v2.Generator>(generators_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.dialogflow.v2.Generator,
com.google.cloud.dialogflow.v2.Generator.Builder,
com.google.cloud.dialogflow.v2.GeneratorOrBuilder>
generatorsBuilder_;
/**
*
*
* <pre>
* List of generators retrieved.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2.Generator generators = 1;</code>
*/
public java.util.List<com.google.cloud.dialogflow.v2.Generator> getGeneratorsList() {
if (generatorsBuilder_ == null) {
return java.util.Collections.unmodifiableList(generators_);
} else {
return generatorsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* List of generators retrieved.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2.Generator generators = 1;</code>
*/
public int getGeneratorsCount() {
if (generatorsBuilder_ == null) {
return generators_.size();
} else {
return generatorsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* List of generators retrieved.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2.Generator generators = 1;</code>
*/
public com.google.cloud.dialogflow.v2.Generator getGenerators(int index) {
if (generatorsBuilder_ == null) {
return generators_.get(index);
} else {
return generatorsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* List of generators retrieved.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2.Generator generators = 1;</code>
*/
    public Builder setGenerators(int index, com.google.cloud.dialogflow.v2.Generator value) {
      // Replaces the element at the given index; nulls are rejected to keep the list null-free.
      if (generatorsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureGeneratorsIsMutable();
        generators_.set(index, value);
        onChanged();
      } else {
        generatorsBuilder_.setMessage(index, value);
      }
      return this;
    }
/**
*
*
* <pre>
* List of generators retrieved.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2.Generator generators = 1;</code>
*/
    public Builder setGenerators(
        int index, com.google.cloud.dialogflow.v2.Generator.Builder builderForValue) {
      // Builder overload: the sub-builder is materialized via build() before storing.
      if (generatorsBuilder_ == null) {
        ensureGeneratorsIsMutable();
        generators_.set(index, builderForValue.build());
        onChanged();
      } else {
        generatorsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
/**
*
*
* <pre>
* List of generators retrieved.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2.Generator generators = 1;</code>
*/
    public Builder addGenerators(com.google.cloud.dialogflow.v2.Generator value) {
      // Appends one message; nulls are rejected to keep the list null-free.
      if (generatorsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureGeneratorsIsMutable();
        generators_.add(value);
        onChanged();
      } else {
        generatorsBuilder_.addMessage(value);
      }
      return this;
    }
/**
*
*
* <pre>
* List of generators retrieved.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2.Generator generators = 1;</code>
*/
    public Builder addGenerators(int index, com.google.cloud.dialogflow.v2.Generator value) {
      // Inserts a message at the given index, shifting subsequent elements.
      if (generatorsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureGeneratorsIsMutable();
        generators_.add(index, value);
        onChanged();
      } else {
        generatorsBuilder_.addMessage(index, value);
      }
      return this;
    }
/**
*
*
* <pre>
* List of generators retrieved.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2.Generator generators = 1;</code>
*/
    public Builder addGenerators(com.google.cloud.dialogflow.v2.Generator.Builder builderForValue) {
      // Builder overload: materialize via build() and append.
      if (generatorsBuilder_ == null) {
        ensureGeneratorsIsMutable();
        generators_.add(builderForValue.build());
        onChanged();
      } else {
        generatorsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
/**
*
*
* <pre>
* List of generators retrieved.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2.Generator generators = 1;</code>
*/
    public Builder addGenerators(
        int index, com.google.cloud.dialogflow.v2.Generator.Builder builderForValue) {
      // Builder overload: materialize via build() and insert at the given index.
      if (generatorsBuilder_ == null) {
        ensureGeneratorsIsMutable();
        generators_.add(index, builderForValue.build());
        onChanged();
      } else {
        generatorsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
/**
*
*
* <pre>
* List of generators retrieved.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2.Generator generators = 1;</code>
*/
    public Builder addAllGenerators(
        java.lang.Iterable<? extends com.google.cloud.dialogflow.v2.Generator> values) {
      // Bulk append; the protobuf helper also performs per-element null checks.
      if (generatorsBuilder_ == null) {
        ensureGeneratorsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, generators_);
        onChanged();
      } else {
        generatorsBuilder_.addAllMessages(values);
      }
      return this;
    }
/**
*
*
* <pre>
* List of generators retrieved.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2.Generator generators = 1;</code>
*/
    public Builder clearGenerators() {
      // Reset to the shared empty list and drop the "mutable copy exists" bit.
      if (generatorsBuilder_ == null) {
        generators_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        generatorsBuilder_.clear();
      }
      return this;
    }
/**
*
*
* <pre>
* List of generators retrieved.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2.Generator generators = 1;</code>
*/
    public Builder removeGenerators(int index) {
      // Removes the element at the given index, shifting subsequent elements.
      if (generatorsBuilder_ == null) {
        ensureGeneratorsIsMutable();
        generators_.remove(index);
        onChanged();
      } else {
        generatorsBuilder_.remove(index);
      }
      return this;
    }
/**
*
*
* <pre>
* List of generators retrieved.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2.Generator generators = 1;</code>
*/
    public com.google.cloud.dialogflow.v2.Generator.Builder getGeneratorsBuilder(int index) {
      // Forces creation of the field builder; mutations then flow through it.
      return getGeneratorsFieldBuilder().getBuilder(index);
    }
/**
*
*
* <pre>
* List of generators retrieved.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2.Generator generators = 1;</code>
*/
public com.google.cloud.dialogflow.v2.GeneratorOrBuilder getGeneratorsOrBuilder(int index) {
if (generatorsBuilder_ == null) {
return generators_.get(index);
} else {
return generatorsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* List of generators retrieved.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2.Generator generators = 1;</code>
*/
public java.util.List<? extends com.google.cloud.dialogflow.v2.GeneratorOrBuilder>
getGeneratorsOrBuilderList() {
if (generatorsBuilder_ != null) {
return generatorsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(generators_);
}
}
/**
*
*
* <pre>
* List of generators retrieved.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2.Generator generators = 1;</code>
*/
    public com.google.cloud.dialogflow.v2.Generator.Builder addGeneratorsBuilder() {
      // Appends a default-valued element and returns its builder for in-place editing.
      return getGeneratorsFieldBuilder()
          .addBuilder(com.google.cloud.dialogflow.v2.Generator.getDefaultInstance());
    }
/**
*
*
* <pre>
* List of generators retrieved.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2.Generator generators = 1;</code>
*/
    public com.google.cloud.dialogflow.v2.Generator.Builder addGeneratorsBuilder(int index) {
      // Inserts a default-valued element at the index and returns its builder.
      return getGeneratorsFieldBuilder()
          .addBuilder(index, com.google.cloud.dialogflow.v2.Generator.getDefaultInstance());
    }
/**
*
*
* <pre>
* List of generators retrieved.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2.Generator generators = 1;</code>
*/
    public java.util.List<com.google.cloud.dialogflow.v2.Generator.Builder>
        getGeneratorsBuilderList() {
      // Forces creation of the field builder and exposes one sub-builder per element.
      return getGeneratorsFieldBuilder().getBuilderList();
    }
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.dialogflow.v2.Generator,
            com.google.cloud.dialogflow.v2.Generator.Builder,
            com.google.cloud.dialogflow.v2.GeneratorOrBuilder>
        getGeneratorsFieldBuilder() {
      // Lazy one-way handoff: on first call the builder takes ownership of the list
      // (and of the "is mutable" flag) and generators_ is nulled so all subsequent
      // access goes through the builder exclusively.
      if (generatorsBuilder_ == null) {
        generatorsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.dialogflow.v2.Generator,
                com.google.cloud.dialogflow.v2.Generator.Builder,
                com.google.cloud.dialogflow.v2.GeneratorOrBuilder>(
                generators_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        generators_ = null;
      }
      return generatorsBuilder_;
    }
    // Holds either a String or a ByteString; the accessors below convert lazily
    // and cache the converted form back into this field.
    private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no more
* results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no more
* results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no more
* results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      // Mark the field as explicitly set so build() serializes it.
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no more
* results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
    public Builder clearNextPageToken() {
      // Restore the default value and drop the has-bit.
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no more
* results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Proto3 strings must be valid UTF-8; reject malformed input up front.
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    // Final pass-through overrides: unknown-field handling is entirely delegated
    // to the generated superclass.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.v2.ListGeneratorsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.dialogflow.v2.ListGeneratorsResponse)
  // Singleton default instance, created eagerly at class-initialization time.
  private static final com.google.cloud.dialogflow.v2.ListGeneratorsResponse DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.dialogflow.v2.ListGeneratorsResponse();
  }
  public static com.google.cloud.dialogflow.v2.ListGeneratorsResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser: parses into a fresh Builder and always returns a partial
  // message, attaching the partially-read message to any thrown parse exception.
  private static final com.google.protobuf.Parser<ListGeneratorsResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListGeneratorsResponse>() {
        @java.lang.Override
        public ListGeneratorsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // I/O failures are surfaced uniformly as protocol-buffer parse errors.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  // Static and instance-level accessors for the shared parser and default instance.
  public static com.google.protobuf.Parser<ListGeneratorsResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListGeneratorsResponse> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.dialogflow.v2.ListGeneratorsResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/ignite-3 | 36,274 | modules/sql-engine/src/test/java/org/apache/ignite/internal/sql/engine/planner/ColocatedHashAggregatePlannerTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.sql.engine.planner;
import static java.util.function.Predicate.not;
import java.util.List;
import java.util.Objects;
import java.util.function.Predicate;
import org.apache.calcite.rel.RelCollation;
import org.apache.calcite.rel.RelCollations;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.core.AggregateCall;
import org.apache.ignite.internal.sql.engine.rel.IgniteCorrelatedNestedLoopJoin;
import org.apache.ignite.internal.sql.engine.rel.IgniteExchange;
import org.apache.ignite.internal.sql.engine.rel.IgniteLimit;
import org.apache.ignite.internal.sql.engine.rel.IgniteMergeJoin;
import org.apache.ignite.internal.sql.engine.rel.IgniteSort;
import org.apache.ignite.internal.sql.engine.rel.IgniteTableScan;
import org.apache.ignite.internal.sql.engine.rel.agg.IgniteColocatedHashAggregate;
import org.apache.ignite.internal.sql.engine.schema.IgniteIndex.Collation;
import org.apache.ignite.internal.sql.engine.trait.IgniteDistributions;
import org.apache.ignite.internal.sql.engine.trait.TraitUtils;
import org.apache.ignite.internal.util.ArrayUtils;
import org.junit.jupiter.api.Test;
/**
* This test verifies that queries defined in {@link TestCase TestCase} can be optimized with usage of
* colocated hash aggregates only.
*
* <p>See {@link AbstractAggregatePlannerTest base class} for more details.
*/
public class ColocatedHashAggregatePlannerTest extends AbstractAggregatePlannerTest {
    // Planner rules disabled for every assertion in this test, so that only the
    // colocated hash aggregate strategy remains available to the optimizer.
    private final String[] disableRules = {
            "MapReduceHashAggregateConverterRule",
            "MapReduceSortAggregateConverterRule",
            "ColocatedSortAggregateConverterRule"
    };
/**
* Validates a plan for simple query with aggregate.
*/
@Test
protected void simpleAggregate() throws Exception {
checkSimpleAggSingle(TestCase.CASE_1);
checkSimpleAggHash(TestCase.CASE_1A);
checkSimpleAggHash(TestCase.CASE_1B);
}
/**
* Validates a plan for simple query with DISTINCT aggregates.
*
* @see #minMaxDistinctAggregate()
*/
    @Test
    public void distinctAggregate() throws Exception {
        // Single-distribution cases: aggregation happens in one colocated step.
        checkDistinctAggSingle(TestCase.CASE_2_1);
        checkDistinctAggSingle(TestCase.CASE_2_2);
        // Distributed cases requiring an exchange below the aggregate
        // (case suffixes denote table distributions — defined in the base class).
        checkDistinctAggHash(TestCase.CASE_2_1A);
        checkDistinctAggHash(TestCase.CASE_2_2A);
        checkDistinctAggHash(TestCase.CASE_2_1B);
        checkDistinctAggHash(TestCase.CASE_2_2B);
        // Cases where the DISTINCT can be computed colocated, below the exchange.
        checkColocatedDistinctAggHash(TestCase.CASE_2_1C);
        checkColocatedDistinctAggHash(TestCase.CASE_2_2C);
        checkColocatedDistinctAggHash(TestCase.CASE_2_1D);
        checkColocatedDistinctAggHash(TestCase.CASE_2_2D);
    }
/**
* Validates a plan for a query with min/max distinct aggregate.
*
     * <p>NB: DISTINCT makes no sense for MIN/MAX, thus the expected plan is the same as in {@link #simpleAggregate()}.
*/
    @Test
    public void minMaxDistinctAggregate() throws Exception {
        // DISTINCT is a no-op for MIN/MAX, so all cases reduce to the plain
        // simple-aggregate expectations regardless of distribution.
        checkSimpleAggSingle(TestCase.CASE_3_1);
        checkSimpleAggSingle(TestCase.CASE_3_2);
        checkSimpleAggHash(TestCase.CASE_3_1A);
        checkSimpleAggHash(TestCase.CASE_3_2A);
        checkSimpleAggHash(TestCase.CASE_3_1B);
        checkSimpleAggHash(TestCase.CASE_3_2B);
        checkSimpleAggHash(TestCase.CASE_3_1C);
        checkSimpleAggHash(TestCase.CASE_3_2C);
        checkSimpleAggHash(TestCase.CASE_3_1D);
        checkSimpleAggHash(TestCase.CASE_3_2D);
    }
/**
* Validates a plan for a query with aggregate and groups.
*/
    @Test
    public void simpleAggregateWithGroupBy() throws Exception {
        checkSimpleAggWithGroupBySingle(TestCase.CASE_5);
        checkSimpleAggWithGroupBySingle(TestCase.CASE_6);
        checkSimpleAggWithGroupByHash(TestCase.CASE_5A);
        checkSimpleAggWithGroupByHash(TestCase.CASE_6A);
        checkSimpleAggWithGroupByHash(TestCase.CASE_5B);
        checkSimpleAggWithGroupByHash(TestCase.CASE_6B);
        checkSimpleAggWithColocatedGroupBy(TestCase.CASE_5C);
        checkSimpleAggWithColocatedGroupBy(TestCase.CASE_6C);
        // NOTE(review): CASE_6D appears to be missing here while CASE_5D is covered —
        // verify against the TestCase enum in the base class whether it was intended.
        checkSimpleAggWithColocatedGroupBy(TestCase.CASE_5D);
    }
/**
* Validates a plan for a query with DISTINCT aggregates and groups.
*
* @see #minMaxDistinctAggregateWithGroupBy()
*/
    @Test
    public void distinctAggregateWithGroups() throws Exception {
        // Single-distribution cases.
        checkDistinctAggWithGroupBySingle(TestCase.CASE_7_1);
        checkDistinctAggWithGroupBySingle(TestCase.CASE_7_2);
        checkDistinctAggWithGroupBySingle(TestCase.CASE_7_3);
        // Distributed cases with an exchange below the aggregate.
        checkDistinctAggWithGroupByHash(TestCase.CASE_7_1A);
        checkDistinctAggWithGroupByHash(TestCase.CASE_7_2A);
        checkDistinctAggWithGroupByHash(TestCase.CASE_7_3A);
        checkDistinctAggWithGroupByHash(TestCase.CASE_7_1B);
        checkDistinctAggWithGroupByHash(TestCase.CASE_7_2B);
        checkDistinctAggWithGroupByHash(TestCase.CASE_7_3B);
        // Cases where the grouped DISTINCT aggregate is fully colocated.
        checkDistinctAggWithColocatedGroupByHash(TestCase.CASE_7_1C);
        checkDistinctAggWithColocatedGroupByHash(TestCase.CASE_7_2C);
        checkDistinctAggWithColocatedGroupByHash(TestCase.CASE_7_3C);
        checkDistinctAggWithColocatedGroupByHash(TestCase.CASE_7_1D);
        checkDistinctAggWithColocatedGroupByHash(TestCase.CASE_7_2D);
        checkDistinctAggWithColocatedGroupByHash(TestCase.CASE_7_3D);
    }
/**
* Validates a plan for a query with min/max distinct aggregates and groups.
*
     * <p>NB: DISTINCT makes no sense for MIN/MAX, thus the expected plan is the same as in {@link #simpleAggregateWithGroupBy()}.
*/
    @Test
    public void minMaxDistinctAggregateWithGroupBy() throws Exception {
        // DISTINCT is a no-op for MIN/MAX, so the grouped expectations match the
        // plain grouped-aggregate checks for every distribution variant.
        checkSimpleAggWithGroupBySingle(TestCase.CASE_8_1);
        checkSimpleAggWithGroupBySingle(TestCase.CASE_8_2);
        checkSimpleAggWithGroupByHash(TestCase.CASE_8_1A);
        checkSimpleAggWithGroupByHash(TestCase.CASE_8_2A);
        checkSimpleAggWithGroupByHash(TestCase.CASE_8_1B);
        checkSimpleAggWithGroupByHash(TestCase.CASE_8_2B);
        checkSimpleAggWithColocatedGroupBy(TestCase.CASE_8_1C);
        checkSimpleAggWithColocatedGroupBy(TestCase.CASE_8_2C);
        checkSimpleAggWithColocatedGroupBy(TestCase.CASE_8_1D);
        checkSimpleAggWithColocatedGroupBy(TestCase.CASE_8_2D);
    }
/**
* Validates a plan uses an index for a query with aggregate if grouped by index prefix.
*
* <p>NB: GROUP BY columns order permutation shouldn't affect the plan.
*/
    @Test
    public void aggregateWithGroupByIndexPrefixColumns() throws Exception {
        checkAggWithGroupByIndexColumnsSingle(TestCase.CASE_9);
        checkAggWithGroupByIndexColumnsSingle(TestCase.CASE_10);
        checkAggWithGroupByIndexColumnsSingle(TestCase.CASE_11);
        checkAggWithGroupByIndexColumnsHash(TestCase.CASE_9A);
        checkAggWithGroupByIndexColumnsHash(TestCase.CASE_10A);
        checkAggWithGroupByIndexColumnsHash(TestCase.CASE_11A);
        checkAggWithGroupByIndexColumnsHash(TestCase.CASE_9B);
        checkAggWithGroupByIndexColumnsHash(TestCase.CASE_10B);
        checkAggWithGroupByIndexColumnsHash(TestCase.CASE_11B);
        checkAggWithColocatedGroupByIndexColumnsHash(TestCase.CASE_9C);
        checkAggWithColocatedGroupByIndexColumnsHash(TestCase.CASE_10C);
        checkAggWithColocatedGroupByIndexColumnsHash(TestCase.CASE_11C);
        // NOTE(review): only CASE_9D is covered from the D-suffixed group — confirm
        // whether CASE_10D/CASE_11D exist in the base class and should be added.
        checkAggWithColocatedGroupByIndexColumnsHash(TestCase.CASE_9D);
    }
/**
* Validates a plan for a query with DISTINCT and without aggregation function.
*/
    @Test
    public void distinctWithoutAggregate() throws Exception {
        // SELECT DISTINCT maps to a group-by with no accumulators.
        checkGroupWithNoAggregateSingle(TestCase.CASE_12);
        checkGroupWithNoAggregateHash(TestCase.CASE_12A);
        checkGroupWithNoAggregateHash(TestCase.CASE_12B);
        checkColocatedGroupWithNoAggregateHash(TestCase.CASE_12C);
        checkColocatedGroupWithNoAggregateHash(TestCase.CASE_12D);
        checkGroupWithNoAggregateSingle(TestCase.CASE_13);
        checkGroupWithNoAggregateHash(TestCase.CASE_13A);
        checkGroupWithNoAggregateHash(TestCase.CASE_13B);
        checkColocatedGroupWithNoAggregateHash(TestCase.CASE_13C);
        checkColocatedGroupWithNoAggregateHash(TestCase.CASE_13D);
    }
/**
* Validates a plan for a query which WHERE clause contains a sub-query with aggregate.
*/
    @Test
    public void subqueryWithAggregateInWhereClause() throws Exception {
        // The aggregate inside the WHERE sub-query follows the same plan shape
        // as a top-level simple aggregate.
        checkSimpleAggSingle(TestCase.CASE_14);
        checkSimpleAggHash(TestCase.CASE_14A);
        checkSimpleAggHash(TestCase.CASE_14B);
    }
/**
* Validates that the SINGLE_VALUE aggregate is added for a sub-query where a single value is expected.
*/
    @Test
    public void subqueryWithSingleValueAggregate() throws Exception {
        // All variants must carry a SINGLE_VALUE accumulator enforcing the
        // scalar-subquery cardinality contract.
        checkSimpleAggSingle(TestCase.CASE_27, hasSingleValueAggregate());
        checkSimpleAggSingle(TestCase.CASE_27A, hasSingleValueAggregate());
        checkSimpleAggSingle(TestCase.CASE_27B, hasSingleValueAggregate());
        checkSimpleAggSingle(TestCase.CASE_27C, hasSingleValueAggregate());
    }
/**
* Validates a plan for a query with DISTINCT aggregate in WHERE clause.
*/
    @Test
    public void distinctAggregateInWhereClause() throws Exception {
        // The DISTINCT in the WHERE sub-query is planned as a group-by
        // without accumulators.
        checkGroupWithNoAggregateSingle(TestCase.CASE_15);
        checkGroupWithNoAggregateHash(TestCase.CASE_15A);
        checkGroupWithNoAggregateHash(TestCase.CASE_15B);
    }
/**
* Validates a plan with merge-sort utilizes index if collation fits.
*/
@Test
public void noSortAppendingWithCorrectCollation() throws Exception {
String[] additionalRulesToDisable = {"NestedLoopJoinConverter", "CorrelatedNestedLoopJoin", "CorrelateToNestedLoopRule",
"HashJoinConverter"};
assertPlan(TestCase.CASE_16,
nodeOrAnyChild(isInstanceOf(IgniteSort.class)
.and(input(isInstanceOf(IgniteColocatedHashAggregate.class)
.and(input(isTableScan("TEST")))
))
),
ArrayUtils.concat(disableRules, additionalRulesToDisable)
);
assertPlan(TestCase.CASE_16A,
nodeOrAnyChild(isInstanceOf(IgniteSort.class)
.and(input(isInstanceOf(IgniteColocatedHashAggregate.class)
.and(input(isInstanceOf(IgniteExchange.class)
.and(input(isTableScan("TEST")))
))
))
),
ArrayUtils.concat(disableRules, additionalRulesToDisable)
);
assertPlan(TestCase.CASE_16B,
nodeOrAnyChild(isInstanceOf(IgniteSort.class)
.and(input(isInstanceOf(IgniteColocatedHashAggregate.class)
.and(input(isInstanceOf(IgniteExchange.class)
.and(input(isTableScan("TEST")))
))
))
),
ArrayUtils.concat(disableRules, additionalRulesToDisable)
);
}
/**
* Validates a plan for a sub-query with order and limit.
*/
@Test
public void emptyCollationPassThroughLimit() throws Exception {
assertPlan(TestCase.CASE_17,
hasChildThat(isInstanceOf(IgniteCorrelatedNestedLoopJoin.class)
.and(input(1, isInstanceOf(IgniteColocatedHashAggregate.class)
.and(input(isInstanceOf(IgniteSort.class)
.and(input(isTableScan("TEST")))
))
))
),
disableRules
);
assertPlan(TestCase.CASE_17A,
hasChildThat(isInstanceOf(IgniteCorrelatedNestedLoopJoin.class)
.and(input(1, isInstanceOf(IgniteColocatedHashAggregate.class)
.and(input(isInstanceOf(IgniteLimit.class)
.and(input(isInstanceOf(IgniteExchange.class)
.and(input(isInstanceOf(IgniteSort.class)
.and(input(isTableScan("TEST")))
))
))
))
))
),
disableRules
);
assertPlan(TestCase.CASE_17B,
hasChildThat(isInstanceOf(IgniteCorrelatedNestedLoopJoin.class)
.and(input(1, isInstanceOf(IgniteColocatedHashAggregate.class)
.and(input(isInstanceOf(IgniteLimit.class)
.and(input(isInstanceOf(IgniteExchange.class)
.and(input(isInstanceOf(IgniteSort.class)
.and(input(isTableScan("TEST")))
))
))
))
))
),
disableRules
);
}
/**
* Validates a plan for a query with aggregate and with groups and sorting by the same column set.
*/
    @Test
    public void groupsWithOrderByGroupColumns() throws Exception {
        // The expected output collation mirrors the ORDER BY column order, which
        // may be a permutation of the GROUP BY columns.
        checkGroupsWithOrderByGroupColumnsSingle(TestCase.CASE_18_1, TraitUtils.createCollation(List.of(0, 1)));
        checkGroupsWithOrderByGroupColumnsSingle(TestCase.CASE_18_2, TraitUtils.createCollation(List.of(1, 0)));
        checkGroupsWithOrderByGroupColumnsSingle(TestCase.CASE_18_3, TraitUtils.createCollation(List.of(1, 0)));
        checkGroupsWithOrderByGroupColumnsHash(TestCase.CASE_18_1A, TraitUtils.createCollation(List.of(0, 1)));
        checkGroupsWithOrderByGroupColumnsHash(TestCase.CASE_18_2A, TraitUtils.createCollation(List.of(1, 0)));
        checkGroupsWithOrderByGroupColumnsHash(TestCase.CASE_18_3A, TraitUtils.createCollation(List.of(1, 0)));
        checkGroupsWithOrderByGroupColumnsHash(TestCase.CASE_18_1B, TraitUtils.createCollation(List.of(0, 1)));
        checkGroupsWithOrderByGroupColumnsHash(TestCase.CASE_18_2B, TraitUtils.createCollation(List.of(1, 0)));
        checkGroupsWithOrderByGroupColumnsHash(TestCase.CASE_18_3B, TraitUtils.createCollation(List.of(1, 0)));
    }
/**
* Validates a plan for a query with aggregate and with sorting by subset of group columns.
*/
    @Test
    public void aggregateWithOrderByGroupColumns() throws Exception {
        // ORDER BY may reference a subset (or the whole set) of GROUP BY columns;
        // the expected collation covers exactly the ordered columns.
        checkGroupsWithOrderByGroupColumnsSingle(TestCase.CASE_19_1, TraitUtils.createCollation(List.of(0)));
        checkGroupsWithOrderByGroupColumnsSingle(TestCase.CASE_19_2, TraitUtils.createCollation(List.of(1)));
        checkGroupsWithOrderByGroupColumnsSingle(TestCase.CASE_20, TraitUtils.createCollation(List.of(0, 1, 2)));
        checkGroupsWithOrderByGroupColumnsHash(TestCase.CASE_19_1A, TraitUtils.createCollation(List.of(0)));
        checkGroupsWithOrderByGroupColumnsHash(TestCase.CASE_19_2A, TraitUtils.createCollation(List.of(1)));
        checkGroupsWithOrderByGroupColumnsHash(TestCase.CASE_20A, TraitUtils.createCollation(List.of(0, 1, 2)));
        checkGroupsWithOrderByGroupColumnsHash(TestCase.CASE_19_1B, TraitUtils.createCollation(List.of(0)));
        checkGroupsWithOrderByGroupColumnsHash(TestCase.CASE_19_2B, TraitUtils.createCollation(List.of(1)));
        checkGroupsWithOrderByGroupColumnsHash(TestCase.CASE_20B, TraitUtils.createCollation(List.of(0, 1, 2)));
    }
/**
* Validates a plan for a query with aggregate and groups, and EXPAND_DISTINCT_AGG hint.
*/
@Test
public void expandDistinctAggregates() throws Exception {
Predicate<? extends RelNode> subtreePredicate = nodeOrAnyChild(isInstanceOf(IgniteColocatedHashAggregate.class)
// Check the second aggregation step contains accumulators.
// Plan must not contain distinct accumulators.
.and(hasAggregate())
.and(not(hasDistinctAggregate()))
// Check the first aggregation step is SELECT DISTINCT (doesn't contain any accumulators)
.and(input(isInstanceOf(IgniteColocatedHashAggregate.class)
.and(not(hasAggregate()))
.and(hasGroups())
))
);
assertPlan(TestCase.CASE_21, nodeOrAnyChild(isInstanceOf(IgniteMergeJoin.class)
.and(input(0, subtreePredicate))
.and(input(1, subtreePredicate))
), disableRules);
subtreePredicate = nodeOrAnyChild(isInstanceOf(IgniteColocatedHashAggregate.class)
// Check the second aggregation step contains accumulators.
// Plan must not contain distinct accumulators.
.and(hasAggregate())
.and(not(hasDistinctAggregate()))
// Check the first aggregation step is SELECT DISTINCT (doesn't contain any accumulators)
.and(input(isInstanceOf(IgniteColocatedHashAggregate.class)
.and(not(hasAggregate()))
.and(hasGroups())
))
);
assertPlan(TestCase.CASE_21A, nodeOrAnyChild(isInstanceOf(IgniteMergeJoin.class)
.and(input(0, subtreePredicate))
.and(input(1, subtreePredicate))
), disableRules);
assertPlan(TestCase.CASE_21B, nodeOrAnyChild(isInstanceOf(IgniteMergeJoin.class)
.and(input(0, subtreePredicate))
.and(input(1, subtreePredicate))
), disableRules);
}
/**
* Validates that single phase COUNT aggregate is used.
*/
@Test
public void testCountAgg() throws Exception {
Predicate<AggregateCall> countMap = (a) ->
Objects.equals(a.getAggregation().getName(), "COUNT") && a.getArgList().equals(List.of(1));
Predicate<IgniteColocatedHashAggregate> nonColocatedGroupBy = isInstanceOf(IgniteColocatedHashAggregate.class)
.and(in -> hasAggregates(countMap).test(in.getAggCallList()))
.and(input(isInstanceOf(IgniteExchange.class)
.and(hasDistribution(IgniteDistributions.single()))));
assertPlan(TestCase.CASE_22, nonColocatedGroupBy, disableRules);
assertPlan(TestCase.CASE_22A, nonColocatedGroupBy, disableRules);
Predicate<IgniteExchange> colocatedGroupBy = isInstanceOf(IgniteExchange.class)
.and(hasDistribution(IgniteDistributions.single()))
.and(input(isInstanceOf(IgniteColocatedHashAggregate.class)
.and(in -> hasAggregates(countMap).test(in.getAggCallList()))));
assertPlan(TestCase.CASE_22B, colocatedGroupBy, disableRules);
assertPlan(TestCase.CASE_22C, colocatedGroupBy, disableRules);
}
/**
* Validates that single phase AVG aggregate is used.
*/
@Test
public void testAvgAgg() throws Exception {
Predicate<AggregateCall> countMap = (a) -> Objects.equals(a.getAggregation().getName(), "AVG") && a.getArgList().equals(List.of(1));
Predicate<IgniteColocatedHashAggregate> nonColocatedGroupBy = isInstanceOf(IgniteColocatedHashAggregate.class)
.and(in -> hasAggregates(countMap).test(in.getAggCallList()))
.and(input(isInstanceOf(IgniteExchange.class)
.and(hasDistribution(IgniteDistributions.single()))));
assertPlan(TestCase.CASE_23, nonColocatedGroupBy, disableRules);
assertPlan(TestCase.CASE_23A, nonColocatedGroupBy, disableRules);
Predicate<IgniteExchange> colocatedGroupBy = isInstanceOf(IgniteExchange.class)
.and(hasDistribution(IgniteDistributions.single()))
.and(input(isInstanceOf(IgniteColocatedHashAggregate.class)
.and(in -> hasAggregates(countMap).test(in.getAggCallList()))));
assertPlan(TestCase.CASE_23B, colocatedGroupBy, disableRules);
assertPlan(TestCase.CASE_23C, colocatedGroupBy, disableRules);
}
/**
* Validates a plan for a query with two aggregates: one w/o DISTINCT and one with DISTINCT: single distribution.
*/
    @Test
    public void countDistinctGroupSetSingle() throws Exception {
        // The whole mixed aggregation collapses into a single colocated node
        // (no group sets) directly over the scan.
        assertPlan(TestCase.CASE_24_1, isInstanceOf(IgniteColocatedHashAggregate.class)
                .and(hasNoGroupSets(IgniteColocatedHashAggregate::getGroupSets))
                .and(input(isInstanceOf(IgniteTableScan.class)
                )), disableRules);
    }
/**
* Validates a plan for a query with two aggregates: one w/o DISTINCT and one with DISTINCT: hash distribution.
*/
    @Test
    public void countDistinctGroupSetHash() throws Exception {
        checkCountDistinctHash(TestCase.CASE_24_1A);
        checkCountDistinctHash(TestCase.CASE_24_1B);
        checkCountDistinctHash(TestCase.CASE_24_1D);

        // Colocated variant: an inner aggregate with one group set runs below the
        // single exchange; the outer aggregate (no group sets) finalizes on top.
        Predicate<RelNode> colocated2 = nodeOrAnyChild(isInstanceOf(IgniteColocatedHashAggregate.class)
                .and(hasNoGroupSets(IgniteColocatedHashAggregate::getGroupSets))
                .and(input(isInstanceOf(IgniteExchange.class)
                        .and(hasDistribution(IgniteDistributions.single()))
                        .and(input(isInstanceOf(IgniteColocatedHashAggregate.class)
                                .and(hasGroupSets(IgniteColocatedHashAggregate::getGroupSets, 1))
                                .and(input(isInstanceOf(IgniteTableScan.class))))
                        ))
                ));

        assertPlan(TestCase.CASE_24_1C, colocated2, disableRules);
        assertPlan(TestCase.CASE_24_1E, colocated2, disableRules);
    }
/**
* Validates a plan for a query with aggregate and with groups and sorting by the same column set in descending order.
*/
    @Test
    public void groupsWithOrderByGroupColumnDescending() throws Exception {
        // Descending ORDER BY over the full group column set must be derivable
        // from the aggregate without an extra sort.
        checkDerivedCollationWithOrderByGroupColumnSingle(TestCase.CASE_25);
        checkDerivedCollationWithOrderByGroupColumnHash(TestCase.CASE_25A);
    }
/**
* Validates a plan for a query with aggregate and with sorting in descending order by a subset of grouping column.
*/
    @Test
    public void groupsWithOrderBySubsetOfGroupColumnDescending() throws Exception {
        // Same derivation as above, but ordering only a subset of the group columns.
        checkDerivedCollationWithOrderBySubsetOfGroupColumnsSingle(TestCase.CASE_26);
        checkDerivedCollationWithOrderBySubsetOfGroupColumnsHash(TestCase.CASE_26A);
    }
/**
* Validates a plan for a query with GROUPING aggregate.
*/
    @Test
    public void groupsWithGroupingAggregate() throws Exception {
        // GROUPING is planned like any other grouped aggregate in these cases.
        checkSimpleAggWithGroupBySingle(TestCase.CASE_28_1A);
        checkSimpleAggWithGroupBySingle(TestCase.CASE_28_1B);
        checkSimpleAggWithGroupByHash(TestCase.CASE_28_2A);
        checkSimpleAggWithGroupByHash(TestCase.CASE_28_2B);
    }
    // Convenience overload: assert a simple aggregate plan with the default
    // "has accumulators" predicate.
    private void checkSimpleAggSingle(TestCase testCase) throws Exception {
        checkSimpleAggSingle(testCase, hasAggregate());
    }
    // Asserts a single-distribution plan: the colocated aggregate (satisfying the
    // given extra predicate) consumes the TEST scan directly, with no exchange.
    private void checkSimpleAggSingle(TestCase testCase, Predicate<IgniteColocatedHashAggregate> aggPredicate) throws Exception {
        assertPlan(testCase,
                nodeOrAnyChild(isInstanceOf(IgniteColocatedHashAggregate.class)
                        .and(aggPredicate)
                        .and(input(isTableScan("TEST")))
                ),
                disableRules
        );
    }
    // Asserts a distributed plan: the colocated aggregate sits above an exchange
    // that pulls the TEST scan's rows together.
    private void checkSimpleAggHash(TestCase testCase) throws Exception {
        assertPlan(testCase,
                nodeOrAnyChild(isInstanceOf(IgniteColocatedHashAggregate.class)
                        .and(hasAggregate())
                        .and(input(isInstanceOf(IgniteExchange.class)
                                .and(input(isTableScan("TEST")))
                        ))
                ),
                disableRules
        );
    }
    // Single-distribution grouped aggregate: accumulators plus group keys,
    // directly over the TEST scan.
    private void checkSimpleAggWithGroupBySingle(TestCase testCase) throws Exception {
        assertPlan(testCase,
                nodeOrAnyChild(isInstanceOf(IgniteColocatedHashAggregate.class)
                        .and(hasAggregate())
                        .and(hasGroups())
                        .and(input(isTableScan("TEST")))
                ),
                disableRules
        );
    }
    /**
     * Asserts a hash-distribution GROUP BY plan: a colocated hash aggregate with aggregate functions and group sets,
     * whose input is an exchange over a scan of the TEST table.
     */
    private void checkSimpleAggWithGroupByHash(TestCase testCase) throws Exception {
        assertPlan(testCase,
                nodeOrAnyChild(isInstanceOf(IgniteColocatedHashAggregate.class)
                        .and(hasAggregate())
                        .and(hasGroups())
                        .and(input(isInstanceOf(IgniteExchange.class)
                                .and(input(isTableScan("TEST")))
                        ))),
                disableRules
        );
    }
    /**
     * Asserts a colocated GROUP BY plan: the aggregation (with aggregate functions and group sets) happens below an
     * exchange, directly over the TEST table scan — i.e. grouping is performed where the data lives.
     */
    private void checkSimpleAggWithColocatedGroupBy(TestCase testCase) throws Exception {
        assertPlan(testCase,
                nodeOrAnyChild(isInstanceOf(IgniteExchange.class)
                        .and(nodeOrAnyChild(isInstanceOf(IgniteColocatedHashAggregate.class)
                                .and(hasAggregate())
                                .and(hasGroups())
                                .and(input(isTableScan("TEST")))
                        ))),
                disableRules
        );
    }
    /**
     * Asserts a colocated DISTINCT-aggregate GROUP BY plan: the aggregate (with group sets and a distinct aggregate
     * call) sits below an exchange, directly over the TEST table scan.
     */
    private void checkDistinctAggWithColocatedGroupByHash(TestCase testCase) throws Exception {
        assertPlan(testCase,
                nodeOrAnyChild(isInstanceOf(IgniteExchange.class)
                        .and(nodeOrAnyChild(isInstanceOf(IgniteColocatedHashAggregate.class)
                                .and(hasAggregate())
                                .and(hasGroups())
                                .and(hasDistinctAggregate())
                                .and(input(isTableScan("TEST")))
                        ))),
                disableRules
        );
    }
    /**
     * Asserts a single-distribution DISTINCT-aggregate plan: the root is a colocated hash aggregate with a distinct
     * aggregate call and group sets, fed directly by the TEST table scan (no exchange).
     */
    private void checkDistinctAggSingle(TestCase testCase) throws Exception {
        assertPlan(testCase,
                isInstanceOf(IgniteColocatedHashAggregate.class)
                        .and(hasAggregate())
                        .and(hasDistinctAggregate())
                        .and(hasGroups())
                        .and(input(isTableScan("TEST"))),
                disableRules
        );
    }
    /**
     * Asserts a hash-distribution DISTINCT-aggregate plan: the root is a colocated hash aggregate with a distinct
     * aggregate call and group sets, fed by an exchange over the TEST table scan.
     */
    private void checkDistinctAggHash(TestCase testCase) throws Exception {
        assertPlan(testCase,
                isInstanceOf(IgniteColocatedHashAggregate.class)
                        .and(hasAggregate())
                        .and(hasDistinctAggregate())
                        .and(hasGroups())
                        .and(input(isInstanceOf(IgniteExchange.class)
                                .and(input(isTableScan("TEST")))
                        )),
                disableRules
        );
    }
    /**
     * Asserts a two-phase plan for DISTINCT aggregation over a distributed table: an inner colocated aggregate with
     * group sets removes duplicates next to the data, then an exchange feeds an outer aggregate that carries the
     * actual aggregate call — which therefore must no longer be marked distinct.
     */
    private void checkColocatedDistinctAggHash(TestCase testCase) throws Exception {
        assertPlan(testCase,
                isInstanceOf(IgniteColocatedHashAggregate.class)
                        .and(hasAggregate())
                        // Distinctness was already resolved by the inner (colocated) aggregate below.
                        .and(not(hasDistinctAggregate()))
                        .and(input(isInstanceOf(IgniteExchange.class)
                                .and(input(isInstanceOf(IgniteColocatedHashAggregate.class)
                                        .and(hasGroups())
                                        .and(input(isTableScan("TEST")))
                                ))
                        )),
                disableRules
        );
    }
    /**
     * Asserts a single-distribution plan for a DISTINCT aggregate with GROUP BY: a colocated hash aggregate with a
     * distinct aggregate call and group sets directly over the TEST table scan.
     */
    private void checkDistinctAggWithGroupBySingle(TestCase testCase) throws Exception {
        assertPlan(testCase,
                nodeOrAnyChild(isInstanceOf(IgniteColocatedHashAggregate.class)
                        .and(hasDistinctAggregate())
                        .and(hasGroups())
                        .and(input(isTableScan("TEST")))
                ),
                disableRules
        );
    }
    /**
     * Asserts a hash-distribution plan for a DISTINCT aggregate with GROUP BY: a colocated hash aggregate with a
     * distinct aggregate call and group sets, fed by an exchange over the TEST table scan.
     */
    private void checkDistinctAggWithGroupByHash(TestCase testCase) throws Exception {
        assertPlan(testCase,
                nodeOrAnyChild(isInstanceOf(IgniteColocatedHashAggregate.class)
                        .and(hasDistinctAggregate())
                        .and(hasGroups())
                        .and(input(isInstanceOf(IgniteExchange.class)
                                .and(input(isTableScan("TEST")))
                        ))
                ),
                disableRules
        );
    }
    /**
     * Asserts a single-distribution plan for aggregation grouped by indexed columns: a colocated hash aggregate
     * directly over the TEST table scan (no exchange).
     */
    private void checkAggWithGroupByIndexColumnsSingle(TestCase testCase) throws Exception {
        assertPlan(testCase,
                nodeOrAnyChild(isInstanceOf(IgniteColocatedHashAggregate.class)
                        .and(hasAggregate())
                        .and(input(isTableScan("TEST")))
                ),
                disableRules
        );
    }
    /**
     * Asserts a hash-distribution plan for aggregation grouped by indexed columns: a colocated hash aggregate fed by
     * an exchange over the TEST table scan.
     */
    private void checkAggWithGroupByIndexColumnsHash(TestCase testCase) throws Exception {
        assertPlan(testCase,
                nodeOrAnyChild(isInstanceOf(IgniteColocatedHashAggregate.class)
                        .and(hasAggregate())
                        .and(input(isInstanceOf(IgniteExchange.class)
                                .and(input(isTableScan("TEST")))
                        ))
                ),
                disableRules
        );
    }
    /**
     * Asserts a colocated plan for aggregation grouped by indexed columns: the colocated hash aggregate sits below an
     * exchange, directly over the TEST table scan.
     */
    private void checkAggWithColocatedGroupByIndexColumnsHash(TestCase testCase) throws Exception {
        assertPlan(testCase,
                nodeOrAnyChild(isInstanceOf(IgniteExchange.class)
                        .and(nodeOrAnyChild(isInstanceOf(IgniteColocatedHashAggregate.class)
                                .and(hasAggregate())
                                .and(input(isTableScan("TEST")))
                        ))
                ),
                disableRules
        );
    }
    /**
     * Asserts a single-distribution plan for GROUP BY without aggregate functions (e.g. plain DISTINCT): a colocated
     * hash aggregate with group sets but no aggregate calls, directly over the TEST table scan.
     */
    private void checkGroupWithNoAggregateSingle(TestCase testCase) throws Exception {
        assertPlan(testCase,
                nodeOrAnyChild(isInstanceOf(IgniteColocatedHashAggregate.class)
                        .and(not(hasAggregate()))
                        .and(hasGroups())
                        .and(input(isTableScan("TEST")))
                ),
                disableRules
        );
    }
    /**
     * Asserts a hash-distribution plan for GROUP BY without aggregate functions: a colocated hash aggregate with
     * group sets but no aggregate calls, fed by an exchange over the TEST table scan.
     */
    private void checkGroupWithNoAggregateHash(TestCase testCase) throws Exception {
        assertPlan(testCase,
                nodeOrAnyChild(isInstanceOf(IgniteColocatedHashAggregate.class)
                        .and(not(hasAggregate()))
                        .and(hasGroups())
                        .and(input(isInstanceOf(IgniteExchange.class)
                                .and(input(isTableScan("TEST")))
                        ))
                ),
                disableRules
        );
    }
    /**
     * Asserts a colocated plan for GROUP BY without aggregate functions: the colocated hash aggregate (group sets,
     * no aggregate calls) sits below an exchange, directly over the TEST table scan.
     */
    private void checkColocatedGroupWithNoAggregateHash(TestCase testCase) throws Exception {
        assertPlan(testCase,
                nodeOrAnyChild(isInstanceOf(IgniteExchange.class)
                        .and(input(isInstanceOf(IgniteColocatedHashAggregate.class)
                                .and(not(hasAggregate()))
                                .and(hasGroups())
                                .and(input(isTableScan("TEST")))
                        ))
                ),
                disableRules
        );
    }
    /**
     * Asserts a single-distribution plan for GROUP BY with ORDER BY on the group columns: an explicit sort with the
     * given collation is placed on top of the colocated hash aggregate over the TEST table scan.
     *
     * @param testCase Test case to validate.
     * @param collation Collation the top-level sort node is expected to produce.
     */
    private void checkGroupsWithOrderByGroupColumnsSingle(TestCase testCase, RelCollation collation) throws Exception {
        assertPlan(testCase,
                isInstanceOf(IgniteSort.class)
                        .and(s -> s.collation().equals(collation))
                        .and(nodeOrAnyChild(isInstanceOf(IgniteColocatedHashAggregate.class)
                                .and(input(isTableScan("TEST")))
                        )),
                disableRules
        );
    }
    /**
     * Asserts a hash-distribution plan for GROUP BY with ORDER BY on the group columns: a sort with the given
     * collation on top of a colocated hash aggregate, which in turn is fed by an exchange over the TEST table scan.
     *
     * @param testCase Test case to validate.
     * @param collation Collation the top-level sort node is expected to produce.
     */
    private void checkGroupsWithOrderByGroupColumnsHash(TestCase testCase, RelCollation collation) throws Exception {
        assertPlan(testCase,
                isInstanceOf(IgniteSort.class)
                        .and(s -> s.collation().equals(collation))
                        .and(input(isInstanceOf(IgniteColocatedHashAggregate.class)
                                .and(input(isInstanceOf(IgniteExchange.class)
                                        .and(input(isTableScan("TEST")))
                                ))
                        )),
                disableRules
        );
    }
    /**
     * Asserts a COUNT(DISTINCT ...) plan over a distributed table: a colocated hash aggregate with empty group sets
     * on top of a single-distribution exchange over the table scan, so counting happens after all rows are gathered
     * on one node.
     */
    private void checkCountDistinctHash(TestCase testCase) throws Exception {
        assertPlan(testCase, isInstanceOf(IgniteColocatedHashAggregate.class)
                .and(hasNoGroupSets(IgniteColocatedHashAggregate::getGroupSets))
                .and(input(isInstanceOf(IgniteExchange.class)
                        .and(hasDistribution(IgniteDistributions.single()))
                        .and(input(isInstanceOf(IgniteTableScan.class))))
                ), disableRules);
    }
    /**
     * Asserts that, for a single-distribution table, ORDER BY ... DESC on the group column yields a sort node with a
     * DESC_NULLS_FIRST collation on field 0 above the colocated hash aggregate.
     */
    private void checkDerivedCollationWithOrderByGroupColumnSingle(TestCase testCase) throws Exception {
        // Descending order with NULLS FIRST on the first (group) column.
        RelCollation requiredCollation = RelCollations.of(TraitUtils.createFieldCollation(0, Collation.DESC_NULLS_FIRST));
        assertPlan(testCase,
                isInstanceOf(IgniteSort.class)
                        .and(hasCollation(requiredCollation))
                        .and(input(isInstanceOf(IgniteColocatedHashAggregate.class))),
                disableRules);
    }
    /**
     * Asserts that, for a hash-distributed table, ORDER BY ... DESC on the group column yields a sort node with a
     * DESC_NULLS_FIRST collation on field 0 above a colocated hash aggregate carrying the aggregate call.
     */
    private void checkDerivedCollationWithOrderByGroupColumnHash(TestCase testCase) throws Exception {
        // Descending order with NULLS FIRST on the first (group) column.
        RelCollation requiredCollation = RelCollations.of(TraitUtils.createFieldCollation(0, Collation.DESC_NULLS_FIRST));
        assertPlan(testCase,
                isInstanceOf(IgniteSort.class)
                        .and(hasCollation(requiredCollation))
                        .and(input(isInstanceOf(IgniteColocatedHashAggregate.class)
                                .and(hasAggregate())
                        )),
                disableRules);
    }
    /**
     * Asserts that, for a single-distribution table, ordering by a subset of the group columns yields a sort node
     * with a DESC_NULLS_FIRST collation on field 1 above the colocated hash aggregate.
     */
    private void checkDerivedCollationWithOrderBySubsetOfGroupColumnsSingle(TestCase testCase) throws Exception {
        // Only the second output field participates in the expected collation.
        RelCollation outputCollation = RelCollations.of(
                TraitUtils.createFieldCollation(1, Collation.DESC_NULLS_FIRST)
        );
        assertPlan(testCase,
                isInstanceOf(IgniteSort.class)
                        .and(hasCollation(outputCollation))
                        .and(input(isInstanceOf(IgniteColocatedHashAggregate.class))),
                disableRules);
    }
    /**
     * Asserts that, for a hash-distributed table, ordering by a subset of the group columns yields a sort node with a
     * DESC_NULLS_FIRST collation on field 1 above a colocated hash aggregate fed by a single-distribution exchange.
     */
    private void checkDerivedCollationWithOrderBySubsetOfGroupColumnsHash(TestCase testCase) throws Exception {
        // Only the second output field participates in the expected collation.
        RelCollation outputCollation = RelCollations.of(
                TraitUtils.createFieldCollation(1, Collation.DESC_NULLS_FIRST)
        );
        assertPlan(testCase,
                isInstanceOf(IgniteSort.class)
                        .and(hasCollation(outputCollation))
                        .and(input(isInstanceOf(IgniteColocatedHashAggregate.class)
                                .and(input(isInstanceOf(IgniteExchange.class)
                                        .and(hasDistribution(IgniteDistributions.single()))
                                ))
                        )),
                disableRules);
    }
}
|
googleapis/google-cloud-java | 35,980 | java-alloydb/proto-google-cloud-alloydb-v1/src/main/java/com/google/cloud/alloydb/v1/ListDatabasesRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/alloydb/v1/service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.alloydb.v1;
/**
*
*
* <pre>
* Message for ListDatabases request.
* </pre>
*
* Protobuf type {@code google.cloud.alloydb.v1.ListDatabasesRequest}
*/
public final class ListDatabasesRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.alloydb.v1.ListDatabasesRequest)
ListDatabasesRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListDatabasesRequest.newBuilder() to construct.
private ListDatabasesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListDatabasesRequest() {
parent_ = "";
pageToken_ = "";
filter_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListDatabasesRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.alloydb.v1.ServiceProto
.internal_static_google_cloud_alloydb_v1_ListDatabasesRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.alloydb.v1.ServiceProto
.internal_static_google_cloud_alloydb_v1_ListDatabasesRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.alloydb.v1.ListDatabasesRequest.class,
com.google.cloud.alloydb.v1.ListDatabasesRequest.Builder.class);
}
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Parent value for ListDatabasesRequest.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. Parent value for ListDatabasesRequest.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PAGE_SIZE_FIELD_NUMBER = 2;
private int pageSize_ = 0;
/**
*
*
* <pre>
* Optional. The maximum number of databases to return. The service may return
* fewer than this value. If unspecified, 2000 is the default page_size. The
* max value of page_size will be 4000, values above max will be coerced to
* max.
* </pre>
*
* <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* Optional. A page token, received from a previous `ListDatabases` call.
* This should be provided to retrieve the subsequent page.
* This field is currently not supported, its value will be ignored if passed.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageToken.
*/
@java.lang.Override
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* Optional. A page token, received from a previous `ListDatabases` call.
* This should be provided to retrieve the subsequent page.
* This field is currently not supported, its value will be ignored if passed.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for pageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int FILTER_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object filter_ = "";
/**
*
*
* <pre>
* Optional. Filtering results.
* This field is currently not supported, its value will be ignored if passed.
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The filter.
*/
@java.lang.Override
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
}
}
/**
*
*
* <pre>
* Optional. Filtering results.
* This field is currently not supported, its value will be ignored if passed.
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for filter.
*/
@java.lang.Override
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
if (pageSize_ != 0) {
output.writeInt32(2, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (pageSize_ != 0) {
size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.alloydb.v1.ListDatabasesRequest)) {
return super.equals(obj);
}
com.google.cloud.alloydb.v1.ListDatabasesRequest other =
(com.google.cloud.alloydb.v1.ListDatabasesRequest) obj;
if (!getParent().equals(other.getParent())) return false;
if (getPageSize() != other.getPageSize()) return false;
if (!getPageToken().equals(other.getPageToken())) return false;
if (!getFilter().equals(other.getFilter())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
hash = (53 * hash) + getPageSize();
hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getPageToken().hashCode();
hash = (37 * hash) + FILTER_FIELD_NUMBER;
hash = (53 * hash) + getFilter().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.alloydb.v1.ListDatabasesRequest parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.alloydb.v1.ListDatabasesRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.alloydb.v1.ListDatabasesRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.alloydb.v1.ListDatabasesRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.alloydb.v1.ListDatabasesRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.alloydb.v1.ListDatabasesRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.alloydb.v1.ListDatabasesRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.alloydb.v1.ListDatabasesRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.alloydb.v1.ListDatabasesRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.alloydb.v1.ListDatabasesRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.alloydb.v1.ListDatabasesRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.alloydb.v1.ListDatabasesRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.alloydb.v1.ListDatabasesRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Message for ListDatabases request.
* </pre>
*
* Protobuf type {@code google.cloud.alloydb.v1.ListDatabasesRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.alloydb.v1.ListDatabasesRequest)
com.google.cloud.alloydb.v1.ListDatabasesRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.alloydb.v1.ServiceProto
.internal_static_google_cloud_alloydb_v1_ListDatabasesRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.alloydb.v1.ServiceProto
.internal_static_google_cloud_alloydb_v1_ListDatabasesRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.alloydb.v1.ListDatabasesRequest.class,
com.google.cloud.alloydb.v1.ListDatabasesRequest.Builder.class);
}
// Construct using com.google.cloud.alloydb.v1.ListDatabasesRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
pageSize_ = 0;
pageToken_ = "";
filter_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.alloydb.v1.ServiceProto
.internal_static_google_cloud_alloydb_v1_ListDatabasesRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.alloydb.v1.ListDatabasesRequest getDefaultInstanceForType() {
return com.google.cloud.alloydb.v1.ListDatabasesRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.alloydb.v1.ListDatabasesRequest build() {
com.google.cloud.alloydb.v1.ListDatabasesRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.alloydb.v1.ListDatabasesRequest buildPartial() {
com.google.cloud.alloydb.v1.ListDatabasesRequest result =
new com.google.cloud.alloydb.v1.ListDatabasesRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.alloydb.v1.ListDatabasesRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.pageSize_ = pageSize_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.pageToken_ = pageToken_;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.filter_ = filter_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.alloydb.v1.ListDatabasesRequest) {
return mergeFrom((com.google.cloud.alloydb.v1.ListDatabasesRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.alloydb.v1.ListDatabasesRequest other) {
if (other == com.google.cloud.alloydb.v1.ListDatabasesRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.getPageSize() != 0) {
setPageSize(other.getPageSize());
}
if (!other.getPageToken().isEmpty()) {
pageToken_ = other.pageToken_;
bitField0_ |= 0x00000004;
onChanged();
}
if (!other.getFilter().isEmpty()) {
filter_ = other.filter_;
bitField0_ |= 0x00000008;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 16:
{
pageSize_ = input.readInt32();
bitField0_ |= 0x00000002;
break;
} // case 16
case 26:
{
pageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
case 34:
{
filter_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000008;
break;
} // case 34
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Parent value for ListDatabasesRequest.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. Parent value for ListDatabasesRequest.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. Parent value for ListDatabasesRequest.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Parent value for ListDatabasesRequest.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Parent value for ListDatabasesRequest.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private int pageSize_;
/**
*
*
* <pre>
* Optional. The maximum number of databases to return. The service may return
* fewer than this value. If unspecified, 2000 is the default page_size. The
* max value of page_size will be 4000, values above max will be coerced to
* max.
* </pre>
*
* <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
/**
*
*
* <pre>
* Optional. The maximum number of databases to return. The service may return
* fewer than this value. If unspecified, 2000 is the default page_size. The
* max value of page_size will be 4000, values above max will be coerced to
* max.
* </pre>
*
* <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The pageSize to set.
* @return This builder for chaining.
*/
public Builder setPageSize(int value) {
pageSize_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The maximum number of databases to return. The service may return
* fewer than this value. If unspecified, 2000 is the default page_size. The
* max value of page_size will be 4000, values above max will be coerced to
* max.
* </pre>
*
* <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearPageSize() {
bitField0_ = (bitField0_ & ~0x00000002);
pageSize_ = 0;
onChanged();
return this;
}
private java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* Optional. A page token, received from a previous `ListDatabases` call.
* This should be provided to retrieve the subsequent page.
* This field is currently not supported, its value will be ignored if passed.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageToken.
*/
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. A page token, received from a previous `ListDatabases` call.
* This should be provided to retrieve the subsequent page.
* This field is currently not supported, its value will be ignored if passed.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for pageToken.
*/
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. A page token, received from a previous `ListDatabases` call.
* This should be provided to retrieve the subsequent page.
* This field is currently not supported, its value will be ignored if passed.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
pageToken_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. A page token, received from a previous `ListDatabases` call.
* This should be provided to retrieve the subsequent page.
* This field is currently not supported, its value will be ignored if passed.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearPageToken() {
pageToken_ = getDefaultInstance().getPageToken();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. A page token, received from a previous `ListDatabases` call.
* This should be provided to retrieve the subsequent page.
* This field is currently not supported, its value will be ignored if passed.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
pageToken_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
private java.lang.Object filter_ = "";
/**
*
*
* <pre>
* Optional. Filtering results.
* This field is currently not supported, its value will be ignored if passed.
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The filter.
*/
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. Filtering results.
* This field is currently not supported, its value will be ignored if passed.
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for filter.
*/
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. Filtering results.
* This field is currently not supported, its value will be ignored if passed.
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The filter to set.
* @return This builder for chaining.
*/
public Builder setFilter(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
filter_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Filtering results.
* This field is currently not supported, its value will be ignored if passed.
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearFilter() {
filter_ = getDefaultInstance().getFilter();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Filtering results.
* This field is currently not supported, its value will be ignored if passed.
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for filter to set.
* @return This builder for chaining.
*/
public Builder setFilterBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
filter_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
    // Unknown-field handling is delegated unchanged to the generated superclass;
    // declared final so the wire-compatibility behavior cannot be overridden.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.alloydb.v1.ListDatabasesRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.alloydb.v1.ListDatabasesRequest)
  // Shared immutable default instance; in proto3 an unset message field is
  // represented by this singleton.
  private static final com.google.cloud.alloydb.v1.ListDatabasesRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.alloydb.v1.ListDatabasesRequest();
  }
  public static com.google.cloud.alloydb.v1.ListDatabasesRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Singleton parser. On failure, the partially built message is attached to
  // the thrown InvalidProtocolBufferException so callers can inspect what was
  // successfully read before the error.
  private static final com.google.protobuf.Parser<ListDatabasesRequest> PARSER =
      new com.google.protobuf.AbstractParser<ListDatabasesRequest>() {
        @java.lang.Override
        public ListDatabasesRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures so all parse errors surface uniformly.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<ListDatabasesRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListDatabasesRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.alloydb.v1.ListDatabasesRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1beta1/model_garden_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1beta1;
/**
*
*
* <pre>
* Response message for
* [ModelGardenService.DeployPublisherModel][google.cloud.aiplatform.v1beta1.ModelGardenService.DeployPublisherModel].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.DeployPublisherModelResponse}
*/
@java.lang.Deprecated
public final class DeployPublisherModelResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.DeployPublisherModelResponse)
DeployPublisherModelResponseOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use DeployPublisherModelResponse.newBuilder() to construct.
  private DeployPublisherModelResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor used only for the default instance; all string fields
  // start at their proto3 empty-string defaults.
  private DeployPublisherModelResponse() {
    publisherModel_ = "";
    endpoint_ = "";
    model_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new DeployPublisherModelResponse();
  }
  // Descriptor plumbing: ties this class to its entry in the generated
  // ModelGardenServiceProto file descriptor.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.aiplatform.v1beta1.ModelGardenServiceProto
        .internal_static_google_cloud_aiplatform_v1beta1_DeployPublisherModelResponse_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.aiplatform.v1beta1.ModelGardenServiceProto
        .internal_static_google_cloud_aiplatform_v1beta1_DeployPublisherModelResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.aiplatform.v1beta1.DeployPublisherModelResponse.class,
            com.google.cloud.aiplatform.v1beta1.DeployPublisherModelResponse.Builder.class);
  }
public static final int PUBLISHER_MODEL_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object publisherModel_ = "";
/**
*
*
* <pre>
* Output only. The name of the PublisherModel resource.
* Format:
* `publishers/{publisher}/models/{publisher_model}@{version_id}`, or
* `publishers/hf-{hugging-face-author}/models/{hugging-face-model-name}@001`
* </pre>
*
* <code>
* string publisher_model = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The publisherModel.
*/
@java.lang.Override
public java.lang.String getPublisherModel() {
java.lang.Object ref = publisherModel_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
publisherModel_ = s;
return s;
}
}
/**
*
*
* <pre>
* Output only. The name of the PublisherModel resource.
* Format:
* `publishers/{publisher}/models/{publisher_model}@{version_id}`, or
* `publishers/hf-{hugging-face-author}/models/{hugging-face-model-name}@001`
* </pre>
*
* <code>
* string publisher_model = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for publisherModel.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPublisherModelBytes() {
java.lang.Object ref = publisherModel_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
publisherModel_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int ENDPOINT_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object endpoint_ = "";
/**
*
*
* <pre>
* Output only. The name of the Endpoint created.
* Format: `projects/{project}/locations/{location}/endpoints/{endpoint}`
* </pre>
*
* <code>
* string endpoint = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The endpoint.
*/
@java.lang.Override
public java.lang.String getEndpoint() {
java.lang.Object ref = endpoint_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
endpoint_ = s;
return s;
}
}
/**
*
*
* <pre>
* Output only. The name of the Endpoint created.
* Format: `projects/{project}/locations/{location}/endpoints/{endpoint}`
* </pre>
*
* <code>
* string endpoint = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for endpoint.
*/
@java.lang.Override
public com.google.protobuf.ByteString getEndpointBytes() {
java.lang.Object ref = endpoint_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
endpoint_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int MODEL_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object model_ = "";
/**
*
*
* <pre>
* Output only. The name of the Model created.
* Format: `projects/{project}/locations/{location}/models/{model}`
* </pre>
*
* <code>
* string model = 3 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The model.
*/
@java.lang.Override
public java.lang.String getModel() {
java.lang.Object ref = model_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
model_ = s;
return s;
}
}
/**
*
*
* <pre>
* Output only. The name of the Model created.
* Format: `projects/{project}/locations/{location}/models/{model}`
* </pre>
*
* <code>
* string model = 3 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for model.
*/
@java.lang.Override
public com.google.protobuf.ByteString getModelBytes() {
java.lang.Object ref = model_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
model_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
  // Memoized result: -1 = not yet computed, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields exist on this message, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes set fields in field-number order; proto3 skips fields that
  // still hold the empty-string default.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(publisherModel_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, publisherModel_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(endpoint_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, endpoint_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(model_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, model_);
    }
    // Unknown fields seen at parse time are round-tripped for forward compat.
    getUnknownFields().writeTo(output);
  }
  // Computes (and memoizes in memoizedSize, -1 = unknown) the byte length of
  // the serialized message; must mirror writeTo's skip-default logic exactly.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(publisherModel_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, publisherModel_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(endpoint_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, endpoint_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(model_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, model_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality: two messages are equal iff every field and the unknown
  // field set compare equal. Non-message operands fall back to super.equals.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.DeployPublisherModelResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.aiplatform.v1beta1.DeployPublisherModelResponse other =
        (com.google.cloud.aiplatform.v1beta1.DeployPublisherModelResponse) obj;
    if (!getPublisherModel().equals(other.getPublisherModel())) return false;
    if (!getEndpoint().equals(other.getEndpoint())) return false;
    if (!getModel().equals(other.getModel())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash is memoized (0 = not yet computed) and mixes the descriptor plus each
  // field tagged by its field number, consistent with equals above.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PUBLISHER_MODEL_FIELD_NUMBER;
    hash = (53 * hash) + getPublisherModel().hashCode();
    hash = (37 * hash) + ENDPOINT_FIELD_NUMBER;
    hash = (53 * hash) + getEndpoint().hashCode();
    hash = (37 * hash) + MODEL_FIELD_NUMBER;
    hash = (53 * hash) + getModel().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard static parse entry points, all delegating to the singleton PARSER.
  // The byte-array/ByteString/ByteBuffer variants throw
  // InvalidProtocolBufferException; the stream variants additionally surface
  // IOException. parseDelimitedFrom expects a varint length prefix.
  public static com.google.cloud.aiplatform.v1beta1.DeployPublisherModelResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.aiplatform.v1beta1.DeployPublisherModelResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1beta1.DeployPublisherModelResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.aiplatform.v1beta1.DeployPublisherModelResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1beta1.DeployPublisherModelResponse parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.aiplatform.v1beta1.DeployPublisherModelResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1beta1.DeployPublisherModelResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.aiplatform.v1beta1.DeployPublisherModelResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1beta1.DeployPublisherModelResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.aiplatform.v1beta1.DeployPublisherModelResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1beta1.DeployPublisherModelResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.aiplatform.v1beta1.DeployPublisherModelResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  // Returns a builder pre-populated with the given prototype's fields.
  public static Builder newBuilder(
      com.google.cloud.aiplatform.v1beta1.DeployPublisherModelResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields a fresh empty builder; any other instance
    // copies its fields into the new builder.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Response message for
* [ModelGardenService.DeployPublisherModel][google.cloud.aiplatform.v1beta1.ModelGardenService.DeployPublisherModel].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.DeployPublisherModelResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.DeployPublisherModelResponse)
com.google.cloud.aiplatform.v1beta1.DeployPublisherModelResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.aiplatform.v1beta1.ModelGardenServiceProto
          .internal_static_google_cloud_aiplatform_v1beta1_DeployPublisherModelResponse_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.aiplatform.v1beta1.ModelGardenServiceProto
          .internal_static_google_cloud_aiplatform_v1beta1_DeployPublisherModelResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.aiplatform.v1beta1.DeployPublisherModelResponse.class,
              com.google.cloud.aiplatform.v1beta1.DeployPublisherModelResponse.Builder.class);
    }
    // Construct using com.google.cloud.aiplatform.v1beta1.DeployPublisherModelResponse.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    // Resets every field to its proto3 default and clears all has-bits.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      publisherModel_ = "";
      endpoint_ = "";
      model_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.aiplatform.v1beta1.ModelGardenServiceProto
          .internal_static_google_cloud_aiplatform_v1beta1_DeployPublisherModelResponse_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.aiplatform.v1beta1.DeployPublisherModelResponse
        getDefaultInstanceForType() {
      return com.google.cloud.aiplatform.v1beta1.DeployPublisherModelResponse.getDefaultInstance();
    }
    // build() enforces initialization; since this message has no required
    // fields, the isInitialized check always passes.
    @java.lang.Override
    public com.google.cloud.aiplatform.v1beta1.DeployPublisherModelResponse build() {
      com.google.cloud.aiplatform.v1beta1.DeployPublisherModelResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.aiplatform.v1beta1.DeployPublisherModelResponse buildPartial() {
      com.google.cloud.aiplatform.v1beta1.DeployPublisherModelResponse result =
          new com.google.cloud.aiplatform.v1beta1.DeployPublisherModelResponse(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies only the fields whose has-bits are set in bitField0_
    // (0x1 = publisher_model, 0x2 = endpoint, 0x4 = model).
    private void buildPartial0(
        com.google.cloud.aiplatform.v1beta1.DeployPublisherModelResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.publisherModel_ = publisherModel_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.endpoint_ = endpoint_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.model_ = model_;
      }
    }
    // The following overrides simply delegate to the generated superclass;
    // they are regenerated verbatim by protoc.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.aiplatform.v1beta1.DeployPublisherModelResponse) {
        return mergeFrom((com.google.cloud.aiplatform.v1beta1.DeployPublisherModelResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // proto3 string merge semantics: copy a field from `other` only when it is
    // non-empty (i.e. not at its default).
    public Builder mergeFrom(
        com.google.cloud.aiplatform.v1beta1.DeployPublisherModelResponse other) {
      if (other
          == com.google.cloud.aiplatform.v1beta1.DeployPublisherModelResponse.getDefaultInstance())
        return this;
      if (!other.getPublisherModel().isEmpty()) {
        publisherModel_ = other.publisherModel_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getEndpoint().isEmpty()) {
        endpoint_ = other.endpoint_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (!other.getModel().isEmpty()) {
        model_ = other.model_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      // No required fields, so a builder is always initialized.
      return true;
    }
    // Wire-format parse loop. Tags 10/18/26 are (field_number << 3) | 2
    // (length-delimited) for fields 1-3; unrecognized tags are preserved as
    // unknown fields.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                publisherModel_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                endpoint_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                model_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private java.lang.Object publisherModel_ = "";
/**
*
*
* <pre>
* Output only. The name of the PublisherModel resource.
* Format:
* `publishers/{publisher}/models/{publisher_model}@{version_id}`, or
* `publishers/hf-{hugging-face-author}/models/{hugging-face-model-name}@001`
* </pre>
*
* <code>
* string publisher_model = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The publisherModel.
*/
public java.lang.String getPublisherModel() {
java.lang.Object ref = publisherModel_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
publisherModel_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Output only. The name of the PublisherModel resource.
* Format:
* `publishers/{publisher}/models/{publisher_model}@{version_id}`, or
* `publishers/hf-{hugging-face-author}/models/{hugging-face-model-name}@001`
* </pre>
*
* <code>
* string publisher_model = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for publisherModel.
*/
public com.google.protobuf.ByteString getPublisherModelBytes() {
java.lang.Object ref = publisherModel_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
publisherModel_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Output only. The name of the PublisherModel resource.
* Format:
* `publishers/{publisher}/models/{publisher_model}@{version_id}`, or
* `publishers/hf-{hugging-face-author}/models/{hugging-face-model-name}@001`
* </pre>
*
* <code>
* string publisher_model = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The publisherModel to set.
* @return This builder for chaining.
*/
public Builder setPublisherModel(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
publisherModel_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Output only. The name of the PublisherModel resource.
* Format:
* `publishers/{publisher}/models/{publisher_model}@{version_id}`, or
* `publishers/hf-{hugging-face-author}/models/{hugging-face-model-name}@001`
* </pre>
*
* <code>
* string publisher_model = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearPublisherModel() {
publisherModel_ = getDefaultInstance().getPublisherModel();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Output only. The name of the PublisherModel resource.
* Format:
* `publishers/{publisher}/models/{publisher_model}@{version_id}`, or
* `publishers/hf-{hugging-face-author}/models/{hugging-face-model-name}@001`
* </pre>
*
* <code>
* string publisher_model = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for publisherModel to set.
* @return This builder for chaining.
*/
public Builder setPublisherModelBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
publisherModel_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object endpoint_ = "";
/**
*
*
* <pre>
* Output only. The name of the Endpoint created.
* Format: `projects/{project}/locations/{location}/endpoints/{endpoint}`
* </pre>
*
* <code>
* string endpoint = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The endpoint.
*/
public java.lang.String getEndpoint() {
java.lang.Object ref = endpoint_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
endpoint_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Output only. The name of the Endpoint created.
* Format: `projects/{project}/locations/{location}/endpoints/{endpoint}`
* </pre>
*
* <code>
* string endpoint = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for endpoint.
*/
public com.google.protobuf.ByteString getEndpointBytes() {
java.lang.Object ref = endpoint_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
endpoint_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Output only. The name of the Endpoint created.
* Format: `projects/{project}/locations/{location}/endpoints/{endpoint}`
* </pre>
*
* <code>
* string endpoint = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The endpoint to set.
* @return This builder for chaining.
*/
public Builder setEndpoint(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
endpoint_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Output only. The name of the Endpoint created.
* Format: `projects/{project}/locations/{location}/endpoints/{endpoint}`
* </pre>
*
* <code>
* string endpoint = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearEndpoint() {
endpoint_ = getDefaultInstance().getEndpoint();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Output only. The name of the Endpoint created.
* Format: `projects/{project}/locations/{location}/endpoints/{endpoint}`
* </pre>
*
* <code>
* string endpoint = 2 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for endpoint to set.
* @return This builder for chaining.
*/
public Builder setEndpointBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
endpoint_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
// Holds either a java.lang.String or a ByteString; the accessors convert
// lazily between the two forms and cache the result.
private java.lang.Object model_ = "";
/**
*
*
* <pre>
* Output only. The name of the Model created.
* Format: `projects/{project}/locations/{location}/models/{model}`
* </pre>
*
* <code>
* string model = 3 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The model.
*/
public java.lang.String getModel() {
  java.lang.Object ref = model_;
  if (!(ref instanceof java.lang.String)) {
    // Field was stored as a ByteString (e.g. via setModelBytes): decode the
    // UTF-8 once and cache the String form.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    model_ = s;
    return s;
  } else {
    return (java.lang.String) ref;
  }
}
/**
*
*
* <pre>
* Output only. The name of the Model created.
* Format: `projects/{project}/locations/{location}/models/{model}`
* </pre>
*
* <code>
* string model = 3 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for model.
*/
public com.google.protobuf.ByteString getModelBytes() {
  java.lang.Object ref = model_;
  if (ref instanceof String) {
    // Cached as String; convert to ByteString once and memoize (both forms
    // are immutable, so the unsynchronized swap preserves the value).
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    model_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
/**
*
*
* <pre>
* Output only. The name of the Model created.
* Format: `projects/{project}/locations/{location}/models/{model}`
* </pre>
*
* <code>
* string model = 3 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The model to set.
* @return This builder for chaining.
*/
public Builder setModel(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  model_ = value;
  bitField0_ |= 0x00000004;  // mark field 3 (model) as explicitly set
  onChanged();
  return this;
}
/**
*
*
* <pre>
* Output only. The name of the Model created.
* Format: `projects/{project}/locations/{location}/models/{model}`
* </pre>
*
* <code>
* string model = 3 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearModel() {
  // Restore the proto default and drop the presence bit for field 3.
  model_ = getDefaultInstance().getModel();
  bitField0_ = (bitField0_ & ~0x00000004);
  onChanged();
  return this;
}
/**
*
*
* <pre>
* Output only. The name of the Model created.
* Format: `projects/{project}/locations/{location}/models/{model}`
* </pre>
*
* <code>
* string model = 3 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for model to set.
* @return This builder for chaining.
*/
public Builder setModelBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);  // proto3 string fields must hold valid UTF-8
  model_ = value;
  bitField0_ |= 0x00000004;
  onChanged();
  return this;
}
// Unknown-field handling is delegated unchanged to the base generated builder.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}

@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.DeployPublisherModelResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.DeployPublisherModelResponse)
// Shared immutable default instance, created eagerly at class-load time and
// returned by getDefaultInstance()/getDefaultInstanceForType().
private static final com.google.cloud.aiplatform.v1beta1.DeployPublisherModelResponse
    DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.DeployPublisherModelResponse();
}

public static com.google.cloud.aiplatform.v1beta1.DeployPublisherModelResponse
    getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Wire-format parser: parses into a fresh Builder and always returns a
// partial message, attaching whatever was parsed so far to any exception.
private static final com.google.protobuf.Parser<DeployPublisherModelResponse> PARSER =
    new com.google.protobuf.AbstractParser<DeployPublisherModelResponse>() {
      @java.lang.Override
      public DeployPublisherModelResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Preserve the partially-parsed message for callers that inspect it.
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap plain I/O failures in the protobuf-specific exception type.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
// Static and instance accessors both expose the single shared PARSER.
public static com.google.protobuf.Parser<DeployPublisherModelResponse> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<DeployPublisherModelResponse> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.DeployPublisherModelResponse
    getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/api/monitored_resource.proto
// Protobuf Java Version: 3.25.8
package com.google.api;
/**
*
*
* <pre>
* An object representing a resource that can be used for monitoring, logging,
* billing, or other purposes. Examples include virtual machine instances,
* databases, and storage devices such as disks. The `type` field identifies a
* [MonitoredResourceDescriptor][google.api.MonitoredResourceDescriptor] object
* that describes the resource's schema. Information in the `labels` field
* identifies the actual resource and its attributes according to the schema.
* For example, a particular Compute Engine VM instance could be represented by
* the following object, because the
* [MonitoredResourceDescriptor][google.api.MonitoredResourceDescriptor] for
* `"gce_instance"` has labels
* `"project_id"`, `"instance_id"` and `"zone"`:
*
* { "type": "gce_instance",
* "labels": { "project_id": "my-project",
* "instance_id": "12345678901234",
* "zone": "us-central1-a" }}
* </pre>
*
* Protobuf type {@code google.api.MonitoredResource}
*/
public final class MonitoredResource extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.api.MonitoredResource)
MonitoredResourceOrBuilder {
private static final long serialVersionUID = 0L;
// Use MonitoredResource.newBuilder() to construct.
private MonitoredResource(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

private MonitoredResource() {
  type_ = "";  // labels_ stays null; internalGetLabels() substitutes the empty map field
}

// Reflection hook used by the protobuf runtime to create blank instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new MonitoredResource();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.api.MonitoredResourceProto
      .internal_static_google_api_MonitoredResource_descriptor;
}

@SuppressWarnings({"rawtypes"})
@java.lang.Override
protected com.google.protobuf.MapFieldReflectionAccessor internalGetMapFieldReflection(
    int number) {
  // Only field number 2 (labels) is a map field on this message.
  switch (number) {
    case 2:
      return internalGetLabels();
    default:
      throw new RuntimeException("Invalid map field number: " + number);
  }
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.api.MonitoredResourceProto
      .internal_static_google_api_MonitoredResource_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.api.MonitoredResource.class, com.google.api.MonitoredResource.Builder.class);
}

public static final int TYPE_FIELD_NUMBER = 1;

// Holds either a java.lang.String or a ByteString; decoded lazily by getType().
@SuppressWarnings("serial")
private volatile java.lang.Object type_ = "";
/**
*
*
* <pre>
* Required. The monitored resource type. This field must match
* the `type` field of a
* [MonitoredResourceDescriptor][google.api.MonitoredResourceDescriptor]
* object. For example, the type of a Compute Engine VM instance is
* `gce_instance`. Some descriptors include the service name in the type; for
* example, the type of a Datastream stream is
* `datastream.googleapis.com/Stream`.
* </pre>
*
* <code>string type = 1;</code>
*
* @return The type.
*/
@java.lang.Override
public java.lang.String getType() {
  java.lang.Object ref = type_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // Stored as ByteString after parsing: decode the UTF-8 once and cache
    // the String form (immutable, so the unsynchronized swap is safe).
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    type_ = s;
    return s;
  }
}
/**
*
*
* <pre>
* Required. The monitored resource type. This field must match
* the `type` field of a
* [MonitoredResourceDescriptor][google.api.MonitoredResourceDescriptor]
* object. For example, the type of a Compute Engine VM instance is
* `gce_instance`. Some descriptors include the service name in the type; for
* example, the type of a Datastream stream is
* `datastream.googleapis.com/Stream`.
* </pre>
*
* <code>string type = 1;</code>
*
* @return The bytes for type.
*/
@java.lang.Override
public com.google.protobuf.ByteString getTypeBytes() {
  java.lang.Object ref = type_;
  if (ref instanceof java.lang.String) {
    // Cached as String; convert to ByteString once and memoize.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    type_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int LABELS_FIELD_NUMBER = 2;

// Lazy-init holder for the string->string MapEntry prototype backing the
// labels map field (field 2).
private static final class LabelsDefaultEntryHolder {
  static final com.google.protobuf.MapEntry<java.lang.String, java.lang.String> defaultEntry =
      com.google.protobuf.MapEntry.<java.lang.String, java.lang.String>newDefaultInstance(
          com.google.api.MonitoredResourceProto
              .internal_static_google_api_MonitoredResource_LabelsEntry_descriptor,
          com.google.protobuf.WireFormat.FieldType.STRING,
          "",
          com.google.protobuf.WireFormat.FieldType.STRING,
          "");
}

@SuppressWarnings("serial")
private com.google.protobuf.MapField<java.lang.String, java.lang.String> labels_;

private com.google.protobuf.MapField<java.lang.String, java.lang.String> internalGetLabels() {
  // A null backing field means "no labels": return the shared empty map field.
  if (labels_ == null) {
    return com.google.protobuf.MapField.emptyMapField(LabelsDefaultEntryHolder.defaultEntry);
  }
  return labels_;
}

public int getLabelsCount() {
  return internalGetLabels().getMap().size();
}
/**
*
*
* <pre>
* Required. Values for all of the labels listed in the associated monitored
* resource descriptor. For example, Compute Engine VM instances use the
* labels `"project_id"`, `"instance_id"`, and `"zone"`.
* </pre>
*
* <code>map<string, string> labels = 2;</code>
*/
@java.lang.Override
public boolean containsLabels(java.lang.String key) {
  if (key == null) {
    throw new NullPointerException("map key");
  }
  return internalGetLabels().getMap().containsKey(key);
}

/** Use {@link #getLabelsMap()} instead. */
@java.lang.Override
@java.lang.Deprecated
public java.util.Map<java.lang.String, java.lang.String> getLabels() {
  return getLabelsMap();
}
/**
*
*
* <pre>
* Required. Values for all of the labels listed in the associated monitored
* resource descriptor. For example, Compute Engine VM instances use the
* labels `"project_id"`, `"instance_id"`, and `"zone"`.
* </pre>
*
* <code>map<string, string> labels = 2;</code>
*/
@java.lang.Override
public java.util.Map<java.lang.String, java.lang.String> getLabelsMap() {
  return internalGetLabels().getMap();
}
/**
*
*
* <pre>
* Required. Values for all of the labels listed in the associated monitored
* resource descriptor. For example, Compute Engine VM instances use the
* labels `"project_id"`, `"instance_id"`, and `"zone"`.
* </pre>
*
* <code>map<string, string> labels = 2;</code>
*/
@java.lang.Override
public /* nullable */ java.lang.String getLabelsOrDefault(
    java.lang.String key,
    /* nullable */
    java.lang.String defaultValue) {
  if (key == null) {
    throw new NullPointerException("map key");
  }
  java.util.Map<java.lang.String, java.lang.String> map = internalGetLabels().getMap();
  return map.containsKey(key) ? map.get(key) : defaultValue;
}
/**
*
*
* <pre>
* Required. Values for all of the labels listed in the associated monitored
* resource descriptor. For example, Compute Engine VM instances use the
* labels `"project_id"`, `"instance_id"`, and `"zone"`.
* </pre>
*
* <code>map<string, string> labels = 2;</code>
*/
@java.lang.Override
public java.lang.String getLabelsOrThrow(java.lang.String key) {
  if (key == null) {
    throw new NullPointerException("map key");
  }
  java.util.Map<java.lang.String, java.lang.String> map = internalGetLabels().getMap();
  // Absent key is a caller error, not a null result.
  if (!map.containsKey(key)) {
    throw new java.lang.IllegalArgumentException();
  }
  return map.get(key);
}
// Memoized initialization state: -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // No required fields on this message, so it is always initialized.
  memoizedIsInitialized = 1;
  return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  // Field 1 (type) is only written when non-empty, per proto3 semantics.
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(type_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, type_);
  }
  com.google.protobuf.GeneratedMessageV3.serializeStringMapTo(
      output, internalGetLabels(), LabelsDefaultEntryHolder.defaultEntry, 2);
  getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
  // memoizedSize caches the result; -1 means "not yet computed".
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(type_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, type_);
  }
  // Each labels entry is sized as a nested MapEntry message on field 2.
  for (java.util.Map.Entry<java.lang.String, java.lang.String> entry :
      internalGetLabels().getMap().entrySet()) {
    com.google.protobuf.MapEntry<java.lang.String, java.lang.String> labels__ =
        LabelsDefaultEntryHolder.defaultEntry
            .newBuilderForType()
            .setKey(entry.getKey())
            .setValue(entry.getValue())
            .build();
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, labels__);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.api.MonitoredResource)) {
    return super.equals(obj);
  }
  com.google.api.MonitoredResource other = (com.google.api.MonitoredResource) obj;
  // Field-by-field comparison: type, labels map, then unknown fields.
  if (!getType().equals(other.getType())) return false;
  if (!internalGetLabels().equals(other.internalGetLabels())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
@java.lang.Override
public int hashCode() {
  // 0 doubles as the "not yet computed" sentinel for the memoized hash.
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + TYPE_FIELD_NUMBER;
  hash = (53 * hash) + getType().hashCode();
  // Empty maps are skipped so present/absent-but-empty hash identically.
  if (!internalGetLabels().getMap().isEmpty()) {
    hash = (37 * hash) + LABELS_FIELD_NUMBER;
    hash = (53 * hash) + internalGetLabels().hashCode();
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Standard generated parse entry points; all delegate to the shared PARSER,
// with stream variants routed through the GeneratedMessageV3 I/O helpers.
public static com.google.api.MonitoredResource parseFrom(java.nio.ByteBuffer data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.api.MonitoredResource parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.api.MonitoredResource parseFrom(com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.api.MonitoredResource parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.api.MonitoredResource parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.api.MonitoredResource parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.api.MonitoredResource parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.api.MonitoredResource parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

// Delimited variants read a leading varint length before the message bytes.
public static com.google.api.MonitoredResource parseDelimitedFrom(java.io.InputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.api.MonitoredResource parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.api.MonitoredResource parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.api.MonitoredResource parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

public static Builder newBuilder(com.google.api.MonitoredResource prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  // The default instance yields a fresh empty builder; any other instance
  // is copied into the builder via mergeFrom.
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
*
*
* <pre>
* An object representing a resource that can be used for monitoring, logging,
* billing, or other purposes. Examples include virtual machine instances,
* databases, and storage devices such as disks. The `type` field identifies a
* [MonitoredResourceDescriptor][google.api.MonitoredResourceDescriptor] object
* that describes the resource's schema. Information in the `labels` field
* identifies the actual resource and its attributes according to the schema.
* For example, a particular Compute Engine VM instance could be represented by
* the following object, because the
* [MonitoredResourceDescriptor][google.api.MonitoredResourceDescriptor] for
* `"gce_instance"` has labels
* `"project_id"`, `"instance_id"` and `"zone"`:
*
* { "type": "gce_instance",
* "labels": { "project_id": "my-project",
* "instance_id": "12345678901234",
* "zone": "us-central1-a" }}
* </pre>
*
* Protobuf type {@code google.api.MonitoredResource}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.api.MonitoredResource)
com.google.api.MonitoredResourceOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.api.MonitoredResourceProto
      .internal_static_google_api_MonitoredResource_descriptor;
}

// Map-field reflection hooks: field 2 (labels) is the only map field.
@SuppressWarnings({"rawtypes"})
protected com.google.protobuf.MapFieldReflectionAccessor internalGetMapFieldReflection(
    int number) {
  switch (number) {
    case 2:
      return internalGetLabels();
    default:
      throw new RuntimeException("Invalid map field number: " + number);
  }
}

@SuppressWarnings({"rawtypes"})
protected com.google.protobuf.MapFieldReflectionAccessor internalGetMutableMapFieldReflection(
    int number) {
  switch (number) {
    case 2:
      return internalGetMutableLabels();
    default:
      throw new RuntimeException("Invalid map field number: " + number);
  }
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.api.MonitoredResourceProto
      .internal_static_google_api_MonitoredResource_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.api.MonitoredResource.class,
          com.google.api.MonitoredResource.Builder.class);
}
// Construct using com.google.api.MonitoredResource.newBuilder()
private Builder() {}

private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  super(parent);
}

@java.lang.Override
public Builder clear() {
  // Reset all presence bits and both fields to their proto defaults.
  super.clear();
  bitField0_ = 0;
  type_ = "";
  internalGetMutableLabels().clear();
  return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
  return com.google.api.MonitoredResourceProto
      .internal_static_google_api_MonitoredResource_descriptor;
}

@java.lang.Override
public com.google.api.MonitoredResource getDefaultInstanceForType() {
  return com.google.api.MonitoredResource.getDefaultInstance();
}

@java.lang.Override
public com.google.api.MonitoredResource build() {
  // Like buildPartial(), but rejects uninitialized results.
  com.google.api.MonitoredResource result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}
@java.lang.Override
public com.google.api.MonitoredResource buildPartial() {
  com.google.api.MonitoredResource result = new com.google.api.MonitoredResource(this);
  // Copy fields only when at least one presence bit is set.
  if (bitField0_ != 0) {
    buildPartial0(result);
  }
  onBuilt();
  return result;
}

// Transfers each field whose presence bit is set into the new message.
private void buildPartial0(com.google.api.MonitoredResource result) {
  int from_bitField0_ = bitField0_;
  if (((from_bitField0_ & 0x00000001) != 0)) {
    result.type_ = type_;
  }
  if (((from_bitField0_ & 0x00000002) != 0)) {
    // Hand the map over and freeze it; the built message must be immutable.
    result.labels_ = internalGetLabels();
    result.labels_.makeImmutable();
  }
}
// Reflective field mutators are delegated unchanged to the base builder.
@java.lang.Override
public Builder clone() {
  return super.clone();
}

@java.lang.Override
public Builder setField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.setField(field, value);
}

@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
  return super.clearField(field);
}

@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
  return super.clearOneof(oneof);
}

@java.lang.Override
public Builder setRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
  return super.setRepeatedField(field, index, value);
}

@java.lang.Override
public Builder addRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.addRepeatedField(field, value);
}

@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
  // Fast path for same-type merge; otherwise fall back to reflective merge.
  if (other instanceof com.google.api.MonitoredResource) {
    return mergeFrom((com.google.api.MonitoredResource) other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}
public Builder mergeFrom(com.google.api.MonitoredResource other) {
  if (other == com.google.api.MonitoredResource.getDefaultInstance()) return this;
  // Scalar merge semantics: only a non-empty incoming type overwrites ours.
  if (!other.getType().isEmpty()) {
    type_ = other.type_;
    bitField0_ |= 0x00000001;
    onChanged();
  }
  // Map merge semantics: incoming entries are added, overwriting same keys.
  internalGetMutableLabels().mergeFrom(other.internalGetLabels());
  bitField0_ |= 0x00000002;
  this.mergeUnknownFields(other.getUnknownFields());
  onChanged();
  return this;
}

@java.lang.Override
public final boolean isInitialized() {
  // No required fields, so a builder is always initialized.
  return true;
}
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    // Tag-driven wire-format loop: tag 10 = field 1 (type, length-delimited),
    // tag 18 = field 2 (labels map entry). Unknown tags are preserved.
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        case 10:
          {
            type_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000001;
            break;
          } // case 10
        case 18:
          {
            // Each map entry arrives as a nested MapEntry message.
            com.google.protobuf.MapEntry<java.lang.String, java.lang.String> labels__ =
                input.readMessage(
                    LabelsDefaultEntryHolder.defaultEntry.getParserForType(),
                    extensionRegistry);
            internalGetMutableLabels()
                .getMutableMap()
                .put(labels__.getKey(), labels__.getValue());
            bitField0_ |= 0x00000002;
            break;
          } // case 18
        default:
          {
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    // Notify parents even on failure; fields set before the error remain.
    onChanged();
  } // finally
  return this;
}
// Presence bits: 0x1 = type (field 1), 0x2 = labels (field 2).
private int bitField0_;
// Holds either a java.lang.String or a ByteString; converted lazily.
private java.lang.Object type_ = "";
/**
*
*
* <pre>
* Required. The monitored resource type. This field must match
* the `type` field of a
* [MonitoredResourceDescriptor][google.api.MonitoredResourceDescriptor]
* object. For example, the type of a Compute Engine VM instance is
* `gce_instance`. Some descriptors include the service name in the type; for
* example, the type of a Datastream stream is
* `datastream.googleapis.com/Stream`.
* </pre>
*
* <code>string type = 1;</code>
*
* @return The type.
*/
public java.lang.String getType() {
  java.lang.Object ref = type_;
  if (!(ref instanceof java.lang.String)) {
    // Stored as ByteString (e.g. via setTypeBytes): decode once and cache.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    type_ = s;
    return s;
  } else {
    return (java.lang.String) ref;
  }
}
/**
*
*
* <pre>
* Required. The monitored resource type. This field must match
* the `type` field of a
* [MonitoredResourceDescriptor][google.api.MonitoredResourceDescriptor]
* object. For example, the type of a Compute Engine VM instance is
* `gce_instance`. Some descriptors include the service name in the type; for
* example, the type of a Datastream stream is
* `datastream.googleapis.com/Stream`.
* </pre>
*
* <code>string type = 1;</code>
*
* @return The bytes for type.
*/
public com.google.protobuf.ByteString getTypeBytes() {
  java.lang.Object ref = type_;
  if (ref instanceof String) {
    // Cached as String; convert to ByteString once and memoize.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    type_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
/**
*
*
* <pre>
* Required. The monitored resource type. This field must match
* the `type` field of a
* [MonitoredResourceDescriptor][google.api.MonitoredResourceDescriptor]
* object. For example, the type of a Compute Engine VM instance is
* `gce_instance`. Some descriptors include the service name in the type; for
* example, the type of a Datastream stream is
* `datastream.googleapis.com/Stream`.
* </pre>
*
* <code>string type = 1;</code>
*
* @param value The type to set.
* @return This builder for chaining.
*/
public Builder setType(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  type_ = value;
  bitField0_ |= 0x00000001;  // mark field 1 (type) as explicitly set
  onChanged();
  return this;
}
/**
*
*
* <pre>
* Required. The monitored resource type. This field must match
* the `type` field of a
* [MonitoredResourceDescriptor][google.api.MonitoredResourceDescriptor]
* object. For example, the type of a Compute Engine VM instance is
* `gce_instance`. Some descriptors include the service name in the type; for
* example, the type of a Datastream stream is
* `datastream.googleapis.com/Stream`.
* </pre>
*
* <code>string type = 1;</code>
*
* @return This builder for chaining.
*/
public Builder clearType() {
  // Restore the proto default and drop the presence bit for field 1.
  type_ = getDefaultInstance().getType();
  bitField0_ = (bitField0_ & ~0x00000001);
  onChanged();
  return this;
}
/**
*
*
* <pre>
* Required. The monitored resource type. This field must match
* the `type` field of a
* [MonitoredResourceDescriptor][google.api.MonitoredResourceDescriptor]
* object. For example, the type of a Compute Engine VM instance is
* `gce_instance`. Some descriptors include the service name in the type; for
* example, the type of a Datastream stream is
* `datastream.googleapis.com/Stream`.
* </pre>
*
* <code>string type = 1;</code>
*
* @param value The bytes for type to set.
* @return This builder for chaining.
*/
public Builder setTypeBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);  // proto3 string fields must hold valid UTF-8
  type_ = value;
  bitField0_ |= 0x00000001;
  onChanged();
  return this;
}
private com.google.protobuf.MapField<java.lang.String, java.lang.String> labels_;

// Read-only view: null backing field yields the shared empty map field.
private com.google.protobuf.MapField<java.lang.String, java.lang.String> internalGetLabels() {
  if (labels_ == null) {
    return com.google.protobuf.MapField.emptyMapField(LabelsDefaultEntryHolder.defaultEntry);
  }
  return labels_;
}

// Mutable view: allocates on first use and copy-on-writes a frozen map
// (one shared with a built message via buildPartial0).
private com.google.protobuf.MapField<java.lang.String, java.lang.String>
    internalGetMutableLabels() {
  if (labels_ == null) {
    labels_ = com.google.protobuf.MapField.newMapField(LabelsDefaultEntryHolder.defaultEntry);
  }
  if (!labels_.isMutable()) {
    labels_ = labels_.copy();
  }
  bitField0_ |= 0x00000002;
  onChanged();
  return labels_;
}

public int getLabelsCount() {
  return internalGetLabels().getMap().size();
}
/**
*
*
* <pre>
* Required. Values for all of the labels listed in the associated monitored
* resource descriptor. For example, Compute Engine VM instances use the
* labels `"project_id"`, `"instance_id"`, and `"zone"`.
* </pre>
*
* <code>map<string, string> labels = 2;</code>
*/
@java.lang.Override
public boolean containsLabels(java.lang.String key) {
  if (key == null) {
    throw new NullPointerException("map key");
  }
  return internalGetLabels().getMap().containsKey(key);
}

/** Use {@link #getLabelsMap()} instead. */
@java.lang.Override
@java.lang.Deprecated
public java.util.Map<java.lang.String, java.lang.String> getLabels() {
  return getLabelsMap();
}
/**
*
*
* <pre>
* Required. Values for all of the labels listed in the associated monitored
* resource descriptor. For example, Compute Engine VM instances use the
* labels `"project_id"`, `"instance_id"`, and `"zone"`.
* </pre>
*
* <code>map<string, string> labels = 2;</code>
*/
@java.lang.Override
public java.util.Map<java.lang.String, java.lang.String> getLabelsMap() {
  return internalGetLabels().getMap();
}
/**
*
*
* <pre>
* Required. Values for all of the labels listed in the associated monitored
* resource descriptor. For example, Compute Engine VM instances use the
* labels `"project_id"`, `"instance_id"`, and `"zone"`.
* </pre>
*
* <code>map<string, string> labels = 2;</code>
*/
@java.lang.Override
public /* nullable */ java.lang.String getLabelsOrDefault(
    java.lang.String key,
    /* nullable */
    java.lang.String defaultValue) {
  if (key == null) {
    throw new NullPointerException("map key");
  }
  java.util.Map<java.lang.String, java.lang.String> map = internalGetLabels().getMap();
  return map.containsKey(key) ? map.get(key) : defaultValue;
}
/**
*
*
* <pre>
* Required. Values for all of the labels listed in the associated monitored
* resource descriptor. For example, Compute Engine VM instances use the
* labels `"project_id"`, `"instance_id"`, and `"zone"`.
* </pre>
*
* <code>map<string, string> labels = 2;</code>
*/
@java.lang.Override
public java.lang.String getLabelsOrThrow(java.lang.String key) {
if (key == null) {
throw new NullPointerException("map key");
}
java.util.Map<java.lang.String, java.lang.String> map = internalGetLabels().getMap();
if (!map.containsKey(key)) {
throw new java.lang.IllegalArgumentException();
}
return map.get(key);
}
    /**
     * Clears all entries of the `labels` map and resets the field's tracking
     * bit (0x2) in {@code bitField0_}.
     *
     * <code>map<string, string> labels = 2;</code>
     */
    public Builder clearLabels() {
      bitField0_ = (bitField0_ & ~0x00000002);
      internalGetMutableLabels().getMutableMap().clear();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Values for all of the labels listed in the associated monitored
     * resource descriptor. For example, Compute Engine VM instances use the
     * labels `"project_id"`, `"instance_id"`, and `"zone"`.
     * </pre>
     *
     * <code>map<string, string> labels = 2;</code>
     */
    public Builder removeLabels(java.lang.String key) {
      if (key == null) {
        throw new NullPointerException("map key");
      }
      internalGetMutableLabels().getMutableMap().remove(key);
      return this;
    }
    /** Use alternate mutation accessors instead. */
    @java.lang.Deprecated
    public java.util.Map<java.lang.String, java.lang.String> getMutableLabels() {
      // Marks the field as set before handing out the mutable map.
      bitField0_ |= 0x00000002;
      return internalGetMutableLabels().getMutableMap();
    }
    /**
     *
     *
     * <pre>
     * Required. Values for all of the labels listed in the associated monitored
     * resource descriptor. For example, Compute Engine VM instances use the
     * labels `"project_id"`, `"instance_id"`, and `"zone"`.
     * </pre>
     *
     * <code>map<string, string> labels = 2;</code>
     */
    public Builder putLabels(java.lang.String key, java.lang.String value) {
      // Proto3 map fields are null-hostile for both keys and values.
      if (key == null) {
        throw new NullPointerException("map key");
      }
      if (value == null) {
        throw new NullPointerException("map value");
      }
      internalGetMutableLabels().getMutableMap().put(key, value);
      bitField0_ |= 0x00000002;
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Values for all of the labels listed in the associated monitored
     * resource descriptor. For example, Compute Engine VM instances use the
     * labels `"project_id"`, `"instance_id"`, and `"zone"`.
     * </pre>
     *
     * <code>map<string, string> labels = 2;</code>
     */
    public Builder putAllLabels(java.util.Map<java.lang.String, java.lang.String> values) {
      // Note: no explicit null checks here, unlike putLabels above.
      internalGetMutableLabels().getMutableMap().putAll(values);
      bitField0_ |= 0x00000002;
      return this;
    }
    // Unknown-field handling is delegated unchanged to the generated superclass.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.api.MonitoredResource)
}
// @@protoc_insertion_point(class_scope:google.api.MonitoredResource)
  // Singleton default instance, created eagerly at class-load time.
  private static final com.google.api.MonitoredResource DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.api.MonitoredResource();
  }
  public static com.google.api.MonitoredResource getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser: parses via the message Builder and, on any failure,
  // attaches the partially-built message to the thrown
  // InvalidProtocolBufferException so callers can inspect what was read.
  private static final com.google.protobuf.Parser<MonitoredResource> PARSER =
      new com.google.protobuf.AbstractParser<MonitoredResource>() {
        @java.lang.Override
        public MonitoredResource parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Plain I/O failures are wrapped so callers see one exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<MonitoredResource> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<MonitoredResource> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.api.MonitoredResource getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/flink-cdc | 36,021 | flink-cdc-runtime/src/test/java/org/apache/flink/cdc/runtime/operators/transform/PreTransformOperatorTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.cdc.runtime.operators.transform;
import org.apache.flink.cdc.common.data.binary.BinaryStringData;
import org.apache.flink.cdc.common.event.AddColumnEvent;
import org.apache.flink.cdc.common.event.CreateTableEvent;
import org.apache.flink.cdc.common.event.DataChangeEvent;
import org.apache.flink.cdc.common.event.Event;
import org.apache.flink.cdc.common.event.TableId;
import org.apache.flink.cdc.common.schema.Column;
import org.apache.flink.cdc.common.schema.Schema;
import org.apache.flink.cdc.common.source.SupportedMetadataColumn;
import org.apache.flink.cdc.common.types.DataTypes;
import org.apache.flink.cdc.common.types.RowType;
import org.apache.flink.cdc.runtime.testutils.operators.RegularEventOperatorTestHarness;
import org.apache.flink.cdc.runtime.typeutils.BinaryRecordDataGenerator;
import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
import org.testcontainers.shaded.com.google.common.collect.ImmutableMap;
import java.util.Collections;
/** Unit tests for the {@link PreTransformOperator}. */
class PreTransformOperatorTest {
    // Table identifier shared by most tests in this class.
    private static final TableId CUSTOMERS_TABLEID =
            TableId.tableId("my_company", "my_branch", "customers");
    // Initial source schema: two STRING columns, primary key col1.
    private static final Schema CUSTOMERS_SCHEMA =
            Schema.newBuilder()
                    .physicalColumn("col1", DataTypes.STRING())
                    .physicalColumn("col2", DataTypes.STRING())
                    .primaryKey("col1")
                    .build();
    // Source schema after an AddColumnEvent has appended col3.
    private static final Schema CUSTOMERS_LATEST_SCHEMA =
            Schema.newBuilder()
                    .physicalColumn("col1", DataTypes.STRING())
                    .physicalColumn("col2", DataTypes.STRING())
                    .physicalColumn("col3", DataTypes.STRING())
                    .primaryKey("col1")
                    .build();
    // Expected pre-transform schema: physical columns only (the computed col12
    // does not appear at this stage) with key/partition/options overridden by
    // the transform rule.
    private static final Schema EXPECT_SCHEMA =
            Schema.newBuilder()
                    .physicalColumn("col1", DataTypes.STRING())
                    .physicalColumn("col2", DataTypes.STRING())
                    .primaryKey("col2")
                    .partitionKey("col12")
                    .options(ImmutableMap.of("key1", "value1", "key2", "value2"))
                    .build();
    // EXPECT_SCHEMA after col3 has been appended.
    private static final Schema EXPECT_LATEST_SCHEMA =
            Schema.newBuilder()
                    .physicalColumn("col1", DataTypes.STRING())
                    .physicalColumn("col2", DataTypes.STRING())
                    .physicalColumn("col3", DataTypes.STRING())
                    .primaryKey("col2")
                    .partitionKey("col12")
                    .options(ImmutableMap.of("key1", "value1", "key2", "value2"))
                    .build();
    // Schema whose primary key column is declared NOT NULL.
    private static final Schema NULLABILITY_SCHEMA =
            Schema.newBuilder()
                    .physicalColumn("id", DataTypes.STRING().notNull())
                    .physicalColumn("name", DataTypes.STRING())
                    .primaryKey("id")
                    .partitionKey("id")
                    .options(ImmutableMap.of("key1", "value1", "key2", "value2"))
                    .build();
    // Identical to NULLABILITY_SCHEMA: the transform must not weaken NOT NULL.
    private static final Schema EXPECTED_NULLABILITY_SCHEMA =
            Schema.newBuilder()
                    .physicalColumn("id", DataTypes.STRING().notNull())
                    .physicalColumn("name", DataTypes.STRING())
                    .primaryKey("id")
                    .partitionKey("id")
                    .options(ImmutableMap.of("key1", "value1", "key2", "value2"))
                    .build();
    // Schema whose columns are only partially referenced by the transform rule.
    private static final Schema REFERENCED_SCHEMA =
            Schema.newBuilder()
                    .physicalColumn("id", DataTypes.STRING().notNull())
                    .physicalColumn("age", DataTypes.INT())
                    .physicalColumn("name", DataTypes.STRING())
                    .physicalColumn("ref1", DataTypes.STRING())
                    .physicalColumn("ref2", DataTypes.INT())
                    .primaryKey("id")
                    .partitionKey("id")
                    .options(ImmutableMap.of("key1", "value1", "key2", "value2"))
                    .build();
    // REFERENCED_SCHEMA reduced to the columns actually referenced by the
    // projection or filter ("name" is projected away).
    private static final Schema EXPECTED_REFERENCED_SCHEMA =
            Schema.newBuilder()
                    .physicalColumn("id", DataTypes.STRING().notNull())
                    .physicalColumn("age", DataTypes.INT())
                    .physicalColumn("ref1", DataTypes.STRING())
                    .physicalColumn("ref2", DataTypes.INT())
                    .primaryKey("id")
                    .partitionKey("id")
                    .options(ImmutableMap.of("key1", "value1", "key2", "value2"))
                    .build();
    // Input schema for the wildcard-projection test.
    private static final Schema WILDCARD_SCHEMA =
            Schema.newBuilder()
                    .physicalColumn("id", DataTypes.STRING().notNull())
                    .physicalColumn("age", DataTypes.INT())
                    .physicalColumn("name", DataTypes.STRING())
                    .primaryKey("id")
                    .partitionKey("id")
                    .options(ImmutableMap.of("key1", "value1", "key2", "value2"))
                    .build();
    // Identical to WILDCARD_SCHEMA: "*" keeps all physical columns and the
    // computed newage column is not materialized at the pre-transform stage.
    private static final Schema EXPECTED_WILDCARD_SCHEMA =
            Schema.newBuilder()
                    .physicalColumn("id", DataTypes.STRING().notNull())
                    .physicalColumn("age", DataTypes.INT())
                    .physicalColumn("name", DataTypes.STRING())
                    .primaryKey("id")
                    .partitionKey("id")
                    .options(ImmutableMap.of("key1", "value1", "key2", "value2"))
                    .build();
    private static final TableId METADATA_TABLEID =
            TableId.tableId("my_company", "my_branch", "metadata_table");
    // Input schema for the metadata pseudo-column test.
    private static final Schema METADATA_SCHEMA =
            Schema.newBuilder()
                    .physicalColumn("id", DataTypes.STRING().notNull())
                    .physicalColumn("age", DataTypes.INT())
                    .physicalColumn("name", DataTypes.STRING())
                    .primaryKey("id")
                    .options(ImmutableMap.of("key1", "value1", "key2", "value2"))
                    .build();
    // Identical to METADATA_SCHEMA: metadata pseudo-columns referenced by the
    // projection must not leak into the pre-transform output schema.
    private static final Schema EXPECTED_METADATA_SCHEMA =
            Schema.newBuilder()
                    .physicalColumn("id", DataTypes.STRING().notNull())
                    .physicalColumn("age", DataTypes.INT())
                    .physicalColumn("name", DataTypes.STRING())
                    .primaryKey("id")
                    .options(ImmutableMap.of("key1", "value1", "key2", "value2"))
                    .build();
    // NOTE(review): the two METADATA_AS_* constants below are not referenced by
    // any test in this class — candidates for removal or for a future test.
    private static final TableId METADATA_AS_TABLEID =
            TableId.tableId("my_company", "my_branch", "metadata_as_table");
    private static final Schema METADATA_AS_SCHEMA =
            Schema.newBuilder()
                    .physicalColumn("sid", DataTypes.INT())
                    .physicalColumn("name", DataTypes.STRING())
                    .physicalColumn("name_upper", DataTypes.STRING())
                    .physicalColumn("tbname", DataTypes.STRING())
                    .primaryKey("sid")
                    .build();
    // Input schema shared by the multi-transform tests.
    private static final Schema MULTITRANSFORM_SCHEMA =
            Schema.newBuilder()
                    .physicalColumn("id", DataTypes.INT())
                    .physicalColumn("age", DataTypes.INT())
                    .physicalColumn("name", DataTypes.STRING())
                    .physicalColumn("sex", DataTypes.STRING())
                    .primaryKey("id")
                    .build();
    // Union of the columns referenced by the two rules in
    // testMultiTransformWithDiffRefColumns ("sex" is never referenced).
    private static final Schema EXPECTED_MULTITRANSFORM_SCHEMA =
            Schema.newBuilder()
                    .physicalColumn("id", DataTypes.INT())
                    .physicalColumn("age", DataTypes.INT())
                    .physicalColumn("name", DataTypes.STRING())
                    .primaryKey("id")
                    .build();
    // Schema with hyphenated and keyword-like column names that require quoting.
    private static final Schema COL_NAME_MAPPING_SCHEMA =
            Schema.newBuilder()
                    .physicalColumn("foo", DataTypes.INT())
                    .physicalColumn("bar", DataTypes.INT())
                    .physicalColumn("foo-bar", DataTypes.INT())
                    .physicalColumn("bar-foo", DataTypes.INT())
                    .physicalColumn("class", DataTypes.INT())
                    .build();
    // Identical to COL_NAME_MAPPING_SCHEMA: name mapping must keep all columns.
    private static final Schema EXPECTED_COL_NAME_MAPPING_SCHEMA =
            Schema.newBuilder()
                    .physicalColumn("foo", DataTypes.INT())
                    .physicalColumn("bar", DataTypes.INT())
                    .physicalColumn("foo-bar", DataTypes.INT())
                    .physicalColumn("bar-foo", DataTypes.INT())
                    .physicalColumn("class", DataTypes.INT())
                    .build();
    /**
     * End-to-end projection test across create-table, add-column, insert and
     * update events. The rule declares a computed column ({@code col12}) and
     * overrides primary key, partition key and table options; emitted schemas
     * must reflect the overrides while data payloads pass through with their
     * original column values.
     */
    @Test
    void testEventTransform() throws Exception {
        PreTransformOperator transform =
                PreTransformOperator.newBuilder()
                        .addTransform(
                                CUSTOMERS_TABLEID.identifier(),
                                "*, concat(col1,col2) col12",
                                null,
                                "col2",
                                "col12",
                                "key1=value1,key2=value2",
                                null,
                                new SupportedMetadataColumn[0])
                        .build();
        RegularEventOperatorTestHarness<PreTransformOperator, Event>
                transformFunctionEventEventOperatorTestHarness =
                        RegularEventOperatorTestHarness.with(transform, 1);
        // Initialization
        transformFunctionEventEventOperatorTestHarness.open();
        // Create table
        CreateTableEvent createTableEvent =
                new CreateTableEvent(CUSTOMERS_TABLEID, CUSTOMERS_SCHEMA);
        // Add column
        AddColumnEvent.ColumnWithPosition columnWithPosition =
                new AddColumnEvent.ColumnWithPosition(
                        Column.physicalColumn("col3", DataTypes.STRING()));
        AddColumnEvent addColumnEvent =
                new AddColumnEvent(
                        CUSTOMERS_TABLEID, Collections.singletonList(columnWithPosition));
        // Payload generators use the post-add-column schemas.
        BinaryRecordDataGenerator recordDataGenerator =
                new BinaryRecordDataGenerator(((RowType) CUSTOMERS_LATEST_SCHEMA.toRowDataType()));
        BinaryRecordDataGenerator recordDataGeneratorExpect =
                new BinaryRecordDataGenerator(((RowType) EXPECT_LATEST_SCHEMA.toRowDataType()));
        // Insert
        DataChangeEvent insertEvent =
                DataChangeEvent.insertEvent(
                        CUSTOMERS_TABLEID,
                        recordDataGenerator.generate(
                                new Object[] {
                                    new BinaryStringData("1"),
                                    new BinaryStringData("2"),
                                    new BinaryStringData("3"),
                                }));
        DataChangeEvent insertEventExpect =
                DataChangeEvent.insertEvent(
                        CUSTOMERS_TABLEID,
                        recordDataGeneratorExpect.generate(
                                new Object[] {
                                    new BinaryStringData("1"),
                                    new BinaryStringData("2"),
                                    new BinaryStringData("3")
                                }));
        // Update
        DataChangeEvent updateEvent =
                DataChangeEvent.updateEvent(
                        CUSTOMERS_TABLEID,
                        recordDataGenerator.generate(
                                new Object[] {
                                    new BinaryStringData("1"),
                                    new BinaryStringData("2"),
                                    new BinaryStringData("3")
                                }),
                        recordDataGenerator.generate(
                                new Object[] {
                                    new BinaryStringData("1"),
                                    new BinaryStringData("3"),
                                    new BinaryStringData("3")
                                }));
        DataChangeEvent updateEventExpect =
                DataChangeEvent.updateEvent(
                        CUSTOMERS_TABLEID,
                        recordDataGeneratorExpect.generate(
                                new Object[] {
                                    new BinaryStringData("1"),
                                    new BinaryStringData("2"),
                                    new BinaryStringData("3")
                                }),
                        recordDataGeneratorExpect.generate(
                                new Object[] {
                                    new BinaryStringData("1"),
                                    new BinaryStringData("3"),
                                    new BinaryStringData("3")
                                }));
        // Schema event: output carries the overridden key/partition/options.
        transform.processElement(new StreamRecord<>(createTableEvent));
        Assertions.assertThat(
                        transformFunctionEventEventOperatorTestHarness.getOutputRecords().poll())
                .isEqualTo(
                        new StreamRecord<>(new CreateTableEvent(CUSTOMERS_TABLEID, EXPECT_SCHEMA)));
        // The added column is re-emitted with an explicit AFTER "col2" position.
        transform.processElement(new StreamRecord<>(addColumnEvent));
        Assertions.assertThat(
                        transformFunctionEventEventOperatorTestHarness.getOutputRecords().poll())
                .isEqualTo(
                        new StreamRecord<>(
                                new AddColumnEvent(
                                        CUSTOMERS_TABLEID,
                                        Collections.singletonList(
                                                new AddColumnEvent.ColumnWithPosition(
                                                        Column.physicalColumn(
                                                                "col3", DataTypes.STRING()),
                                                        AddColumnEvent.ColumnPosition.AFTER,
                                                        "col2")))));
        // Data events pass through with unchanged column values.
        transform.processElement(new StreamRecord<>(insertEvent));
        Assertions.assertThat(
                        transformFunctionEventEventOperatorTestHarness.getOutputRecords().poll())
                .isEqualTo(new StreamRecord<>(insertEventExpect));
        transform.processElement(new StreamRecord<>(updateEvent));
        Assertions.assertThat(
                        transformFunctionEventEventOperatorTestHarness.getOutputRecords().poll())
                .isEqualTo(new StreamRecord<>(updateEventExpect));
        transformFunctionEventEventOperatorTestHarness.close();
    }
@Test
void testNullabilityColumn() throws Exception {
PreTransformOperator transform =
PreTransformOperator.newBuilder()
.addTransform(
CUSTOMERS_TABLEID.identifier(),
"id, upper(id) uid, name, upper(name) uname",
null,
"id",
"id",
"key1=value1,key2=value2",
null,
new SupportedMetadataColumn[0])
.build();
RegularEventOperatorTestHarness<PreTransformOperator, Event>
transformFunctionEventEventOperatorTestHarness =
RegularEventOperatorTestHarness.with(transform, 1);
// Initialization
transformFunctionEventEventOperatorTestHarness.open();
// Create table
CreateTableEvent createTableEvent =
new CreateTableEvent(CUSTOMERS_TABLEID, NULLABILITY_SCHEMA);
transform.processElement(new StreamRecord<>(createTableEvent));
Assertions.assertThat(
transformFunctionEventEventOperatorTestHarness.getOutputRecords().poll())
.isEqualTo(
new StreamRecord<>(
new CreateTableEvent(
CUSTOMERS_TABLEID, EXPECTED_NULLABILITY_SCHEMA)));
transformFunctionEventEventOperatorTestHarness.close();
}
    /**
     * Verifies column pruning: with projection
     * {@code "id, upper(id) as uid, age + 1 as newage, lower(ref1) as ref1"}
     * and filter {@code "newage > 17 and ref2 > 17"}, only columns referenced
     * by the projection or the filter survive the pre-transform stage —
     * {@code name} is dropped while {@code ref2} is kept for the filter.
     */
    @Test
    void testReduceTransformColumn() throws Exception {
        PreTransformOperator transform =
                PreTransformOperator.newBuilder()
                        .addTransform(
                                CUSTOMERS_TABLEID.identifier(),
                                "id, upper(id) as uid, age + 1 as newage, lower(ref1) as ref1",
                                "newage > 17 and ref2 > 17",
                                "id",
                                "id",
                                "key1=value1,key2=value2",
                                null,
                                new SupportedMetadataColumn[0])
                        .build();
        RegularEventOperatorTestHarness<PreTransformOperator, Event>
                transformFunctionEventEventOperatorTestHarness =
                        RegularEventOperatorTestHarness.with(transform, 1);
        // Initialization
        transformFunctionEventEventOperatorTestHarness.open();
        // Create table
        CreateTableEvent createTableEvent =
                new CreateTableEvent(CUSTOMERS_TABLEID, REFERENCED_SCHEMA);
        transform.processElement(new StreamRecord<>(createTableEvent));
        Assertions.assertThat(
                        transformFunctionEventEventOperatorTestHarness.getOutputRecords().poll())
                .isEqualTo(
                        new StreamRecord<>(
                                new CreateTableEvent(
                                        CUSTOMERS_TABLEID, EXPECTED_REFERENCED_SCHEMA)));
        BinaryRecordDataGenerator recordDataGenerator =
                new BinaryRecordDataGenerator(((RowType) REFERENCED_SCHEMA.toRowDataType()));
        BinaryRecordDataGenerator recordDataGeneratorExpect =
                new BinaryRecordDataGenerator(
                        ((RowType) EXPECTED_REFERENCED_SCHEMA.toRowDataType()));
        // Insert
        DataChangeEvent insertEvent =
                DataChangeEvent.insertEvent(
                        CUSTOMERS_TABLEID,
                        recordDataGenerator.generate(
                                new Object[] {
                                    new BinaryStringData("id001"),
                                    17,
                                    new BinaryStringData("Alice"),
                                    new BinaryStringData("Reference"),
                                    42,
                                }));
        // Expected payload: the "name" value is gone, all other values unchanged.
        DataChangeEvent insertEventExpect =
                DataChangeEvent.insertEvent(
                        CUSTOMERS_TABLEID,
                        recordDataGeneratorExpect.generate(
                                new Object[] {
                                    new BinaryStringData("id001"),
                                    17,
                                    new BinaryStringData("Reference"),
                                    42
                                }));
        // Update
        DataChangeEvent updateEvent =
                DataChangeEvent.updateEvent(
                        CUSTOMERS_TABLEID,
                        recordDataGenerator.generate(
                                new Object[] {
                                    new BinaryStringData("id001"),
                                    17,
                                    new BinaryStringData("Alice"),
                                    new BinaryStringData("Reference"),
                                    42,
                                }),
                        recordDataGenerator.generate(
                                new Object[] {
                                    new BinaryStringData("id001"),
                                    18,
                                    new BinaryStringData("Arisu"),
                                    new BinaryStringData("UpdatedReference"),
                                    41,
                                }));
        DataChangeEvent updateEventExpect =
                DataChangeEvent.updateEvent(
                        CUSTOMERS_TABLEID,
                        recordDataGeneratorExpect.generate(
                                new Object[] {
                                    new BinaryStringData("id001"),
                                    17,
                                    new BinaryStringData("Reference"),
                                    42
                                }),
                        recordDataGeneratorExpect.generate(
                                new Object[] {
                                    new BinaryStringData("id001"),
                                    18,
                                    new BinaryStringData("UpdatedReference"),
                                    41
                                }));
        transform.processElement(new StreamRecord<>(insertEvent));
        Assertions.assertThat(
                        transformFunctionEventEventOperatorTestHarness.getOutputRecords().poll())
                .isEqualTo(new StreamRecord<>(insertEventExpect));
        transform.processElement(new StreamRecord<>(updateEvent));
        Assertions.assertThat(
                        transformFunctionEventEventOperatorTestHarness.getOutputRecords().poll())
                .isEqualTo(new StreamRecord<>(updateEventExpect));
        transformFunctionEventEventOperatorTestHarness.close();
    }
    /**
     * Verifies that a wildcard projection ({@code "*, age + 1 as newage"})
     * keeps the full physical schema and passes data payloads through
     * unchanged; the computed {@code newage} column is not materialized here.
     */
    @Test
    void testWildcardTransformColumn() throws Exception {
        PreTransformOperator transform =
                PreTransformOperator.newBuilder()
                        .addTransform(
                                CUSTOMERS_TABLEID.identifier(),
                                "*, age + 1 as newage",
                                "newage > 17",
                                "id",
                                "id",
                                "key1=value1,key2=value2",
                                null,
                                new SupportedMetadataColumn[0])
                        .build();
        RegularEventOperatorTestHarness<PreTransformOperator, Event>
                transformFunctionEventEventOperatorTestHarness =
                        RegularEventOperatorTestHarness.with(transform, 1);
        // Initialization
        transformFunctionEventEventOperatorTestHarness.open();
        // Create table
        CreateTableEvent createTableEvent =
                new CreateTableEvent(CUSTOMERS_TABLEID, WILDCARD_SCHEMA);
        transform.processElement(new StreamRecord<>(createTableEvent));
        Assertions.assertThat(
                        transformFunctionEventEventOperatorTestHarness.getOutputRecords().poll())
                .isEqualTo(
                        new StreamRecord<>(
                                new CreateTableEvent(CUSTOMERS_TABLEID, EXPECTED_WILDCARD_SCHEMA)));
        BinaryRecordDataGenerator recordDataGenerator =
                new BinaryRecordDataGenerator(((RowType) WILDCARD_SCHEMA.toRowDataType()));
        BinaryRecordDataGenerator recordDataGeneratorExpect =
                new BinaryRecordDataGenerator(((RowType) EXPECTED_WILDCARD_SCHEMA.toRowDataType()));
        // Insert
        DataChangeEvent insertEvent =
                DataChangeEvent.insertEvent(
                        CUSTOMERS_TABLEID,
                        recordDataGenerator.generate(
                                new Object[] {
                                    new BinaryStringData("id001"), 17, new BinaryStringData("Alice")
                                }));
        DataChangeEvent insertEventExpect =
                DataChangeEvent.insertEvent(
                        CUSTOMERS_TABLEID,
                        recordDataGeneratorExpect.generate(
                                new Object[] {
                                    new BinaryStringData("id001"),
                                    17,
                                    new BinaryStringData("Alice"),
                                }));
        // Update
        DataChangeEvent updateEvent =
                DataChangeEvent.updateEvent(
                        CUSTOMERS_TABLEID,
                        recordDataGenerator.generate(
                                new Object[] {
                                    new BinaryStringData("id001"), 17, new BinaryStringData("Alice")
                                }),
                        recordDataGenerator.generate(
                                new Object[] {
                                    new BinaryStringData("id001"), 18, new BinaryStringData("Arisu")
                                }));
        DataChangeEvent updateEventExpect =
                DataChangeEvent.updateEvent(
                        CUSTOMERS_TABLEID,
                        recordDataGeneratorExpect.generate(
                                new Object[] {
                                    new BinaryStringData("id001"),
                                    17,
                                    new BinaryStringData("Alice"),
                                }),
                        recordDataGeneratorExpect.generate(
                                new Object[] {
                                    new BinaryStringData("id001"),
                                    18,
                                    new BinaryStringData("Arisu"),
                                }));
        transform.processElement(new StreamRecord<>(insertEvent));
        Assertions.assertThat(
                        transformFunctionEventEventOperatorTestHarness.getOutputRecords().poll())
                .isEqualTo(new StreamRecord<>(insertEventExpect));
        transform.processElement(new StreamRecord<>(updateEvent));
        Assertions.assertThat(
                        transformFunctionEventEventOperatorTestHarness.getOutputRecords().poll())
                .isEqualTo(new StreamRecord<>(updateEventExpect));
        transformFunctionEventEventOperatorTestHarness.close();
    }
@Test
void testMetadataTransform() throws Exception {
PreTransformOperator transform =
PreTransformOperator.newBuilder()
.addTransform(
METADATA_TABLEID.identifier(),
"*, __namespace_name__ || '.' || __schema_name__ || '.' || __table_name__ identifier_name, __namespace_name__, __schema_name__, __table_name__",
" __table_name__ = 'metadata_table' ")
.build();
RegularEventOperatorTestHarness<PreTransformOperator, Event>
transformFunctionEventEventOperatorTestHarness =
RegularEventOperatorTestHarness.with(transform, 1);
// Initialization
transformFunctionEventEventOperatorTestHarness.open();
// Create table
CreateTableEvent createTableEvent = new CreateTableEvent(METADATA_TABLEID, METADATA_SCHEMA);
transform.processElement(new StreamRecord<>(createTableEvent));
Assertions.assertThat(
transformFunctionEventEventOperatorTestHarness.getOutputRecords().poll())
.isEqualTo(
new StreamRecord<>(
new CreateTableEvent(METADATA_TABLEID, EXPECTED_METADATA_SCHEMA)));
transformFunctionEventEventOperatorTestHarness.close();
}
@Test
void testMultiTransformWithDiffRefColumns() throws Exception {
PreTransformOperator transform =
PreTransformOperator.newBuilder()
.addTransform(
CUSTOMERS_TABLEID.identifier(),
"id, 'Juvenile' as roleName",
"age < 18",
"id",
null,
null,
null,
new SupportedMetadataColumn[0])
.addTransform(
CUSTOMERS_TABLEID.identifier(),
"id, name as roleName",
"age >= 18",
"id",
null,
null,
null,
new SupportedMetadataColumn[0])
.build();
RegularEventOperatorTestHarness<PreTransformOperator, Event>
transformFunctionEventEventOperatorTestHarness =
RegularEventOperatorTestHarness.with(transform, 1);
// Initialization
transformFunctionEventEventOperatorTestHarness.open();
// Create table
CreateTableEvent createTableEvent =
new CreateTableEvent(CUSTOMERS_TABLEID, MULTITRANSFORM_SCHEMA);
transform.processElement(new StreamRecord<>(createTableEvent));
Assertions.assertThat(
transformFunctionEventEventOperatorTestHarness.getOutputRecords().poll())
.isEqualTo(
new StreamRecord<>(
new CreateTableEvent(
CUSTOMERS_TABLEID, EXPECTED_MULTITRANSFORM_SCHEMA)));
transformFunctionEventEventOperatorTestHarness.close();
}
@Test
void testMultiTransformWithAsterisk() throws Exception {
PreTransformOperator transform =
PreTransformOperator.newBuilder()
.addTransform(
CUSTOMERS_TABLEID.identifier(),
"*, 'Juvenile' as roleName",
"age < 18",
"id",
null,
null,
null,
new SupportedMetadataColumn[0])
.addTransform(
CUSTOMERS_TABLEID.identifier(),
"id, age, name, sex, 'Juvenile' as roleName",
"age >= 18",
"id",
null,
null,
null,
new SupportedMetadataColumn[0])
.build();
RegularEventOperatorTestHarness<PreTransformOperator, Event>
transformFunctionEventEventOperatorTestHarness =
RegularEventOperatorTestHarness.with(transform, 1);
// Initialization
transformFunctionEventEventOperatorTestHarness.open();
// Create table
CreateTableEvent createTableEvent =
new CreateTableEvent(CUSTOMERS_TABLEID, MULTITRANSFORM_SCHEMA);
transform.processElement(new StreamRecord<>(createTableEvent));
Assertions.assertThat(
transformFunctionEventEventOperatorTestHarness.getOutputRecords().poll())
.isEqualTo(
new StreamRecord<>(
new CreateTableEvent(CUSTOMERS_TABLEID, MULTITRANSFORM_SCHEMA)));
transformFunctionEventEventOperatorTestHarness.close();
}
@Test
void testMultiTransformMissingProjection() throws Exception {
PreTransformOperator transform =
PreTransformOperator.newBuilder()
.addTransform(
CUSTOMERS_TABLEID.identifier(),
null,
"age < 18",
"id",
null,
null,
null,
new SupportedMetadataColumn[0])
.addTransform(
CUSTOMERS_TABLEID.identifier(),
"id, age, UPPER(name) as name, sex",
"age >= 18",
"id",
null,
null,
null,
new SupportedMetadataColumn[0])
.build();
RegularEventOperatorTestHarness<PreTransformOperator, Event>
transformFunctionEventEventOperatorTestHarness =
RegularEventOperatorTestHarness.with(transform, 1);
// Initialization
transformFunctionEventEventOperatorTestHarness.open();
// Create table
CreateTableEvent createTableEvent =
new CreateTableEvent(CUSTOMERS_TABLEID, MULTITRANSFORM_SCHEMA);
transform.processElement(new StreamRecord<>(createTableEvent));
Assertions.assertThat(
transformFunctionEventEventOperatorTestHarness.getOutputRecords().poll())
.isEqualTo(
new StreamRecord<>(
new CreateTableEvent(CUSTOMERS_TABLEID, MULTITRANSFORM_SCHEMA)));
transformFunctionEventEventOperatorTestHarness.close();
}
@Test
void testColumnNameMapping() throws Exception {
PreTransformOperator transform =
PreTransformOperator.newBuilder()
.addTransform(
CUSTOMERS_TABLEID.identifier(),
"foo, `foo-bar`, foo-bar AS f0, `bar-foo` AS f1, class",
" `foo-bar` > 1 and foo-bar > 1 and class > 1")
.build();
RegularEventOperatorTestHarness<PreTransformOperator, Event>
transformFunctionEventEventOperatorTestHarness =
RegularEventOperatorTestHarness.with(transform, 1);
// Initialization
transformFunctionEventEventOperatorTestHarness.open();
// Create table
CreateTableEvent createTableEvent =
new CreateTableEvent(CUSTOMERS_TABLEID, COL_NAME_MAPPING_SCHEMA);
transform.processElement(new StreamRecord<>(createTableEvent));
Assertions.assertThat(
transformFunctionEventEventOperatorTestHarness.getOutputRecords().poll())
.isEqualTo(
new StreamRecord<>(
new CreateTableEvent(
CUSTOMERS_TABLEID, EXPECTED_COL_NAME_MAPPING_SCHEMA)));
transformFunctionEventEventOperatorTestHarness.close();
}
}
|
apache/harmony | 35,937 | classlib/modules/swing/src/main/java/common/javax/swing/plaf/synth/PaintersManager.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package javax.swing.plaf.synth;
import java.awt.Graphics;
import java.util.LinkedList;
import java.util.List;
/**
* PaintersManager is a SynthPainter used to combine all the painters described
* in XML file. This class is similar to ColorInfo and FontInfo (inner classes
* in XMLSynthStyle) but placed separately because contains a lot of methods and
* the functionality is differs from just "info" functionality
*/
@SuppressWarnings("nls")
class PaintersManager extends SynthPainter {
    /** Direction wildcard: matches a painter regardless of paint direction. */
    public static final int NO_DIRECTION = -1;
/**
* PainterInfo for the search.
*/
private static class SynthPainterInfo {
private final String method;
private final int direction;
private final int state;
private final SynthPainter painter;
SynthPainterInfo(String method, int direction, int state,
SynthPainter painter) {
this.method = method;
this.direction = direction;
this.state = state;
this.painter = painter;
}
String getMethod() {
return method;
}
int getDirection() {
return direction;
}
int getState() {
return state;
}
SynthPainter getPainter() {
return painter;
}
boolean betterThan(SynthPainterInfo candidate, int refState,
String refMethod, int refDirection) {
if (this.method.equalsIgnoreCase(refMethod)
|| this.method.equals("default")) {
if (stateBetterThan(candidate.getState(), refState)) {
if ((this.direction == refDirection)
|| (this.direction == -1)) {
return true;
}
}
}
return false;
}
boolean stateBetterThan(int candidateState, int refState) {
if (((~refState) & (this.state)) == 0) {
if (((~refState) & (candidateState)) == 0) {
return refState >= candidateState;
}
return true;
}
return false;
}
}
    // Registered painter descriptors, scanned linearly on every lookup.
    private final List<SynthPainterInfo> painters = new LinkedList<SynthPainterInfo>();
    // Fallback candidate: a catch-all ("default" method, wildcard direction,
    // empty state) backed by a ColorPainter; returned when nothing matches better.
    private final SynthPainterInfo firstCandidate = new SynthPainterInfo(
            "default", -1, 0, new ColorPainter()); //$NON-NLS-1$
public SynthPainter findPainter(int state, String method, int direction) {
SynthPainterInfo bestCandidate = firstCandidate;
for (SynthPainterInfo candidate : painters) {
if (candidate.betterThan(bestCandidate, state, method, direction)) {
bestCandidate = candidate;
}
}
return bestCandidate.getPainter();
}
public void setPainter(SynthPainter painter, int state, String method,
int direction) {
painters.add(new SynthPainterInfo(method, direction, state, painter));
}
@Override
public void paintArrowButtonBorder(SynthContext context, Graphics g, int x,
int y, int w, int h) {
findPainter(context.getComponentState(), "ArrowButtonBorder", -1)
.paintArrowButtonBorder(context, g, x, y, w, h);
}
@Override
public void paintArrowButtonForeground(SynthContext context, Graphics g,
int x, int y, int w, int h, int direction) {
findPainter(context.getComponentState(), "ArrowButtonForeground",
direction).paintArrowButtonForeground(context, g, x, y, w, h,
direction);
}
@Override
public void paintButtonBackground(SynthContext context, Graphics g, int x,
int y, int w, int h) {
findPainter(context.getComponentState(), "ButtonBackground", -1)
.paintButtonBackground(context, g, x, y, w, h);
}
    // Simple state-keyed delegations: each method looks up the painter
    // registered for the current component state under the key derived from
    // its own name (-1 = direction not applicable) and forwards the call.
    @Override
    public void paintButtonBorder(SynthContext context, Graphics g, int x,
            int y, int w, int h) {
        findPainter(context.getComponentState(), "ButtonBorder", -1)
                .paintButtonBorder(context, g, x, y, w, h);
    }
    @Override
    public void paintCheckBoxBackground(SynthContext context, Graphics g,
            int x, int y, int w, int h) {
        findPainter(context.getComponentState(), "CheckBoxBackground", -1)
                .paintCheckBoxBackground(context, g, x, y, w, h);
    }
    @Override
    public void paintCheckBoxBorder(SynthContext context, Graphics g, int x,
            int y, int w, int h) {
        findPainter(context.getComponentState(), "CheckBoxBorder", -1)
                .paintCheckBoxBorder(context, g, x, y, w, h);
    }
    @Override
    public void paintCheckBoxMenuItemBackground(SynthContext context,
            Graphics g, int x, int y, int w, int h) {
        findPainter(context.getComponentState(), "CheckBoxMenuItemBackground",
                -1).paintCheckBoxMenuItemBackground(context, g, x, y, w, h);
    }
    @Override
    public void paintCheckBoxMenuItemBorder(SynthContext context, Graphics g,
            int x, int y, int w, int h) {
        findPainter(context.getComponentState(), "CheckBoxMenuItemBorder", -1)
                .paintCheckBoxMenuItemBorder(context, g, x, y, w, h);
    }
    @Override
    public void paintColorChooserBackground(SynthContext context, Graphics g,
            int x, int y, int w, int h) {
        findPainter(context.getComponentState(), "ColorChooserBackground", -1)
                .paintColorChooserBackground(context, g, x, y, w, h);
    }
    @Override
    public void paintColorChooserBorder(SynthContext context, Graphics g,
            int x, int y, int w, int h) {
        findPainter(context.getComponentState(), "ColorChooserBorder", -1)
                .paintColorChooserBorder(context, g, x, y, w, h);
    }
    @Override
    public void paintComboBoxBackground(SynthContext context, Graphics g,
            int x, int y, int w, int h) {
        findPainter(context.getComponentState(), "ComboBoxBackground", -1)
                .paintComboBoxBackground(context, g, x, y, w, h);
    }
    @Override
    public void paintComboBoxBorder(SynthContext context, Graphics g, int x,
            int y, int w, int h) {
        findPainter(context.getComponentState(), "ComboBoxBorder", -1)
                .paintComboBoxBorder(context, g, x, y, w, h);
    }
    @Override
    public void paintDesktopIconBackground(SynthContext context, Graphics g,
            int x, int y, int w, int h) {
        findPainter(context.getComponentState(), "DesktopIconBackground", -1)
                .paintDesktopIconBackground(context, g, x, y, w, h);
    }
    @Override
    public void paintDesktopIconBorder(SynthContext context, Graphics g, int x,
            int y, int w, int h) {
        findPainter(context.getComponentState(), "DesktopIconBorder", -1)
                .paintDesktopIconBorder(context, g, x, y, w, h);
    }
    @Override
    public void paintDesktopPaneBackground(SynthContext context, Graphics g,
            int x, int y, int w, int h) {
        findPainter(context.getComponentState(), "DesktopPaneBackground", -1)
                .paintDesktopPaneBackground(context, g, x, y, w, h);
    }
    @Override
    public void paintDesktopPaneBorder(SynthContext context, Graphics g, int x,
            int y, int w, int h) {
        findPainter(context.getComponentState(), "DesktopPaneBorder", -1)
                .paintDesktopPaneBorder(context, g, x, y, w, h);
    }
    @Override
    public void paintEditorPaneBackground(SynthContext context, Graphics g,
            int x, int y, int w, int h) {
        findPainter(context.getComponentState(), "EditorPaneBackground", -1)
                .paintEditorPaneBackground(context, g, x, y, w, h);
    }
    @Override
    public void paintEditorPaneBorder(SynthContext context, Graphics g, int x,
            int y, int w, int h) {
        findPainter(context.getComponentState(), "EditorPaneBorder", -1)
                .paintEditorPaneBorder(context, g, x, y, w, h);
    }
    @Override
    public void paintFileChooserBackground(SynthContext context, Graphics g,
            int x, int y, int w, int h) {
        findPainter(context.getComponentState(), "FileChooserBackground", -1)
                .paintFileChooserBackground(context, g, x, y, w, h);
    }
    @Override
    public void paintFileChooserBorder(SynthContext context, Graphics g, int x,
            int y, int w, int h) {
        findPainter(context.getComponentState(), "FileChooserBorder", -1)
                .paintFileChooserBorder(context, g, x, y, w, h);
    }
    @Override
    public void paintFormattedTextFieldBackground(SynthContext context,
            Graphics g, int x, int y, int w, int h) {
        findPainter(context.getComponentState(),
                "FormattedTextFieldBackground", -1)
                .paintFormattedTextFieldBackground(context, g, x, y, w, h);
    }
    @Override
    public void paintFormattedTextFieldBorder(SynthContext context, Graphics g,
            int x, int y, int w, int h) {
        findPainter(context.getComponentState(), "FormattedTextFieldBorder", -1)
                .paintFormattedTextFieldBorder(context, g, x, y, w, h);
    }
    @Override
    public void paintInternalFrameBackground(SynthContext context, Graphics g,
            int x, int y, int w, int h) {
        findPainter(context.getComponentState(), "InternalFrameBackground", -1)
                .paintInternalFrameBackground(context, g, x, y, w, h);
    }
    @Override
    public void paintInternalFrameBorder(SynthContext context, Graphics g,
            int x, int y, int w, int h) {
        findPainter(context.getComponentState(), "InternalFrameBorder", -1)
                .paintInternalFrameBorder(context, g, x, y, w, h);
    }
    @Override
    public void paintInternalFrameTitlePaneBackground(SynthContext context,
            Graphics g, int x, int y, int w, int h) {
        findPainter(context.getComponentState(),
                "InternalFrameTitlePaneBackground", -1)
                .paintInternalFrameTitlePaneBackground(context, g, x, y, w, h);
    }
    @Override
    public void paintInternalFrameTitlePaneBorder(SynthContext context,
            Graphics g, int x, int y, int w, int h) {
        findPainter(context.getComponentState(),
                "InternalFrameTitlePaneBorder", -1)
                .paintInternalFrameTitlePaneBorder(context, g, x, y, w, h);
    }
    @Override
    public void paintLabelBackground(SynthContext context, Graphics g, int x,
            int y, int w, int h) {
        findPainter(context.getComponentState(), "LabelBackground", -1)
                .paintLabelBackground(context, g, x, y, w, h);
    }
    @Override
    public void paintLabelBorder(SynthContext context, Graphics g, int x,
            int y, int w, int h) {
        findPainter(context.getComponentState(), "LabelBorder", -1)
                .paintLabelBorder(context, g, x, y, w, h);
    }
    @Override
    public void paintListBackground(SynthContext context, Graphics g, int x,
            int y, int w, int h) {
        findPainter(context.getComponentState(), "ListBackground", -1)
                .paintListBackground(context, g, x, y, w, h);
    }
    @Override
    public void paintListBorder(SynthContext context, Graphics g, int x, int y,
            int w, int h) {
        findPainter(context.getComponentState(), "ListBorder", -1)
                .paintListBorder(context, g, x, y, w, h);
    }
    // Simple state-keyed delegations: each method looks up the painter
    // registered for the current component state under the key derived from
    // its own name (-1 = direction not applicable) and forwards the call.
    @Override
    public void paintMenuBackground(SynthContext context, Graphics g, int x,
            int y, int w, int h) {
        findPainter(context.getComponentState(), "MenuBackground", -1)
                .paintMenuBackground(context, g, x, y, w, h);
    }
    @Override
    public void paintMenuBarBackground(SynthContext context, Graphics g, int x,
            int y, int w, int h) {
        findPainter(context.getComponentState(), "MenuBarBackground", -1)
                .paintMenuBarBackground(context, g, x, y, w, h);
    }
    @Override
    public void paintMenuBarBorder(SynthContext context, Graphics g, int x,
            int y, int w, int h) {
        findPainter(context.getComponentState(), "MenuBarBorder", -1)
                .paintMenuBarBorder(context, g, x, y, w, h);
    }
    @Override
    public void paintMenuBorder(SynthContext context, Graphics g, int x, int y,
            int w, int h) {
        findPainter(context.getComponentState(), "MenuBorder", -1)
                .paintMenuBorder(context, g, x, y, w, h);
    }
    @Override
    public void paintMenuItemBackground(SynthContext context, Graphics g,
            int x, int y, int w, int h) {
        findPainter(context.getComponentState(), "MenuItemBackground", -1)
                .paintMenuItemBackground(context, g, x, y, w, h);
    }
    @Override
    public void paintMenuItemBorder(SynthContext context, Graphics g, int x,
            int y, int w, int h) {
        findPainter(context.getComponentState(), "MenuItemBorder", -1)
                .paintMenuItemBorder(context, g, x, y, w, h);
    }
    @Override
    public void paintOptionPaneBackground(SynthContext context, Graphics g,
            int x, int y, int w, int h) {
        findPainter(context.getComponentState(), "OptionPaneBackground", -1)
                .paintOptionPaneBackground(context, g, x, y, w, h);
    }
    @Override
    public void paintOptionPaneBorder(SynthContext context, Graphics g, int x,
            int y, int w, int h) {
        findPainter(context.getComponentState(), "OptionPaneBorder", -1)
                .paintOptionPaneBorder(context, g, x, y, w, h);
    }
    @Override
    public void paintPanelBackground(SynthContext context, Graphics g, int x,
            int y, int w, int h) {
        findPainter(context.getComponentState(), "PanelBackground", -1)
                .paintPanelBackground(context, g, x, y, w, h);
    }
    @Override
    public void paintPanelBorder(SynthContext context, Graphics g, int x,
            int y, int w, int h) {
        findPainter(context.getComponentState(), "PanelBorder", -1)
                .paintPanelBorder(context, g, x, y, w, h);
    }
    @Override
    public void paintPasswordFieldBackground(SynthContext context, Graphics g,
            int x, int y, int w, int h) {
        findPainter(context.getComponentState(), "PasswordFieldBackground", -1)
                .paintPasswordFieldBackground(context, g, x, y, w, h);
    }
    @Override
    public void paintPasswordFieldBorder(SynthContext context, Graphics g,
            int x, int y, int w, int h) {
        findPainter(context.getComponentState(), "PasswordFieldBorder", -1)
                .paintPasswordFieldBorder(context, g, x, y, w, h);
    }
    @Override
    public void paintPopupMenuBackground(SynthContext context, Graphics g,
            int x, int y, int w, int h) {
        findPainter(context.getComponentState(), "PopupMenuBackground", -1)
                .paintPopupMenuBackground(context, g, x, y, w, h);
    }
    @Override
    public void paintPopupMenuBorder(SynthContext context, Graphics g, int x,
            int y, int w, int h) {
        findPainter(context.getComponentState(), "PopupMenuBorder", -1)
                .paintPopupMenuBorder(context, g, x, y, w, h);
    }
    @Override
    public void paintProgressBarBackground(SynthContext context, Graphics g,
            int x, int y, int w, int h) {
        findPainter(context.getComponentState(), "ProgressBarBackground", -1)
                .paintProgressBarBackground(context, g, x, y, w, h);
    }
    @Override
    public void paintProgressBarBorder(SynthContext context, Graphics g, int x,
            int y, int w, int h) {
        findPainter(context.getComponentState(), "ProgressBarBorder", -1)
                .paintProgressBarBorder(context, g, x, y, w, h);
    }
    // NOTE(review): orientation-aware lookups below use keys carrying a
    // "Painter" suffix (e.g. "ProgressBarForegroundPainter"), unlike the
    // suffix-less keys above — presumably matching how directional painters
    // are registered; confirm against addPainter().
    @Override
    public void paintProgressBarForeground(SynthContext context, Graphics g,
            int x, int y, int w, int h, int orientation) {
        findPainter(context.getComponentState(),
                "ProgressBarForegroundPainter", orientation)
                .paintProgressBarForeground(context, g, x, y, w, h, orientation);
    }
    @Override
    public void paintRadioButtonBackground(SynthContext context, Graphics g,
            int x, int y, int w, int h) {
        findPainter(context.getComponentState(), "RadioButtonBackground", -1)
                .paintRadioButtonBackground(context, g, x, y, w, h);
    }
    @Override
    public void paintRadioButtonBorder(SynthContext context, Graphics g, int x,
            int y, int w, int h) {
        findPainter(context.getComponentState(), "RadioButtonBorder", -1)
                .paintRadioButtonBorder(context, g, x, y, w, h);
    }
    @Override
    public void paintRadioButtonMenuItemBackground(SynthContext context,
            Graphics g, int x, int y, int w, int h) {
        findPainter(context.getComponentState(),
                "RadioButtonMenuItemBackground", -1)
                .paintRadioButtonMenuItemBackground(context, g, x, y, w, h);
    }
    @Override
    public void paintRadioButtonMenuItemBorder(SynthContext context,
            Graphics g, int x, int y, int w, int h) {
        findPainter(context.getComponentState(), "RadioButtonMenuItemBorder",
                -1).paintRadioButtonMenuItemBorder(context, g, x, y, w, h);
    }
    @Override
    public void paintRootPaneBackground(SynthContext context, Graphics g,
            int x, int y, int w, int h) {
        findPainter(context.getComponentState(), "RootPaneBackground", -1)
                .paintRootPaneBackground(context, g, x, y, w, h);
    }
    @Override
    public void paintRootPaneBorder(SynthContext context, Graphics g, int x,
            int y, int w, int h) {
        findPainter(context.getComponentState(), "RootPaneBorder", -1)
                .paintRootPaneBorder(context, g, x, y, w, h);
    }
    @Override
    public void paintScrollBarBackground(SynthContext context, Graphics g,
            int x, int y, int w, int h) {
        findPainter(context.getComponentState(), "ScrollBarBackground", -1)
                .paintScrollBarBackground(context, g, x, y, w, h);
    }
    @Override
    public void paintScrollBarBorder(SynthContext context, Graphics g, int x,
            int y, int w, int h) {
        findPainter(context.getComponentState(), "ScrollBarBorder", -1)
                .paintScrollBarBorder(context, g, x, y, w, h);
    }
    @Override
    public void paintScrollBarThumbBackground(SynthContext context, Graphics g,
            int x, int y, int w, int h, int orientation) {
        findPainter(context.getComponentState(),
                "ScrollBarThumbBackgroundPainter", orientation)
                .paintScrollBarThumbBackground(context, g, x, y, w, h,
                        orientation);
    }
    @Override
    public void paintScrollBarThumbBorder(SynthContext context, Graphics g,
            int x, int y, int w, int h, int orientation) {
        findPainter(context.getComponentState(), "ScrollBarThumbBorderPainter",
                orientation).paintScrollBarThumbBorder(context, g, x, y, w, h,
                orientation);
    }
    @Override
    public void paintScrollBarTrackBackground(SynthContext context, Graphics g,
            int x, int y, int w, int h) {
        findPainter(context.getComponentState(), "ScrollBarTrackBackground", -1)
                .paintScrollBarTrackBackground(context, g, x, y, w, h);
    }
    @Override
    public void paintScrollBarTrackBorder(SynthContext context, Graphics g,
            int x, int y, int w, int h) {
        findPainter(context.getComponentState(), "ScrollBarTrackBorder", -1)
                .paintScrollBarTrackBorder(context, g, x, y, w, h);
    }
    @Override
    public void paintScrollPaneBackground(SynthContext context, Graphics g,
            int x, int y, int w, int h) {
        findPainter(context.getComponentState(), "ScrollPaneBackground", -1)
                .paintScrollPaneBackground(context, g, x, y, w, h);
    }
    @Override
    public void paintScrollPaneBorder(SynthContext context, Graphics g, int x,
            int y, int w, int h) {
        findPainter(context.getComponentState(), "ScrollPaneBorder", -1)
                .paintScrollPaneBorder(context, g, x, y, w, h);
    }
    @Override
    public void paintSeparatorBackground(SynthContext context, Graphics g,
            int x, int y, int w, int h) {
        findPainter(context.getComponentState(), "SeparatorBackground", -1)
                .paintSeparatorBackground(context, g, x, y, w, h);
    }
    @Override
    public void paintSeparatorBorder(SynthContext context, Graphics g, int x,
            int y, int w, int h) {
        findPainter(context.getComponentState(), "SeparatorBorder", -1)
                .paintSeparatorBorder(context, g, x, y, w, h);
    }
@Override
public void paintSeparatorForeground(SynthContext context, Graphics g,
int x, int y, int w, int h, int orientation) {
findPainter(context.getComponentState(), "SeparatorForegroundPainter",
orientation).paintSeparatorForeground(context, g, x, y, w, h,
orientation);
}
    // Simple state-keyed delegations (-1 = direction not applicable);
    // orientation-aware variants pass the orientation both to the lookup
    // (keys suffixed with "Painter") and to the delegate.
    @Override
    public void paintSliderBackground(SynthContext context, Graphics g, int x,
            int y, int w, int h) {
        findPainter(context.getComponentState(), "SliderBackground", -1)
                .paintSliderBackground(context, g, x, y, w, h);
    }
    @Override
    public void paintSliderBorder(SynthContext context, Graphics g, int x,
            int y, int w, int h) {
        findPainter(context.getComponentState(), "SliderBorder", -1)
                .paintSliderBorder(context, g, x, y, w, h);
    }
    @Override
    public void paintSliderThumbBackground(SynthContext context, Graphics g,
            int x, int y, int w, int h, int orientation) {
        findPainter(context.getComponentState(),
                "SliderThumbBackgroundPainter", orientation)
                .paintSliderThumbBackground(context, g, x, y, w, h, orientation);
    }
    @Override
    public void paintSliderThumbBorder(SynthContext context, Graphics g, int x,
            int y, int w, int h, int orientation) {
        findPainter(context.getComponentState(), "SliderThumbBorderPainter",
                orientation).paintSliderThumbBorder(context, g, x, y, w, h,
                orientation);
    }
    @Override
    public void paintSliderTrackBackground(SynthContext context, Graphics g,
            int x, int y, int w, int h) {
        findPainter(context.getComponentState(), "SliderTrackBackground", -1)
                .paintSliderTrackBackground(context, g, x, y, w, h);
    }
    @Override
    public void paintSliderTrackBorder(SynthContext context, Graphics g, int x,
            int y, int w, int h) {
        findPainter(context.getComponentState(), "SliderTrackBorder", -1)
                .paintSliderTrackBorder(context, g, x, y, w, h);
    }
    @Override
    public void paintSpinnerBackground(SynthContext context, Graphics g, int x,
            int y, int w, int h) {
        findPainter(context.getComponentState(), "SpinnerBackground", -1)
                .paintSpinnerBackground(context, g, x, y, w, h);
    }
    @Override
    public void paintSpinnerBorder(SynthContext context, Graphics g, int x,
            int y, int w, int h) {
        findPainter(context.getComponentState(), "SpinnerBorder", -1)
                .paintSpinnerBorder(context, g, x, y, w, h);
    }
    @Override
    public void paintSplitPaneBackground(SynthContext context, Graphics g,
            int x, int y, int w, int h) {
        findPainter(context.getComponentState(), "SplitPaneBackground", -1)
                .paintSplitPaneBackground(context, g, x, y, w, h);
    }
    @Override
    public void paintSplitPaneBorder(SynthContext context, Graphics g, int x,
            int y, int w, int h) {
        findPainter(context.getComponentState(), "SplitPaneBorder", -1)
                .paintSplitPaneBorder(context, g, x, y, w, h);
    }
    @Override
    public void paintSplitPaneDividerBackground(SynthContext context,
            Graphics g, int x, int y, int w, int h) {
        findPainter(context.getComponentState(), "SplitPaneDividerBackground",
                -1).paintSplitPaneDividerBackground(context, g, x, y, w, h);
    }
    @Override
    public void paintSplitPaneDividerForeground(SynthContext context,
            Graphics g, int x, int y, int w, int h, int orientation) {
        findPainter(context.getComponentState(),
                "SplitPaneDividerForegroundPainter", orientation)
                .paintSplitPaneDividerForeground(context, g, x, y, w, h,
                        orientation);
    }
    @Override
    public void paintSplitPaneDragDivider(SynthContext context, Graphics g,
            int x, int y, int w, int h, int orientation) {
        findPainter(context.getComponentState(), "SplitPaneDragDividerPainter",
                orientation).paintSplitPaneDragDivider(context, g, x, y, w, h,
                orientation);
    }
    @Override
    public void paintTabbedPaneBackground(SynthContext context, Graphics g,
            int x, int y, int w, int h) {
        findPainter(context.getComponentState(), "TabbedPaneBackground", -1)
                .paintTabbedPaneBackground(context, g, x, y, w, h);
    }
    @Override
    public void paintTabbedPaneBorder(SynthContext context, Graphics g, int x,
            int y, int w, int h) {
        findPainter(context.getComponentState(), "TabbedPaneBorder", -1)
                .paintTabbedPaneBorder(context, g, x, y, w, h);
    }
    @Override
    public void paintTabbedPaneContentBackground(SynthContext context,
            Graphics g, int x, int y, int w, int h) {
        findPainter(context.getComponentState(), "TabbedPaneContentBackground",
                -1).paintTabbedPaneContentBackground(context, g, x, y, w, h);
    }
    @Override
    public void paintTabbedPaneContentBorder(SynthContext context, Graphics g,
            int x, int y, int w, int h) {
        findPainter(context.getComponentState(), "TabbedPaneContentBorder", -1)
                .paintTabbedPaneContentBorder(context, g, x, y, w, h);
    }
@Override
public void paintTabbedPaneTabAreaBackground(SynthContext context,
Graphics g, int x, int y, int w, int h) {
findPainter(context.getComponentState(), "TabbedPaneTabBackground", -1)
.paintTabbedPaneTabAreaBackground(context, g, x, y, w, h);
}
    // Simple state-keyed delegations: each method looks up the painter
    // registered for the current component state under the key derived from
    // its own name (-1 = direction not applicable) and forwards the call;
    // the tab variants additionally forward the tab index untouched.
    @Override
    public void paintTabbedPaneTabAreaBorder(SynthContext context, Graphics g,
            int x, int y, int w, int h) {
        findPainter(context.getComponentState(), "TabbedPaneTabAreaBorder", -1)
                .paintTabbedPaneTabAreaBorder(context, g, x, y, w, h);
    }
    @Override
    public void paintTabbedPaneTabBackground(SynthContext context, Graphics g,
            int x, int y, int w, int h, int tabIndex) {
        // tabIndex does not participate in the lookup, only in the paint call.
        findPainter(context.getComponentState(), "TabbedPaneTabBackground", -1)
                .paintTabbedPaneTabBackground(context, g, x, y, w, h, tabIndex);
    }
    @Override
    public void paintTabbedPaneTabBorder(SynthContext context, Graphics g,
            int x, int y, int w, int h, int tabIndex) {
        findPainter(context.getComponentState(), "TabbedPaneTabBorder", -1)
                .paintTabbedPaneTabBorder(context, g, x, y, w, h, tabIndex);
    }
    @Override
    public void paintTableBackground(SynthContext context, Graphics g, int x,
            int y, int w, int h) {
        findPainter(context.getComponentState(), "TableBackground", -1)
                .paintTableBackground(context, g, x, y, w, h);
    }
    @Override
    public void paintTableBorder(SynthContext context, Graphics g, int x,
            int y, int w, int h) {
        findPainter(context.getComponentState(), "TableBorder", -1)
                .paintTableBorder(context, g, x, y, w, h);
    }
    @Override
    public void paintTableHeaderBackground(SynthContext context, Graphics g,
            int x, int y, int w, int h) {
        findPainter(context.getComponentState(), "TableHeaderBackground", -1)
                .paintTableHeaderBackground(context, g, x, y, w, h);
    }
    @Override
    public void paintTableHeaderBorder(SynthContext context, Graphics g, int x,
            int y, int w, int h) {
        findPainter(context.getComponentState(), "TableHeaderBorder", -1)
                .paintTableHeaderBorder(context, g, x, y, w, h);
    }
    @Override
    public void paintTextAreaBackground(SynthContext context, Graphics g,
            int x, int y, int w, int h) {
        findPainter(context.getComponentState(), "TextAreaBackground", -1)
                .paintTextAreaBackground(context, g, x, y, w, h);
    }
    @Override
    public void paintTextAreaBorder(SynthContext context, Graphics g, int x,
            int y, int w, int h) {
        findPainter(context.getComponentState(), "TextAreaBorder", -1)
                .paintTextAreaBorder(context, g, x, y, w, h);
    }
    @Override
    public void paintTextFieldBackground(SynthContext context, Graphics g,
            int x, int y, int w, int h) {
        findPainter(context.getComponentState(), "TextFieldBackground", -1)
                .paintTextFieldBackground(context, g, x, y, w, h);
    }
    @Override
    public void paintTextFieldBorder(SynthContext context, Graphics g, int x,
            int y, int w, int h) {
        findPainter(context.getComponentState(), "TextFieldBorder", -1)
                .paintTextFieldBorder(context, g, x, y, w, h);
    }
    @Override
    public void paintTextPaneBackground(SynthContext context, Graphics g,
            int x, int y, int w, int h) {
        findPainter(context.getComponentState(), "TextPaneBackground", -1)
                .paintTextPaneBackground(context, g, x, y, w, h);
    }
    @Override
    public void paintTextPaneBorder(SynthContext context, Graphics g, int x,
            int y, int w, int h) {
        findPainter(context.getComponentState(), "TextPaneBorder", -1)
                .paintTextPaneBorder(context, g, x, y, w, h);
    }
    @Override
    public void paintToggleButtonBackground(SynthContext context, Graphics g,
            int x, int y, int w, int h) {
        findPainter(context.getComponentState(), "ToggleButtonBackground", -1)
                .paintToggleButtonBackground(context, g, x, y, w, h);
    }
    @Override
    public void paintToggleButtonBorder(SynthContext context, Graphics g,
            int x, int y, int w, int h) {
        findPainter(context.getComponentState(), "ToggleButtonBorder", -1)
                .paintToggleButtonBorder(context, g, x, y, w, h);
    }
    @Override
    public void paintToolBarBackground(SynthContext context, Graphics g, int x,
            int y, int w, int h) {
        findPainter(context.getComponentState(), "ToolBarBackground", -1)
                .paintToolBarBackground(context, g, x, y, w, h);
    }
    @Override
    public void paintToolBarBorder(SynthContext context, Graphics g, int x,
            int y, int w, int h) {
        findPainter(context.getComponentState(), "ToolBarBorder", -1)
                .paintToolBarBorder(context, g, x, y, w, h);
    }
    @Override
    public void paintToolBarContentBackground(SynthContext context, Graphics g,
            int x, int y, int w, int h) {
        findPainter(context.getComponentState(), "ToolBarContentBackground", -1)
                .paintToolBarContentBackground(context, g, x, y, w, h);
    }
    @Override
    public void paintToolBarContentBorder(SynthContext context, Graphics g,
            int x, int y, int w, int h) {
        findPainter(context.getComponentState(), "ToolBarContentBorder", -1)
                .paintToolBarContentBorder(context, g, x, y, w, h);
    }
    @Override
    public void paintToolBarDragWindowBackground(SynthContext context,
            Graphics g, int x, int y, int w, int h) {
        findPainter(context.getComponentState(), "ToolBarDragWindowBackground",
                -1).paintToolBarDragWindowBackground(context, g, x, y, w, h);
    }
    @Override
    public void paintToolBarDragWindowBorder(SynthContext context, Graphics g,
            int x, int y, int w, int h) {
        findPainter(context.getComponentState(), "ToolBarDragWindowBorder", -1)
                .paintToolBarDragWindowBorder(context, g, x, y, w, h);
    }
    @Override
    public void paintToolTipBackground(SynthContext context, Graphics g, int x,
            int y, int w, int h) {
        findPainter(context.getComponentState(), "ToolTipBackground", -1)
                .paintToolTipBackground(context, g, x, y, w, h);
    }
    @Override
    public void paintToolTipBorder(SynthContext context, Graphics g, int x,
            int y, int w, int h) {
        findPainter(context.getComponentState(), "ToolTipBorder", -1)
                .paintToolTipBorder(context, g, x, y, w, h);
    }
    @Override
    public void paintTreeBackground(SynthContext context, Graphics g, int x,
            int y, int w, int h) {
        findPainter(context.getComponentState(), "TreeBackground", -1)
                .paintTreeBackground(context, g, x, y, w, h);
    }
    @Override
    public void paintTreeBorder(SynthContext context, Graphics g, int x, int y,
            int w, int h) {
        findPainter(context.getComponentState(), "TreeBorder", -1)
                .paintTreeBorder(context, g, x, y, w, h);
    }
    @Override
    public void paintTreeCellBackground(SynthContext context, Graphics g,
            int x, int y, int w, int h) {
        findPainter(context.getComponentState(), "TreeCellBackground", -1)
                .paintTreeCellBackground(context, g, x, y, w, h);
    }
    @Override
    public void paintTreeCellBorder(SynthContext context, Graphics g, int x,
            int y, int w, int h) {
        findPainter(context.getComponentState(), "TreeCellBorder", -1)
                .paintTreeCellBorder(context, g, x, y, w, h);
    }
    @Override
    public void paintTreeCellFocus(SynthContext context, Graphics g, int x,
            int y, int w, int h) {
        findPainter(context.getComponentState(), "TreeCellFocus", -1)
                .paintTreeCellFocus(context, g, x, y, w, h);
    }
    @Override
    public void paintViewportBackground(SynthContext context, Graphics g,
            int x, int y, int w, int h) {
        findPainter(context.getComponentState(), "ViewportBackground", -1)
                .paintViewportBackground(context, g, x, y, w, h);
    }
    @Override
    public void paintViewportBorder(SynthContext context, Graphics g, int x,
            int y, int w, int h) {
        findPainter(context.getComponentState(), "ViewportBorder", -1)
                .paintViewportBorder(context, g, x, y, w, h);
    }
}
|
googleads/google-ads-java | 36,173 | google-ads-stubs-v19/src/main/java/com/google/ads/googleads/v19/services/ListBatchJobResultsRequest.java | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v19/services/batch_job_service.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v19.services;
/**
* <pre>
* Request message for
* [BatchJobService.ListBatchJobResults][google.ads.googleads.v19.services.BatchJobService.ListBatchJobResults].
* </pre>
*
* Protobuf type {@code google.ads.googleads.v19.services.ListBatchJobResultsRequest}
*/
public final class ListBatchJobResultsRequest extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v19.services.ListBatchJobResultsRequest)
ListBatchJobResultsRequestOrBuilder {
  // NOTE(review): this class is generated by protoc (see the "DO NOT EDIT"
  // file header); comments added here will be lost when the stubs are
  // regenerated — do not hand-edit the logic.
  private static final long serialVersionUID = 0L;
  // Use ListBatchJobResultsRequest.newBuilder() to construct.
  private ListBatchJobResultsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Proto3 scalar defaults: empty strings and enum wire value 0.
  private ListBatchJobResultsRequest() {
    resourceName_ = "";
    pageToken_ = "";
    responseContentType_ = 0;
  }
  // Reflection hook used by the protobuf runtime to create fresh instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new ListBatchJobResultsRequest();
  }
  // Descriptor describing this message type for reflection/JSON support.
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.ads.googleads.v19.services.BatchJobServiceProto.internal_static_google_ads_googleads_v19_services_ListBatchJobResultsRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.ads.googleads.v19.services.BatchJobServiceProto.internal_static_google_ads_googleads_v19_services_ListBatchJobResultsRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.ads.googleads.v19.services.ListBatchJobResultsRequest.class, com.google.ads.googleads.v19.services.ListBatchJobResultsRequest.Builder.class);
  }
  public static final int RESOURCE_NAME_FIELD_NUMBER = 1;
  @SuppressWarnings("serial")
  // Holds either a String or a ByteString; lazily decoded and cached as a
  // String on first access (standard protobuf-generated pattern).
  private volatile java.lang.Object resourceName_ = "";
  /**
   * <pre>
   * Required. The resource name of the batch job whose results are being
   * listed.
   * </pre>
   *
   * <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
   * @return The resourceName.
   */
  @java.lang.Override
  public java.lang.String getResourceName() {
    java.lang.Object ref = resourceName_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String so subsequent calls are cheap.
      resourceName_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * Required. The resource name of the batch job whose results are being
   * listed.
   * </pre>
   *
   * <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
   * @return The bytes for resourceName.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString
      getResourceNameBytes() {
    java.lang.Object ref = resourceName_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      // Cache the UTF-8 bytes for subsequent calls.
      resourceName_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int PAGE_TOKEN_FIELD_NUMBER = 2;
  @SuppressWarnings("serial")
  // Holds either a String or a ByteString; lazily decoded and cached as a
  // String on first access (standard protobuf-generated pattern).
  private volatile java.lang.Object pageToken_ = "";
  /**
   * <pre>
   * Token of the page to retrieve. If not specified, the first
   * page of results will be returned. Use the value obtained from
   * `next_page_token` in the previous response in order to request
   * the next page of results.
   * </pre>
   *
   * <code>string page_token = 2;</code>
   * @return The pageToken.
   */
  @java.lang.Override
  public java.lang.String getPageToken() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String so subsequent calls are cheap.
      pageToken_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * Token of the page to retrieve. If not specified, the first
   * page of results will be returned. Use the value obtained from
   * `next_page_token` in the previous response in order to request
   * the next page of results.
   * </pre>
   *
   * <code>string page_token = 2;</code>
   * @return The bytes for pageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString
      getPageTokenBytes() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      // Cache the UTF-8 bytes for subsequent calls.
      pageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int PAGE_SIZE_FIELD_NUMBER = 3;
  // Proto3 int32; 0 means "not set" and is omitted from the wire format.
  private int pageSize_ = 0;
  /**
   * <pre>
   * Number of elements to retrieve in a single page.
   * When a page request is too large, the server may decide to
   * further limit the number of returned resources.
   * </pre>
   *
   * <code>int32 page_size = 3;</code>
   * @return The pageSize.
   */
  @java.lang.Override
  public int getPageSize() {
    return pageSize_;
  }
  public static final int RESPONSE_CONTENT_TYPE_FIELD_NUMBER = 4;
  // Stored as the raw wire value so unknown enum values survive round-trips.
  private int responseContentType_ = 0;
  /**
   * <pre>
   * The response content type setting. Determines whether the mutable resource
   * or just the resource name should be returned.
   * </pre>
   *
   * <code>.google.ads.googleads.v19.enums.ResponseContentTypeEnum.ResponseContentType response_content_type = 4;</code>
   * @return The enum numeric value on the wire for responseContentType.
   */
  @java.lang.Override public int getResponseContentTypeValue() {
    return responseContentType_;
  }
  /**
   * <pre>
   * The response content type setting. Determines whether the mutable resource
   * or just the resource name should be returned.
   * </pre>
   *
   * <code>.google.ads.googleads.v19.enums.ResponseContentTypeEnum.ResponseContentType response_content_type = 4;</code>
   * @return The responseContentType.
   */
  @java.lang.Override public com.google.ads.googleads.v19.enums.ResponseContentTypeEnum.ResponseContentType getResponseContentType() {
    // Wire values with no matching enum constant map to UNRECOGNIZED rather
    // than null, per proto3 semantics.
    com.google.ads.googleads.v19.enums.ResponseContentTypeEnum.ResponseContentType result = com.google.ads.googleads.v19.enums.ResponseContentTypeEnum.ResponseContentType.forNumber(responseContentType_);
    return result == null ? com.google.ads.googleads.v19.enums.ResponseContentTypeEnum.ResponseContentType.UNRECOGNIZED : result;
  }
  // Memoized initialization check: -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    // A proto3 message with no required fields is always initialized, so the
    // generated check memoizes 1 and returns true.
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes the set (non-default) fields in field-number order, then any
  // unknown fields preserved from parsing.  Proto3 default values (empty
  // string, 0, UNSPECIFIED enum) are omitted from the wire format.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, resourceName_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, pageToken_);
    }
    if (pageSize_ != 0) {
      output.writeInt32(3, pageSize_);
    }
    if (responseContentType_ != com.google.ads.googleads.v19.enums.ResponseContentTypeEnum.ResponseContentType.UNSPECIFIED.getNumber()) {
      output.writeEnum(4, responseContentType_);
    }
    getUnknownFields().writeTo(output);
  }
  // Computes and memoizes the serialized size; must mirror writeTo exactly
  // so the two agree on which fields are emitted.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, resourceName_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, pageToken_);
    }
    if (pageSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream
        .computeInt32Size(3, pageSize_);
    }
    if (responseContentType_ != com.google.ads.googleads.v19.enums.ResponseContentTypeEnum.ResponseContentType.UNSPECIFIED.getNumber()) {
      size += com.google.protobuf.CodedOutputStream
        .computeEnumSize(4, responseContentType_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality: compares all four declared fields plus unknown fields.
  // The enum field is compared by its raw numeric value (responseContentType_).
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.ads.googleads.v19.services.ListBatchJobResultsRequest)) {
      return super.equals(obj);
    }
    com.google.ads.googleads.v19.services.ListBatchJobResultsRequest other = (com.google.ads.googleads.v19.services.ListBatchJobResultsRequest) obj;
    if (!getResourceName()
        .equals(other.getResourceName())) return false;
    if (!getPageToken()
        .equals(other.getPageToken())) return false;
    if (getPageSize()
        != other.getPageSize()) return false;
    if (responseContentType_ != other.responseContentType_) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash over the same fields equals() compares, memoized in memoizedHashCode
  // (0 means "not yet computed"); uses the generated-code 19/37/53 mixing
  // scheme with each field's number as a discriminator.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + RESOURCE_NAME_FIELD_NUMBER;
    hash = (53 * hash) + getResourceName().hashCode();
    hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getPageToken().hashCode();
    hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getPageSize();
    hash = (37 * hash) + RESPONSE_CONTENT_TYPE_FIELD_NUMBER;
    hash = (53 * hash) + responseContentType_;
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points. All overloads delegate to PARSER;
  // stream-based variants route through GeneratedMessageV3 helpers that wrap
  // IOExceptions, and the *Delimited* variants expect a varint length prefix.
  public static com.google.ads.googleads.v19.services.ListBatchJobResultsRequest parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v19.services.ListBatchJobResultsRequest parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.services.ListBatchJobResultsRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v19.services.ListBatchJobResultsRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.services.ListBatchJobResultsRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v19.services.ListBatchJobResultsRequest parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.services.ListBatchJobResultsRequest parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v19.services.ListBatchJobResultsRequest parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.services.ListBatchJobResultsRequest parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v19.services.ListBatchJobResultsRequest parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.services.ListBatchJobResultsRequest parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v19.services.ListBatchJobResultsRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  // Builder factories. newBuilder(prototype) pre-populates a fresh builder
  // from an existing message; toBuilder() short-circuits for the default
  // instance to avoid a needless mergeFrom.
  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.ads.googleads.v19.services.ListBatchJobResultsRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * Request message for
   * [BatchJobService.ListBatchJobResults][google.ads.googleads.v19.services.BatchJobService.ListBatchJobResults].
   * </pre>
   *
   * Protobuf type {@code google.ads.googleads.v19.services.ListBatchJobResultsRequest}
   */
  // NOTE: generated mutable companion of the immutable message above. Field
  // presence is tracked in bitField0_ and copied into the message by
  // buildPartial0(); do not hand-edit the bit masks.
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.ads.googleads.v19.services.ListBatchJobResultsRequest)
      com.google.ads.googleads.v19.services.ListBatchJobResultsRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v19.services.BatchJobServiceProto.internal_static_google_ads_googleads_v19_services_ListBatchJobResultsRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v19.services.BatchJobServiceProto.internal_static_google_ads_googleads_v19_services_ListBatchJobResultsRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v19.services.ListBatchJobResultsRequest.class, com.google.ads.googleads.v19.services.ListBatchJobResultsRequest.Builder.class);
    }
    // Construct using com.google.ads.googleads.v19.services.ListBatchJobResultsRequest.newBuilder()
    private Builder() {
    }
    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      // Reset every field to its proto3 default and drop all presence bits.
      super.clear();
      bitField0_ = 0;
      resourceName_ = "";
      pageToken_ = "";
      pageSize_ = 0;
      responseContentType_ = 0;
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v19.services.BatchJobServiceProto.internal_static_google_ads_googleads_v19_services_ListBatchJobResultsRequest_descriptor;
    }
    @java.lang.Override
    public com.google.ads.googleads.v19.services.ListBatchJobResultsRequest getDefaultInstanceForType() {
      return com.google.ads.googleads.v19.services.ListBatchJobResultsRequest.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.ads.googleads.v19.services.ListBatchJobResultsRequest build() {
      com.google.ads.googleads.v19.services.ListBatchJobResultsRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.ads.googleads.v19.services.ListBatchJobResultsRequest buildPartial() {
      com.google.ads.googleads.v19.services.ListBatchJobResultsRequest result = new com.google.ads.googleads.v19.services.ListBatchJobResultsRequest(this);
      if (bitField0_ != 0) { buildPartial0(result); }
      onBuilt();
      return result;
    }
    // Copies only the fields whose presence bit is set into the new message.
    private void buildPartial0(com.google.ads.googleads.v19.services.ListBatchJobResultsRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.resourceName_ = resourceName_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.pageToken_ = pageToken_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.pageSize_ = pageSize_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.responseContentType_ = responseContentType_;
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.ads.googleads.v19.services.ListBatchJobResultsRequest) {
        return mergeFrom((com.google.ads.googleads.v19.services.ListBatchJobResultsRequest)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Field-wise merge: only non-default fields of `other` overwrite this
    // builder's state; unknown fields are merged through at the end.
    public Builder mergeFrom(com.google.ads.googleads.v19.services.ListBatchJobResultsRequest other) {
      if (other == com.google.ads.googleads.v19.services.ListBatchJobResultsRequest.getDefaultInstance()) return this;
      if (!other.getResourceName().isEmpty()) {
        resourceName_ = other.resourceName_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getPageToken().isEmpty()) {
        pageToken_ = other.pageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.getPageSize() != 0) {
        setPageSize(other.getPageSize());
      }
      if (other.responseContentType_ != 0) {
        setResponseContentTypeValue(other.getResponseContentTypeValue());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Streaming merge from the wire: dispatches on each tag (field number
    // shifted left 3 bits plus wire type); unrecognized tags are preserved
    // via parseUnknownField, and tag 0 / an end-group tag terminates the loop.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10: {
              resourceName_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000001;
              break;
            } // case 10
            case 18: {
              pageToken_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000002;
              break;
            } // case 18
            case 24: {
              pageSize_ = input.readInt32();
              bitField0_ |= 0x00000004;
              break;
            } // case 24
            case 32: {
              responseContentType_ = input.readEnum();
              bitField0_ |= 0x00000008;
              break;
            } // case 32
            default: {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Presence bits: 0x1 resource_name, 0x2 page_token, 0x4 page_size,
    // 0x8 response_content_type.
    private int bitField0_;
    private java.lang.Object resourceName_ = "";
    /**
     * <pre>
     * Required. The resource name of the batch job whose results are being
     * listed.
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
     * @return The resourceName.
     */
    public java.lang.String getResourceName() {
      // Lazily decodes a ByteString-backed value and caches the String form.
      java.lang.Object ref = resourceName_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        resourceName_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * Required. The resource name of the batch job whose results are being
     * listed.
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
     * @return The bytes for resourceName.
     */
    public com.google.protobuf.ByteString
        getResourceNameBytes() {
      java.lang.Object ref = resourceName_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        resourceName_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * Required. The resource name of the batch job whose results are being
     * listed.
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
     * @param value The resourceName to set.
     * @return This builder for chaining.
     */
    public Builder setResourceName(
        java.lang.String value) {
      if (value == null) { throw new NullPointerException(); }
      resourceName_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Required. The resource name of the batch job whose results are being
     * listed.
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
     * @return This builder for chaining.
     */
    public Builder clearResourceName() {
      resourceName_ = getDefaultInstance().getResourceName();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Required. The resource name of the batch job whose results are being
     * listed.
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
     * @param value The bytes for resourceName to set.
     * @return This builder for chaining.
     */
    public Builder setResourceNameBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) { throw new NullPointerException(); }
      checkByteStringIsUtf8(value);
      resourceName_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    private java.lang.Object pageToken_ = "";
    /**
     * <pre>
     * Token of the page to retrieve. If not specified, the first
     * page of results will be returned. Use the value obtained from
     * `next_page_token` in the previous response in order to request
     * the next page of results.
     * </pre>
     *
     * <code>string page_token = 2;</code>
     * @return The pageToken.
     */
    public java.lang.String getPageToken() {
      java.lang.Object ref = pageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        pageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * Token of the page to retrieve. If not specified, the first
     * page of results will be returned. Use the value obtained from
     * `next_page_token` in the previous response in order to request
     * the next page of results.
     * </pre>
     *
     * <code>string page_token = 2;</code>
     * @return The bytes for pageToken.
     */
    public com.google.protobuf.ByteString
        getPageTokenBytes() {
      java.lang.Object ref = pageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        pageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * Token of the page to retrieve. If not specified, the first
     * page of results will be returned. Use the value obtained from
     * `next_page_token` in the previous response in order to request
     * the next page of results.
     * </pre>
     *
     * <code>string page_token = 2;</code>
     * @param value The pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageToken(
        java.lang.String value) {
      if (value == null) { throw new NullPointerException(); }
      pageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Token of the page to retrieve. If not specified, the first
     * page of results will be returned. Use the value obtained from
     * `next_page_token` in the previous response in order to request
     * the next page of results.
     * </pre>
     *
     * <code>string page_token = 2;</code>
     * @return This builder for chaining.
     */
    public Builder clearPageToken() {
      pageToken_ = getDefaultInstance().getPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Token of the page to retrieve. If not specified, the first
     * page of results will be returned. Use the value obtained from
     * `next_page_token` in the previous response in order to request
     * the next page of results.
     * </pre>
     *
     * <code>string page_token = 2;</code>
     * @param value The bytes for pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageTokenBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) { throw new NullPointerException(); }
      checkByteStringIsUtf8(value);
      pageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    private int pageSize_ ;
    /**
     * <pre>
     * Number of elements to retrieve in a single page.
     * When a page request is too large, the server may decide to
     * further limit the number of returned resources.
     * </pre>
     *
     * <code>int32 page_size = 3;</code>
     * @return The pageSize.
     */
    @java.lang.Override
    public int getPageSize() {
      return pageSize_;
    }
    /**
     * <pre>
     * Number of elements to retrieve in a single page.
     * When a page request is too large, the server may decide to
     * further limit the number of returned resources.
     * </pre>
     *
     * <code>int32 page_size = 3;</code>
     * @param value The pageSize to set.
     * @return This builder for chaining.
     */
    public Builder setPageSize(int value) {
      pageSize_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Number of elements to retrieve in a single page.
     * When a page request is too large, the server may decide to
     * further limit the number of returned resources.
     * </pre>
     *
     * <code>int32 page_size = 3;</code>
     * @return This builder for chaining.
     */
    public Builder clearPageSize() {
      bitField0_ = (bitField0_ & ~0x00000004);
      pageSize_ = 0;
      onChanged();
      return this;
    }
    private int responseContentType_ = 0;
    /**
     * <pre>
     * The response content type setting. Determines whether the mutable resource
     * or just the resource name should be returned.
     * </pre>
     *
     * <code>.google.ads.googleads.v19.enums.ResponseContentTypeEnum.ResponseContentType response_content_type = 4;</code>
     * @return The enum numeric value on the wire for responseContentType.
     */
    @java.lang.Override public int getResponseContentTypeValue() {
      return responseContentType_;
    }
    /**
     * <pre>
     * The response content type setting. Determines whether the mutable resource
     * or just the resource name should be returned.
     * </pre>
     *
     * <code>.google.ads.googleads.v19.enums.ResponseContentTypeEnum.ResponseContentType response_content_type = 4;</code>
     * @param value The enum numeric value on the wire for responseContentType to set.
     * @return This builder for chaining.
     */
    public Builder setResponseContentTypeValue(int value) {
      responseContentType_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The response content type setting. Determines whether the mutable resource
     * or just the resource name should be returned.
     * </pre>
     *
     * <code>.google.ads.googleads.v19.enums.ResponseContentTypeEnum.ResponseContentType response_content_type = 4;</code>
     * @return The responseContentType.
     */
    @java.lang.Override
    public com.google.ads.googleads.v19.enums.ResponseContentTypeEnum.ResponseContentType getResponseContentType() {
      com.google.ads.googleads.v19.enums.ResponseContentTypeEnum.ResponseContentType result = com.google.ads.googleads.v19.enums.ResponseContentTypeEnum.ResponseContentType.forNumber(responseContentType_);
      return result == null ? com.google.ads.googleads.v19.enums.ResponseContentTypeEnum.ResponseContentType.UNRECOGNIZED : result;
    }
    /**
     * <pre>
     * The response content type setting. Determines whether the mutable resource
     * or just the resource name should be returned.
     * </pre>
     *
     * <code>.google.ads.googleads.v19.enums.ResponseContentTypeEnum.ResponseContentType response_content_type = 4;</code>
     * @param value The responseContentType to set.
     * @return This builder for chaining.
     */
    public Builder setResponseContentType(com.google.ads.googleads.v19.enums.ResponseContentTypeEnum.ResponseContentType value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000008;
      responseContentType_ = value.getNumber();
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The response content type setting. Determines whether the mutable resource
     * or just the resource name should be returned.
     * </pre>
     *
     * <code>.google.ads.googleads.v19.enums.ResponseContentTypeEnum.ResponseContentType response_content_type = 4;</code>
     * @return This builder for chaining.
     */
    public Builder clearResponseContentType() {
      bitField0_ = (bitField0_ & ~0x00000008);
      responseContentType_ = 0;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.ads.googleads.v19.services.ListBatchJobResultsRequest)
  }
  // @@protoc_insertion_point(class_scope:google.ads.googleads.v19.services.ListBatchJobResultsRequest)
  // Shared immutable singleton returned for unset message fields and used as
  // the seed for newBuilder().
  private static final com.google.ads.googleads.v19.services.ListBatchJobResultsRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v19.services.ListBatchJobResultsRequest();
  }
  public static com.google.ads.googleads.v19.services.ListBatchJobResultsRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser backing all parseFrom overloads; delegates to Builder.mergeFrom and
  // attaches the partially-built message to any parse failure so callers can
  // inspect what was read before the error.
  private static final com.google.protobuf.Parser<ListBatchJobResultsRequest>
      PARSER = new com.google.protobuf.AbstractParser<ListBatchJobResultsRequest>() {
    @java.lang.Override
    public ListBatchJobResultsRequest parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (com.google.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };
  public static com.google.protobuf.Parser<ListBatchJobResultsRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListBatchJobResultsRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.ads.googleads.v19.services.ListBatchJobResultsRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// ---- Concatenation artifact repaired: the section below originates from
// googleads/google-ads-java: google-ads-stubs-v20/src/main/java/com/google/ads/googleads/v20/services/ListBatchJobResultsRequest.java ----
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/ads/googleads/v20/services/batch_job_service.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v20.services;
/**
* <pre>
* Request message for
* [BatchJobService.ListBatchJobResults][google.ads.googleads.v20.services.BatchJobService.ListBatchJobResults].
* </pre>
*
* Protobuf type {@code google.ads.googleads.v20.services.ListBatchJobResultsRequest}
*/
public final class ListBatchJobResultsRequest extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v20.services.ListBatchJobResultsRequest)
ListBatchJobResultsRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListBatchJobResultsRequest.newBuilder() to construct.
private ListBatchJobResultsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListBatchJobResultsRequest() {
resourceName_ = "";
pageToken_ = "";
responseContentType_ = 0;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new ListBatchJobResultsRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v20.services.BatchJobServiceProto.internal_static_google_ads_googleads_v20_services_ListBatchJobResultsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v20.services.BatchJobServiceProto.internal_static_google_ads_googleads_v20_services_ListBatchJobResultsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v20.services.ListBatchJobResultsRequest.class, com.google.ads.googleads.v20.services.ListBatchJobResultsRequest.Builder.class);
}
public static final int RESOURCE_NAME_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object resourceName_ = "";
/**
* <pre>
* Required. The resource name of the batch job whose results are being
* listed.
* </pre>
*
* <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
* @return The resourceName.
*/
@java.lang.Override
public java.lang.String getResourceName() {
java.lang.Object ref = resourceName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
resourceName_ = s;
return s;
}
}
/**
* <pre>
* Required. The resource name of the batch job whose results are being
* listed.
* </pre>
*
* <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
* @return The bytes for resourceName.
*/
@java.lang.Override
public com.google.protobuf.ByteString
getResourceNameBytes() {
java.lang.Object ref = resourceName_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
resourceName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
* <pre>
* Token of the page to retrieve. If not specified, the first
* page of results will be returned. Use the value obtained from
* `next_page_token` in the previous response in order to request
* the next page of results.
* </pre>
*
* <code>string page_token = 2;</code>
* @return The pageToken.
*/
@java.lang.Override
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
}
}
/**
* <pre>
* Token of the page to retrieve. If not specified, the first
* page of results will be returned. Use the value obtained from
* `next_page_token` in the previous response in order to request
* the next page of results.
* </pre>
*
* <code>string page_token = 2;</code>
* @return The bytes for pageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString
getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PAGE_SIZE_FIELD_NUMBER = 3;
private int pageSize_ = 0;
/**
* <pre>
* Number of elements to retrieve in a single page.
* When a page request is too large, the server may decide to
* further limit the number of returned resources.
* </pre>
*
* <code>int32 page_size = 3;</code>
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
public static final int RESPONSE_CONTENT_TYPE_FIELD_NUMBER = 4;
private int responseContentType_ = 0;
/**
* <pre>
* The response content type setting. Determines whether the mutable resource
* or just the resource name should be returned.
* </pre>
*
* <code>.google.ads.googleads.v20.enums.ResponseContentTypeEnum.ResponseContentType response_content_type = 4;</code>
* @return The enum numeric value on the wire for responseContentType.
*/
@java.lang.Override public int getResponseContentTypeValue() {
return responseContentType_;
}
/**
* <pre>
* The response content type setting. Determines whether the mutable resource
* or just the resource name should be returned.
* </pre>
*
* <code>.google.ads.googleads.v20.enums.ResponseContentTypeEnum.ResponseContentType response_content_type = 4;</code>
* @return The responseContentType.
*/
@java.lang.Override public com.google.ads.googleads.v20.enums.ResponseContentTypeEnum.ResponseContentType getResponseContentType() {
com.google.ads.googleads.v20.enums.ResponseContentTypeEnum.ResponseContentType result = com.google.ads.googleads.v20.enums.ResponseContentTypeEnum.ResponseContentType.forNumber(responseContentType_);
return result == null ? com.google.ads.googleads.v20.enums.ResponseContentTypeEnum.ResponseContentType.UNRECOGNIZED : result;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, resourceName_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, pageToken_);
}
if (pageSize_ != 0) {
output.writeInt32(3, pageSize_);
}
if (responseContentType_ != com.google.ads.googleads.v20.enums.ResponseContentTypeEnum.ResponseContentType.UNSPECIFIED.getNumber()) {
output.writeEnum(4, responseContentType_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, resourceName_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, pageToken_);
}
if (pageSize_ != 0) {
size += com.google.protobuf.CodedOutputStream
.computeInt32Size(3, pageSize_);
}
if (responseContentType_ != com.google.ads.googleads.v20.enums.ResponseContentTypeEnum.ResponseContentType.UNSPECIFIED.getNumber()) {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(4, responseContentType_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.ads.googleads.v20.services.ListBatchJobResultsRequest)) {
return super.equals(obj);
}
com.google.ads.googleads.v20.services.ListBatchJobResultsRequest other = (com.google.ads.googleads.v20.services.ListBatchJobResultsRequest) obj;
if (!getResourceName()
.equals(other.getResourceName())) return false;
if (!getPageToken()
.equals(other.getPageToken())) return false;
if (getPageSize()
!= other.getPageSize()) return false;
if (responseContentType_ != other.responseContentType_) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
  /**
   * Hash consistent with {@link #equals}: mixes the descriptor plus each
   * field (tagged by its field number) using the protobuf-generated 37/53
   * multiplier scheme. The result is cached in the inherited
   * {@code memoizedHashCode}; 0 is the "not yet computed" sentinel.
   */
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + RESOURCE_NAME_FIELD_NUMBER;
    hash = (53 * hash) + getResourceName().hashCode();
    hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getPageToken().hashCode();
    hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getPageSize();
    // Raw enum number, matching the comparison used in equals().
    hash = (53 * hash) + responseContentType_;
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // ---------------------------------------------------------------------
  // Static parse entry points. All overloads delegate to the shared PARSER
  // (or its GeneratedMessageV3 IO helpers); they differ only in input type
  // and whether an extension registry is supplied. The *Delimited variants
  // read a varint length prefix before the message bytes.
  // ---------------------------------------------------------------------
  public static com.google.ads.googleads.v20.services.ListBatchJobResultsRequest parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v20.services.ListBatchJobResultsRequest parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v20.services.ListBatchJobResultsRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v20.services.ListBatchJobResultsRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v20.services.ListBatchJobResultsRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v20.services.ListBatchJobResultsRequest parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v20.services.ListBatchJobResultsRequest parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v20.services.ListBatchJobResultsRequest parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v20.services.ListBatchJobResultsRequest parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v20.services.ListBatchJobResultsRequest parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v20.services.ListBatchJobResultsRequest parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v20.services.ListBatchJobResultsRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  // Builder factory plumbing: all builders ultimately derive from the
  // singleton DEFAULT_INSTANCE so field defaults come from one place.
  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  /** Returns a fresh builder with all fields at their proto3 defaults. */
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  /** Returns a builder pre-populated with {@code prototype}'s field values. */
  public static Builder newBuilder(com.google.ads.googleads.v20.services.ListBatchJobResultsRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Skip the mergeFrom copy when this IS the default instance: an empty
    // builder is already equivalent.
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
* <pre>
* Request message for
* [BatchJobService.ListBatchJobResults][google.ads.googleads.v20.services.BatchJobService.ListBatchJobResults].
* </pre>
*
* Protobuf type {@code google.ads.googleads.v20.services.ListBatchJobResultsRequest}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v20.services.ListBatchJobResultsRequest)
com.google.ads.googleads.v20.services.ListBatchJobResultsRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v20.services.BatchJobServiceProto.internal_static_google_ads_googleads_v20_services_ListBatchJobResultsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v20.services.BatchJobServiceProto.internal_static_google_ads_googleads_v20_services_ListBatchJobResultsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v20.services.ListBatchJobResultsRequest.class, com.google.ads.googleads.v20.services.ListBatchJobResultsRequest.Builder.class);
}
// Construct using com.google.ads.googleads.v20.services.ListBatchJobResultsRequest.newBuilder()
private Builder() {
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
    /**
     * Resets every field to its proto3 default and clears all presence bits
     * in {@code bitField0_}, returning {@code this} for chaining.
     */
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      resourceName_ = "";
      pageToken_ = "";
      pageSize_ = 0;
      responseContentType_ = 0;
      return this;
    }
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.ads.googleads.v20.services.BatchJobServiceProto.internal_static_google_ads_googleads_v20_services_ListBatchJobResultsRequest_descriptor;
}
@java.lang.Override
public com.google.ads.googleads.v20.services.ListBatchJobResultsRequest getDefaultInstanceForType() {
return com.google.ads.googleads.v20.services.ListBatchJobResultsRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.ads.googleads.v20.services.ListBatchJobResultsRequest build() {
com.google.ads.googleads.v20.services.ListBatchJobResultsRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
    /**
     * Builds the message without enforcing required-field checks (none exist
     * for this proto3 message). Only fields whose presence bit is set in
     * {@code bitField0_} are copied into the result.
     */
    @java.lang.Override
    public com.google.ads.googleads.v20.services.ListBatchJobResultsRequest buildPartial() {
      com.google.ads.googleads.v20.services.ListBatchJobResultsRequest result = new com.google.ads.googleads.v20.services.ListBatchJobResultsRequest(this);
      if (bitField0_ != 0) { buildPartial0(result); }
      onBuilt();
      return result;
    }
    // Copies set fields into result; one presence bit per field, assigned in
    // field-number order (0x1 -> resource_name ... 0x8 -> response_content_type).
    private void buildPartial0(com.google.ads.googleads.v20.services.ListBatchJobResultsRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.resourceName_ = resourceName_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.pageToken_ = pageToken_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.pageSize_ = pageSize_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.responseContentType_ = responseContentType_;
      }
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.ads.googleads.v20.services.ListBatchJobResultsRequest) {
return mergeFrom((com.google.ads.googleads.v20.services.ListBatchJobResultsRequest)other);
} else {
super.mergeFrom(other);
return this;
}
}
    /**
     * Merges {@code other} into this builder following proto3 merge
     * semantics: a field is copied only when it holds a non-default value,
     * so defaults in {@code other} never clobber values already set here.
     * Unknown fields are merged as well.
     */
    public Builder mergeFrom(com.google.ads.googleads.v20.services.ListBatchJobResultsRequest other) {
      if (other == com.google.ads.googleads.v20.services.ListBatchJobResultsRequest.getDefaultInstance()) return this;
      if (!other.getResourceName().isEmpty()) {
        resourceName_ = other.resourceName_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getPageToken().isEmpty()) {
        pageToken_ = other.pageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.getPageSize() != 0) {
        setPageSize(other.getPageSize());
      }
      if (other.responseContentType_ != 0) {
        setResponseContentTypeValue(other.getResponseContentTypeValue());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    /**
     * Parses wire-format data from {@code input} into this builder. Each tag
     * is {@code (field_number << 3) | wire_type}; unrecognized tags are
     * routed to {@code parseUnknownField} so forward compatibility is
     * preserved. {@code onChanged()} fires in the finally block so partial
     * progress is reported even when parsing throws.
     */
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            // tag 10 = field 1 (resource_name), length-delimited.
            case 10: {
              resourceName_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000001;
              break;
            } // case 10
            // tag 18 = field 2 (page_token), length-delimited.
            case 18: {
              pageToken_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000002;
              break;
            } // case 18
            // tag 24 = field 3 (page_size), varint.
            case 24: {
              pageSize_ = input.readInt32();
              bitField0_ |= 0x00000004;
              break;
            } // case 24
            // tag 32 = field 4 (response_content_type), varint enum number.
            case 32: {
              responseContentType_ = input.readEnum();
              bitField0_ |= 0x00000008;
              break;
            } // case 32
            default: {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private java.lang.Object resourceName_ = "";
/**
* <pre>
* Required. The resource name of the batch job whose results are being
* listed.
* </pre>
*
* <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
* @return The resourceName.
*/
public java.lang.String getResourceName() {
java.lang.Object ref = resourceName_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
resourceName_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <pre>
* Required. The resource name of the batch job whose results are being
* listed.
* </pre>
*
* <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
* @return The bytes for resourceName.
*/
public com.google.protobuf.ByteString
getResourceNameBytes() {
java.lang.Object ref = resourceName_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
resourceName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <pre>
* Required. The resource name of the batch job whose results are being
* listed.
* </pre>
*
* <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
* @param value The resourceName to set.
* @return This builder for chaining.
*/
public Builder setResourceName(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
resourceName_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* <pre>
* Required. The resource name of the batch job whose results are being
* listed.
* </pre>
*
* <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
* @return This builder for chaining.
*/
public Builder clearResourceName() {
resourceName_ = getDefaultInstance().getResourceName();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* <pre>
* Required. The resource name of the batch job whose results are being
* listed.
* </pre>
*
* <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
* @param value The bytes for resourceName to set.
* @return This builder for chaining.
*/
public Builder setResourceNameBytes(
com.google.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
checkByteStringIsUtf8(value);
resourceName_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object pageToken_ = "";
/**
* <pre>
* Token of the page to retrieve. If not specified, the first
* page of results will be returned. Use the value obtained from
* `next_page_token` in the previous response in order to request
* the next page of results.
* </pre>
*
* <code>string page_token = 2;</code>
* @return The pageToken.
*/
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <pre>
* Token of the page to retrieve. If not specified, the first
* page of results will be returned. Use the value obtained from
* `next_page_token` in the previous response in order to request
* the next page of results.
* </pre>
*
* <code>string page_token = 2;</code>
* @return The bytes for pageToken.
*/
public com.google.protobuf.ByteString
getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <pre>
* Token of the page to retrieve. If not specified, the first
* page of results will be returned. Use the value obtained from
* `next_page_token` in the previous response in order to request
* the next page of results.
* </pre>
*
* <code>string page_token = 2;</code>
* @param value The pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageToken(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
pageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* <pre>
* Token of the page to retrieve. If not specified, the first
* page of results will be returned. Use the value obtained from
* `next_page_token` in the previous response in order to request
* the next page of results.
* </pre>
*
* <code>string page_token = 2;</code>
* @return This builder for chaining.
*/
public Builder clearPageToken() {
pageToken_ = getDefaultInstance().getPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
* <pre>
* Token of the page to retrieve. If not specified, the first
* page of results will be returned. Use the value obtained from
* `next_page_token` in the previous response in order to request
* the next page of results.
* </pre>
*
* <code>string page_token = 2;</code>
* @param value The bytes for pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageTokenBytes(
com.google.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
checkByteStringIsUtf8(value);
pageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private int pageSize_ ;
/**
* <pre>
* Number of elements to retrieve in a single page.
* When a page request is too large, the server may decide to
* further limit the number of returned resources.
* </pre>
*
* <code>int32 page_size = 3;</code>
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
/**
* <pre>
* Number of elements to retrieve in a single page.
* When a page request is too large, the server may decide to
* further limit the number of returned resources.
* </pre>
*
* <code>int32 page_size = 3;</code>
* @param value The pageSize to set.
* @return This builder for chaining.
*/
public Builder setPageSize(int value) {
pageSize_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* <pre>
* Number of elements to retrieve in a single page.
* When a page request is too large, the server may decide to
* further limit the number of returned resources.
* </pre>
*
* <code>int32 page_size = 3;</code>
* @return This builder for chaining.
*/
public Builder clearPageSize() {
bitField0_ = (bitField0_ & ~0x00000004);
pageSize_ = 0;
onChanged();
return this;
}
private int responseContentType_ = 0;
/**
* <pre>
* The response content type setting. Determines whether the mutable resource
* or just the resource name should be returned.
* </pre>
*
* <code>.google.ads.googleads.v20.enums.ResponseContentTypeEnum.ResponseContentType response_content_type = 4;</code>
* @return The enum numeric value on the wire for responseContentType.
*/
@java.lang.Override public int getResponseContentTypeValue() {
return responseContentType_;
}
/**
* <pre>
* The response content type setting. Determines whether the mutable resource
* or just the resource name should be returned.
* </pre>
*
* <code>.google.ads.googleads.v20.enums.ResponseContentTypeEnum.ResponseContentType response_content_type = 4;</code>
* @param value The enum numeric value on the wire for responseContentType to set.
* @return This builder for chaining.
*/
public Builder setResponseContentTypeValue(int value) {
responseContentType_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
* <pre>
* The response content type setting. Determines whether the mutable resource
* or just the resource name should be returned.
* </pre>
*
* <code>.google.ads.googleads.v20.enums.ResponseContentTypeEnum.ResponseContentType response_content_type = 4;</code>
* @return The responseContentType.
*/
@java.lang.Override
public com.google.ads.googleads.v20.enums.ResponseContentTypeEnum.ResponseContentType getResponseContentType() {
com.google.ads.googleads.v20.enums.ResponseContentTypeEnum.ResponseContentType result = com.google.ads.googleads.v20.enums.ResponseContentTypeEnum.ResponseContentType.forNumber(responseContentType_);
return result == null ? com.google.ads.googleads.v20.enums.ResponseContentTypeEnum.ResponseContentType.UNRECOGNIZED : result;
}
/**
* <pre>
* The response content type setting. Determines whether the mutable resource
* or just the resource name should be returned.
* </pre>
*
* <code>.google.ads.googleads.v20.enums.ResponseContentTypeEnum.ResponseContentType response_content_type = 4;</code>
* @param value The responseContentType to set.
* @return This builder for chaining.
*/
public Builder setResponseContentType(com.google.ads.googleads.v20.enums.ResponseContentTypeEnum.ResponseContentType value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000008;
responseContentType_ = value.getNumber();
onChanged();
return this;
}
/**
* <pre>
* The response content type setting. Determines whether the mutable resource
* or just the resource name should be returned.
* </pre>
*
* <code>.google.ads.googleads.v20.enums.ResponseContentTypeEnum.ResponseContentType response_content_type = 4;</code>
* @return This builder for chaining.
*/
public Builder clearResponseContentType() {
bitField0_ = (bitField0_ & ~0x00000008);
responseContentType_ = 0;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v20.services.ListBatchJobResultsRequest)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v20.services.ListBatchJobResultsRequest)
private static final com.google.ads.googleads.v20.services.ListBatchJobResultsRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.ads.googleads.v20.services.ListBatchJobResultsRequest();
}
public static com.google.ads.googleads.v20.services.ListBatchJobResultsRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
  // Shared parser used by every static parseFrom overload. Delegates to
  // Builder.mergeFrom and, on failure, attaches the partially built message
  // to the thrown InvalidProtocolBufferException so callers can inspect
  // whatever was decoded before the error.
  private static final com.google.protobuf.Parser<ListBatchJobResultsRequest>
      PARSER = new com.google.protobuf.AbstractParser<ListBatchJobResultsRequest>() {
    @java.lang.Override
    public ListBatchJobResultsRequest parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (com.google.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        // Plain IO failures are normalized to InvalidProtocolBufferException
        // with the original exception preserved as the cause.
        throw new com.google.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };
public static com.google.protobuf.Parser<ListBatchJobResultsRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListBatchJobResultsRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.ads.googleads.v20.services.ListBatchJobResultsRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/ads/googleads/v21/services/batch_job_service.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v21.services;
/**
* <pre>
* Request message for
* [BatchJobService.ListBatchJobResults][google.ads.googleads.v21.services.BatchJobService.ListBatchJobResults].
* </pre>
*
* Protobuf type {@code google.ads.googleads.v21.services.ListBatchJobResultsRequest}
*/
public final class ListBatchJobResultsRequest extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v21.services.ListBatchJobResultsRequest)
ListBatchJobResultsRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListBatchJobResultsRequest.newBuilder() to construct.
private ListBatchJobResultsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListBatchJobResultsRequest() {
resourceName_ = "";
pageToken_ = "";
responseContentType_ = 0;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new ListBatchJobResultsRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v21.services.BatchJobServiceProto.internal_static_google_ads_googleads_v21_services_ListBatchJobResultsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v21.services.BatchJobServiceProto.internal_static_google_ads_googleads_v21_services_ListBatchJobResultsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v21.services.ListBatchJobResultsRequest.class, com.google.ads.googleads.v21.services.ListBatchJobResultsRequest.Builder.class);
}
public static final int RESOURCE_NAME_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object resourceName_ = "";
/**
* <pre>
* Required. The resource name of the batch job whose results are being
* listed.
* </pre>
*
* <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
* @return The resourceName.
*/
@java.lang.Override
public java.lang.String getResourceName() {
java.lang.Object ref = resourceName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
resourceName_ = s;
return s;
}
}
/**
* <pre>
* Required. The resource name of the batch job whose results are being
* listed.
* </pre>
*
* <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
* @return The bytes for resourceName.
*/
@java.lang.Override
public com.google.protobuf.ByteString
getResourceNameBytes() {
java.lang.Object ref = resourceName_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
resourceName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
* <pre>
* Token of the page to retrieve. If not specified, the first
* page of results will be returned. Use the value obtained from
* `next_page_token` in the previous response in order to request
* the next page of results.
* </pre>
*
* <code>string page_token = 2;</code>
* @return The pageToken.
*/
@java.lang.Override
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
}
}
/**
* <pre>
* Token of the page to retrieve. If not specified, the first
* page of results will be returned. Use the value obtained from
* `next_page_token` in the previous response in order to request
* the next page of results.
* </pre>
*
* <code>string page_token = 2;</code>
* @return The bytes for pageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString
getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PAGE_SIZE_FIELD_NUMBER = 3;
private int pageSize_ = 0;
/**
* <pre>
* Number of elements to retrieve in a single page.
* When a page request is too large, the server may decide to
* further limit the number of returned resources.
* </pre>
*
* <code>int32 page_size = 3;</code>
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
public static final int RESPONSE_CONTENT_TYPE_FIELD_NUMBER = 4;
private int responseContentType_ = 0;
/**
* <pre>
* The response content type setting. Determines whether the mutable resource
* or just the resource name should be returned.
* </pre>
*
* <code>.google.ads.googleads.v21.enums.ResponseContentTypeEnum.ResponseContentType response_content_type = 4;</code>
* @return The enum numeric value on the wire for responseContentType.
*/
@java.lang.Override public int getResponseContentTypeValue() {
return responseContentType_;
}
/**
* <pre>
* The response content type setting. Determines whether the mutable resource
* or just the resource name should be returned.
* </pre>
*
* <code>.google.ads.googleads.v21.enums.ResponseContentTypeEnum.ResponseContentType response_content_type = 4;</code>
* @return The responseContentType.
*/
@java.lang.Override public com.google.ads.googleads.v21.enums.ResponseContentTypeEnum.ResponseContentType getResponseContentType() {
com.google.ads.googleads.v21.enums.ResponseContentTypeEnum.ResponseContentType result = com.google.ads.googleads.v21.enums.ResponseContentTypeEnum.ResponseContentType.forNumber(responseContentType_);
return result == null ? com.google.ads.googleads.v21.enums.ResponseContentTypeEnum.ResponseContentType.UNRECOGNIZED : result;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, resourceName_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, pageToken_);
}
if (pageSize_ != 0) {
output.writeInt32(3, pageSize_);
}
if (responseContentType_ != com.google.ads.googleads.v21.enums.ResponseContentTypeEnum.ResponseContentType.UNSPECIFIED.getNumber()) {
output.writeEnum(4, responseContentType_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, resourceName_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, pageToken_);
}
if (pageSize_ != 0) {
size += com.google.protobuf.CodedOutputStream
.computeInt32Size(3, pageSize_);
}
if (responseContentType_ != com.google.ads.googleads.v21.enums.ResponseContentTypeEnum.ResponseContentType.UNSPECIFIED.getNumber()) {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(4, responseContentType_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.ads.googleads.v21.services.ListBatchJobResultsRequest)) {
return super.equals(obj);
}
com.google.ads.googleads.v21.services.ListBatchJobResultsRequest other = (com.google.ads.googleads.v21.services.ListBatchJobResultsRequest) obj;
if (!getResourceName()
.equals(other.getResourceName())) return false;
if (!getPageToken()
.equals(other.getPageToken())) return false;
if (getPageSize()
!= other.getPageSize()) return false;
if (responseContentType_ != other.responseContentType_) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + RESOURCE_NAME_FIELD_NUMBER;
hash = (53 * hash) + getResourceName().hashCode();
hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getPageToken().hashCode();
hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
hash = (53 * hash) + getPageSize();
hash = (37 * hash) + RESPONSE_CONTENT_TYPE_FIELD_NUMBER;
hash = (53 * hash) + responseContentType_;
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.ads.googleads.v21.services.ListBatchJobResultsRequest parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v21.services.ListBatchJobResultsRequest parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.ListBatchJobResultsRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v21.services.ListBatchJobResultsRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.ListBatchJobResultsRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v21.services.ListBatchJobResultsRequest parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.ListBatchJobResultsRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v21.services.ListBatchJobResultsRequest parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.ListBatchJobResultsRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v21.services.ListBatchJobResultsRequest parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.ListBatchJobResultsRequest parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v21.services.ListBatchJobResultsRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.ads.googleads.v21.services.ListBatchJobResultsRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* Request message for
* [BatchJobService.ListBatchJobResults][google.ads.googleads.v21.services.BatchJobService.ListBatchJobResults].
* </pre>
*
* Protobuf type {@code google.ads.googleads.v21.services.ListBatchJobResultsRequest}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v21.services.ListBatchJobResultsRequest)
com.google.ads.googleads.v21.services.ListBatchJobResultsRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v21.services.BatchJobServiceProto.internal_static_google_ads_googleads_v21_services_ListBatchJobResultsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v21.services.BatchJobServiceProto.internal_static_google_ads_googleads_v21_services_ListBatchJobResultsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v21.services.ListBatchJobResultsRequest.class, com.google.ads.googleads.v21.services.ListBatchJobResultsRequest.Builder.class);
}
// Construct using com.google.ads.googleads.v21.services.ListBatchJobResultsRequest.newBuilder()
private Builder() {
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
resourceName_ = "";
pageToken_ = "";
pageSize_ = 0;
responseContentType_ = 0;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.ads.googleads.v21.services.BatchJobServiceProto.internal_static_google_ads_googleads_v21_services_ListBatchJobResultsRequest_descriptor;
}
@java.lang.Override
public com.google.ads.googleads.v21.services.ListBatchJobResultsRequest getDefaultInstanceForType() {
return com.google.ads.googleads.v21.services.ListBatchJobResultsRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.ads.googleads.v21.services.ListBatchJobResultsRequest build() {
com.google.ads.googleads.v21.services.ListBatchJobResultsRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.ads.googleads.v21.services.ListBatchJobResultsRequest buildPartial() {
com.google.ads.googleads.v21.services.ListBatchJobResultsRequest result = new com.google.ads.googleads.v21.services.ListBatchJobResultsRequest(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(com.google.ads.googleads.v21.services.ListBatchJobResultsRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.resourceName_ = resourceName_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.pageToken_ = pageToken_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.pageSize_ = pageSize_;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.responseContentType_ = responseContentType_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.ads.googleads.v21.services.ListBatchJobResultsRequest) {
return mergeFrom((com.google.ads.googleads.v21.services.ListBatchJobResultsRequest)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.ads.googleads.v21.services.ListBatchJobResultsRequest other) {
if (other == com.google.ads.googleads.v21.services.ListBatchJobResultsRequest.getDefaultInstance()) return this;
if (!other.getResourceName().isEmpty()) {
resourceName_ = other.resourceName_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getPageToken().isEmpty()) {
pageToken_ = other.pageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
if (other.getPageSize() != 0) {
setPageSize(other.getPageSize());
}
if (other.responseContentType_ != 0) {
setResponseContentTypeValue(other.getResponseContentTypeValue());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
resourceName_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18: {
pageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
case 24: {
pageSize_ = input.readInt32();
bitField0_ |= 0x00000004;
break;
} // case 24
case 32: {
responseContentType_ = input.readEnum();
bitField0_ |= 0x00000008;
break;
} // case 32
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object resourceName_ = "";
/**
* <pre>
* Required. The resource name of the batch job whose results are being
* listed.
* </pre>
*
* <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
* @return The resourceName.
*/
public java.lang.String getResourceName() {
java.lang.Object ref = resourceName_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
resourceName_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <pre>
* Required. The resource name of the batch job whose results are being
* listed.
* </pre>
*
* <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
* @return The bytes for resourceName.
*/
public com.google.protobuf.ByteString
getResourceNameBytes() {
java.lang.Object ref = resourceName_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
resourceName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <pre>
* Required. The resource name of the batch job whose results are being
* listed.
* </pre>
*
* <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
* @param value The resourceName to set.
* @return This builder for chaining.
*/
public Builder setResourceName(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
resourceName_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* <pre>
* Required. The resource name of the batch job whose results are being
* listed.
* </pre>
*
* <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
* @return This builder for chaining.
*/
public Builder clearResourceName() {
resourceName_ = getDefaultInstance().getResourceName();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* <pre>
* Required. The resource name of the batch job whose results are being
* listed.
* </pre>
*
* <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
* @param value The bytes for resourceName to set.
* @return This builder for chaining.
*/
public Builder setResourceNameBytes(
com.google.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
checkByteStringIsUtf8(value);
resourceName_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object pageToken_ = "";
/**
* <pre>
* Token of the page to retrieve. If not specified, the first
* page of results will be returned. Use the value obtained from
* `next_page_token` in the previous response in order to request
* the next page of results.
* </pre>
*
* <code>string page_token = 2;</code>
* @return The pageToken.
*/
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <pre>
* Token of the page to retrieve. If not specified, the first
* page of results will be returned. Use the value obtained from
* `next_page_token` in the previous response in order to request
* the next page of results.
* </pre>
*
* <code>string page_token = 2;</code>
* @return The bytes for pageToken.
*/
public com.google.protobuf.ByteString
getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <pre>
* Token of the page to retrieve. If not specified, the first
* page of results will be returned. Use the value obtained from
* `next_page_token` in the previous response in order to request
* the next page of results.
* </pre>
*
* <code>string page_token = 2;</code>
* @param value The pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageToken(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
pageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* <pre>
* Token of the page to retrieve. If not specified, the first
* page of results will be returned. Use the value obtained from
* `next_page_token` in the previous response in order to request
* the next page of results.
* </pre>
*
* <code>string page_token = 2;</code>
* @return This builder for chaining.
*/
public Builder clearPageToken() {
pageToken_ = getDefaultInstance().getPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
* <pre>
* Token of the page to retrieve. If not specified, the first
* page of results will be returned. Use the value obtained from
* `next_page_token` in the previous response in order to request
* the next page of results.
* </pre>
*
* <code>string page_token = 2;</code>
* @param value The bytes for pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageTokenBytes(
com.google.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
checkByteStringIsUtf8(value);
pageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private int pageSize_ ;
/**
* <pre>
* Number of elements to retrieve in a single page.
* When a page request is too large, the server may decide to
* further limit the number of returned resources.
* </pre>
*
* <code>int32 page_size = 3;</code>
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
/**
* <pre>
* Number of elements to retrieve in a single page.
* When a page request is too large, the server may decide to
* further limit the number of returned resources.
* </pre>
*
* <code>int32 page_size = 3;</code>
* @param value The pageSize to set.
* @return This builder for chaining.
*/
public Builder setPageSize(int value) {
pageSize_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* <pre>
* Number of elements to retrieve in a single page.
* When a page request is too large, the server may decide to
* further limit the number of returned resources.
* </pre>
*
* <code>int32 page_size = 3;</code>
* @return This builder for chaining.
*/
public Builder clearPageSize() {
bitField0_ = (bitField0_ & ~0x00000004);
pageSize_ = 0;
onChanged();
return this;
}
private int responseContentType_ = 0;
/**
* <pre>
* The response content type setting. Determines whether the mutable resource
* or just the resource name should be returned.
* </pre>
*
* <code>.google.ads.googleads.v21.enums.ResponseContentTypeEnum.ResponseContentType response_content_type = 4;</code>
* @return The enum numeric value on the wire for responseContentType.
*/
@java.lang.Override public int getResponseContentTypeValue() {
return responseContentType_;
}
/**
* <pre>
* The response content type setting. Determines whether the mutable resource
* or just the resource name should be returned.
* </pre>
*
* <code>.google.ads.googleads.v21.enums.ResponseContentTypeEnum.ResponseContentType response_content_type = 4;</code>
* @param value The enum numeric value on the wire for responseContentType to set.
* @return This builder for chaining.
*/
public Builder setResponseContentTypeValue(int value) {
responseContentType_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
* <pre>
* The response content type setting. Determines whether the mutable resource
* or just the resource name should be returned.
* </pre>
*
* <code>.google.ads.googleads.v21.enums.ResponseContentTypeEnum.ResponseContentType response_content_type = 4;</code>
* @return The responseContentType.
*/
@java.lang.Override
public com.google.ads.googleads.v21.enums.ResponseContentTypeEnum.ResponseContentType getResponseContentType() {
com.google.ads.googleads.v21.enums.ResponseContentTypeEnum.ResponseContentType result = com.google.ads.googleads.v21.enums.ResponseContentTypeEnum.ResponseContentType.forNumber(responseContentType_);
return result == null ? com.google.ads.googleads.v21.enums.ResponseContentTypeEnum.ResponseContentType.UNRECOGNIZED : result;
}
/**
* <pre>
* The response content type setting. Determines whether the mutable resource
* or just the resource name should be returned.
* </pre>
*
* <code>.google.ads.googleads.v21.enums.ResponseContentTypeEnum.ResponseContentType response_content_type = 4;</code>
* @param value The responseContentType to set.
* @return This builder for chaining.
*/
public Builder setResponseContentType(com.google.ads.googleads.v21.enums.ResponseContentTypeEnum.ResponseContentType value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000008;
responseContentType_ = value.getNumber();
onChanged();
return this;
}
/**
* <pre>
* The response content type setting. Determines whether the mutable resource
* or just the resource name should be returned.
* </pre>
*
* <code>.google.ads.googleads.v21.enums.ResponseContentTypeEnum.ResponseContentType response_content_type = 4;</code>
* @return This builder for chaining.
*/
public Builder clearResponseContentType() {
bitField0_ = (bitField0_ & ~0x00000008);
responseContentType_ = 0;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v21.services.ListBatchJobResultsRequest)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v21.services.ListBatchJobResultsRequest)
private static final com.google.ads.googleads.v21.services.ListBatchJobResultsRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.ads.googleads.v21.services.ListBatchJobResultsRequest();
}
public static com.google.ads.googleads.v21.services.ListBatchJobResultsRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListBatchJobResultsRequest>
PARSER = new com.google.protobuf.AbstractParser<ListBatchJobResultsRequest>() {
@java.lang.Override
public ListBatchJobResultsRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListBatchJobResultsRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListBatchJobResultsRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.ads.googleads.v21.services.ListBatchJobResultsRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 35,864 | java-workstations/proto-google-cloud-workstations-v1beta/src/main/java/com/google/cloud/workstations/v1beta/WorkstationConfigOrBuilder.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/workstations/v1beta/workstations.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.workstations.v1beta;
public interface WorkstationConfigOrBuilder
extends
// @@protoc_insertion_point(interface_extends:google.cloud.workstations.v1beta.WorkstationConfig)
com.google.protobuf.MessageOrBuilder {
/**
*
*
* <pre>
* Full name of this workstation configuration.
* </pre>
*
* <code>string name = 1;</code>
*
* @return The name.
*/
java.lang.String getName();
/**
*
*
* <pre>
* Full name of this workstation configuration.
* </pre>
*
* <code>string name = 1;</code>
*
* @return The bytes for name.
*/
com.google.protobuf.ByteString getNameBytes();
/**
*
*
* <pre>
* Optional. Human-readable name for this workstation configuration.
* </pre>
*
* <code>string display_name = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The displayName.
*/
java.lang.String getDisplayName();
/**
*
*
* <pre>
* Optional. Human-readable name for this workstation configuration.
* </pre>
*
* <code>string display_name = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for displayName.
*/
com.google.protobuf.ByteString getDisplayNameBytes();
/**
*
*
* <pre>
* Output only. A system-assigned unique identifier for this workstation
* configuration.
* </pre>
*
* <code>string uid = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The uid.
*/
java.lang.String getUid();
/**
*
*
* <pre>
* Output only. A system-assigned unique identifier for this workstation
* configuration.
* </pre>
*
* <code>string uid = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The bytes for uid.
*/
com.google.protobuf.ByteString getUidBytes();
/**
*
*
* <pre>
* Output only. Indicates whether this workstation configuration is currently
* being updated to match its intended state.
* </pre>
*
* <code>bool reconciling = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The reconciling.
*/
boolean getReconciling();
/**
*
*
* <pre>
* Optional. Client-specified annotations.
* </pre>
*
* <code>map<string, string> annotations = 5 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
int getAnnotationsCount();
/**
*
*
* <pre>
* Optional. Client-specified annotations.
* </pre>
*
* <code>map<string, string> annotations = 5 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
boolean containsAnnotations(java.lang.String key);
/** Use {@link #getAnnotationsMap()} instead. */
@java.lang.Deprecated
java.util.Map<java.lang.String, java.lang.String> getAnnotations();
/**
*
*
* <pre>
* Optional. Client-specified annotations.
* </pre>
*
* <code>map<string, string> annotations = 5 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
java.util.Map<java.lang.String, java.lang.String> getAnnotationsMap();
/**
*
*
* <pre>
* Optional. Client-specified annotations.
* </pre>
*
* <code>map<string, string> annotations = 5 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
/* nullable */
java.lang.String getAnnotationsOrDefault(
java.lang.String key,
/* nullable */
java.lang.String defaultValue);
/**
*
*
* <pre>
* Optional. Client-specified annotations.
* </pre>
*
* <code>map<string, string> annotations = 5 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
java.lang.String getAnnotationsOrThrow(java.lang.String key);
/**
*
*
* <pre>
* Optional.
* [Labels](https://cloud.google.com/workstations/docs/label-resources) that
* are applied to the workstation configuration and that are also propagated
* to the underlying Compute Engine resources.
* </pre>
*
* <code>map<string, string> labels = 18 [(.google.api.field_behavior) = OPTIONAL];</code>
*/
int getLabelsCount();
/**
*
*
* <pre>
* Optional.
* [Labels](https://cloud.google.com/workstations/docs/label-resources) that
* are applied to the workstation configuration and that are also propagated
* to the underlying Compute Engine resources.
* </pre>
*
* <code>map<string, string> labels = 18 [(.google.api.field_behavior) = OPTIONAL];</code>
*/
boolean containsLabels(java.lang.String key);
  /**
   * Use {@link #getLabelsMap()} instead.
   *
   * @deprecated Use {@link #getLabelsMap()} instead.
   */
  @java.lang.Deprecated
  java.util.Map<java.lang.String, java.lang.String> getLabels();
  /**
   *
   *
   * <pre>
   * Optional.
   * [Labels](https://cloud.google.com/workstations/docs/label-resources) that
   * are applied to the workstation configuration and that are also propagated
   * to the underlying Compute Engine resources.
   * </pre>
   *
   * <code>map&lt;string, string&gt; labels = 18 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return the labels map (never null; empty when no labels are set)
   */
  java.util.Map<java.lang.String, java.lang.String> getLabelsMap();
  /**
   *
   *
   * <pre>
   * Optional.
   * [Labels](https://cloud.google.com/workstations/docs/label-resources) that
   * are applied to the workstation configuration and that are also propagated
   * to the underlying Compute Engine resources.
   * </pre>
   *
   * <code>map&lt;string, string&gt; labels = 18 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @param key the label key to look up
   * @param defaultValue the value to return when {@code key} is absent (may be null)
   * @return the value mapped to {@code key}, or {@code defaultValue} if the key is absent
   */
  /* nullable */
  java.lang.String getLabelsOrDefault(
      java.lang.String key,
      /* nullable */
      java.lang.String defaultValue);
  /**
   *
   *
   * <pre>
   * Optional.
   * [Labels](https://cloud.google.com/workstations/docs/label-resources) that
   * are applied to the workstation configuration and that are also propagated
   * to the underlying Compute Engine resources.
   * </pre>
   *
   * <code>map&lt;string, string&gt; labels = 18 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @param key the label key to look up
   * @return the value mapped to {@code key}
   * @throws java.lang.IllegalArgumentException if {@code key} is not present in the map
   *     (standard protobuf map-accessor contract)
   */
  java.lang.String getLabelsOrThrow(java.lang.String key);
/**
*
*
* <pre>
* Output only. Time when this workstation configuration was created.
* </pre>
*
* <code>.google.protobuf.Timestamp create_time = 6 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return Whether the createTime field is set.
*/
boolean hasCreateTime();
/**
*
*
* <pre>
* Output only. Time when this workstation configuration was created.
* </pre>
*
* <code>.google.protobuf.Timestamp create_time = 6 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The createTime.
*/
com.google.protobuf.Timestamp getCreateTime();
/**
*
*
* <pre>
* Output only. Time when this workstation configuration was created.
* </pre>
*
* <code>.google.protobuf.Timestamp create_time = 6 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder();
/**
*
*
* <pre>
* Output only. Time when this workstation configuration was most recently
* updated.
* </pre>
*
* <code>.google.protobuf.Timestamp update_time = 7 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return Whether the updateTime field is set.
*/
boolean hasUpdateTime();
/**
*
*
* <pre>
* Output only. Time when this workstation configuration was most recently
* updated.
* </pre>
*
* <code>.google.protobuf.Timestamp update_time = 7 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The updateTime.
*/
com.google.protobuf.Timestamp getUpdateTime();
/**
*
*
* <pre>
* Output only. Time when this workstation configuration was most recently
* updated.
* </pre>
*
* <code>.google.protobuf.Timestamp update_time = 7 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
com.google.protobuf.TimestampOrBuilder getUpdateTimeOrBuilder();
/**
*
*
* <pre>
* Output only. Time when this workstation configuration was soft-deleted.
* </pre>
*
* <code>.google.protobuf.Timestamp delete_time = 8 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return Whether the deleteTime field is set.
*/
boolean hasDeleteTime();
/**
*
*
* <pre>
* Output only. Time when this workstation configuration was soft-deleted.
* </pre>
*
* <code>.google.protobuf.Timestamp delete_time = 8 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The deleteTime.
*/
com.google.protobuf.Timestamp getDeleteTime();
/**
*
*
* <pre>
* Output only. Time when this workstation configuration was soft-deleted.
* </pre>
*
* <code>.google.protobuf.Timestamp delete_time = 8 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
com.google.protobuf.TimestampOrBuilder getDeleteTimeOrBuilder();
/**
*
*
* <pre>
* Optional. Checksum computed by the server. May be sent on update and delete
* requests to make sure that the client has an up-to-date value before
* proceeding.
* </pre>
*
* <code>string etag = 9 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The etag.
*/
java.lang.String getEtag();
/**
*
*
* <pre>
* Optional. Checksum computed by the server. May be sent on update and delete
* requests to make sure that the client has an up-to-date value before
* proceeding.
* </pre>
*
* <code>string etag = 9 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for etag.
*/
com.google.protobuf.ByteString getEtagBytes();
/**
*
*
* <pre>
* Optional. Number of seconds to wait before automatically stopping a
* workstation after it last received user traffic.
*
* A value of `"0s"` indicates that Cloud Workstations VMs created with this
* configuration should never time out due to idleness.
* Provide
* [duration](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#duration)
* terminated by `s` for seconds—for example, `"7200s"` (2 hours).
* The default is `"1200s"` (20 minutes).
* </pre>
*
* <code>.google.protobuf.Duration idle_timeout = 10 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the idleTimeout field is set.
*/
boolean hasIdleTimeout();
/**
*
*
* <pre>
* Optional. Number of seconds to wait before automatically stopping a
* workstation after it last received user traffic.
*
* A value of `"0s"` indicates that Cloud Workstations VMs created with this
* configuration should never time out due to idleness.
* Provide
* [duration](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#duration)
* terminated by `s` for seconds—for example, `"7200s"` (2 hours).
* The default is `"1200s"` (20 minutes).
* </pre>
*
* <code>.google.protobuf.Duration idle_timeout = 10 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The idleTimeout.
*/
com.google.protobuf.Duration getIdleTimeout();
/**
*
*
* <pre>
* Optional. Number of seconds to wait before automatically stopping a
* workstation after it last received user traffic.
*
* A value of `"0s"` indicates that Cloud Workstations VMs created with this
* configuration should never time out due to idleness.
* Provide
* [duration](https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#duration)
* terminated by `s` for seconds—for example, `"7200s"` (2 hours).
* The default is `"1200s"` (20 minutes).
* </pre>
*
* <code>.google.protobuf.Duration idle_timeout = 10 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
com.google.protobuf.DurationOrBuilder getIdleTimeoutOrBuilder();
/**
*
*
* <pre>
* Optional. Number of seconds that a workstation can run until it is
* automatically shut down. We recommend that workstations be shut down daily
* to reduce costs and so that security updates can be applied upon restart.
* The
* [idle_timeout][google.cloud.workstations.v1beta.WorkstationConfig.idle_timeout]
* and
* [running_timeout][google.cloud.workstations.v1beta.WorkstationConfig.running_timeout]
* fields are independent of each other. Note that the
* [running_timeout][google.cloud.workstations.v1beta.WorkstationConfig.running_timeout]
* field shuts down VMs after the specified time, regardless of whether or not
* the VMs are idle.
*
* Provide duration terminated by `s` for seconds—for example, `"54000s"`
* (15 hours). Defaults to `"43200s"` (12 hours). A value of `"0s"` indicates
* that workstations using this configuration should never time out. If
* [encryption_key][google.cloud.workstations.v1beta.WorkstationConfig.encryption_key]
* is set, it must be greater than `"0s"` and less than
* `"86400s"` (24 hours).
*
* Warning: A value of `"0s"` indicates that Cloud Workstations VMs created
* with this configuration have no maximum running time. This is strongly
* discouraged because you incur costs and will not pick up security updates.
* </pre>
*
* <code>.google.protobuf.Duration running_timeout = 11 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the runningTimeout field is set.
*/
boolean hasRunningTimeout();
/**
*
*
* <pre>
* Optional. Number of seconds that a workstation can run until it is
* automatically shut down. We recommend that workstations be shut down daily
* to reduce costs and so that security updates can be applied upon restart.
* The
* [idle_timeout][google.cloud.workstations.v1beta.WorkstationConfig.idle_timeout]
* and
* [running_timeout][google.cloud.workstations.v1beta.WorkstationConfig.running_timeout]
* fields are independent of each other. Note that the
* [running_timeout][google.cloud.workstations.v1beta.WorkstationConfig.running_timeout]
* field shuts down VMs after the specified time, regardless of whether or not
* the VMs are idle.
*
* Provide duration terminated by `s` for seconds—for example, `"54000s"`
* (15 hours). Defaults to `"43200s"` (12 hours). A value of `"0s"` indicates
* that workstations using this configuration should never time out. If
* [encryption_key][google.cloud.workstations.v1beta.WorkstationConfig.encryption_key]
* is set, it must be greater than `"0s"` and less than
* `"86400s"` (24 hours).
*
* Warning: A value of `"0s"` indicates that Cloud Workstations VMs created
* with this configuration have no maximum running time. This is strongly
* discouraged because you incur costs and will not pick up security updates.
* </pre>
*
* <code>.google.protobuf.Duration running_timeout = 11 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The runningTimeout.
*/
com.google.protobuf.Duration getRunningTimeout();
/**
*
*
* <pre>
* Optional. Number of seconds that a workstation can run until it is
* automatically shut down. We recommend that workstations be shut down daily
* to reduce costs and so that security updates can be applied upon restart.
* The
* [idle_timeout][google.cloud.workstations.v1beta.WorkstationConfig.idle_timeout]
* and
* [running_timeout][google.cloud.workstations.v1beta.WorkstationConfig.running_timeout]
* fields are independent of each other. Note that the
* [running_timeout][google.cloud.workstations.v1beta.WorkstationConfig.running_timeout]
* field shuts down VMs after the specified time, regardless of whether or not
* the VMs are idle.
*
* Provide duration terminated by `s` for seconds—for example, `"54000s"`
* (15 hours). Defaults to `"43200s"` (12 hours). A value of `"0s"` indicates
* that workstations using this configuration should never time out. If
* [encryption_key][google.cloud.workstations.v1beta.WorkstationConfig.encryption_key]
* is set, it must be greater than `"0s"` and less than
* `"86400s"` (24 hours).
*
* Warning: A value of `"0s"` indicates that Cloud Workstations VMs created
* with this configuration have no maximum running time. This is strongly
* discouraged because you incur costs and will not pick up security updates.
* </pre>
*
* <code>.google.protobuf.Duration running_timeout = 11 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
com.google.protobuf.DurationOrBuilder getRunningTimeoutOrBuilder();
/**
*
*
* <pre>
* Optional. Runtime host for the workstation.
* </pre>
*
* <code>
* .google.cloud.workstations.v1beta.WorkstationConfig.Host host = 12 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the host field is set.
*/
boolean hasHost();
/**
*
*
* <pre>
* Optional. Runtime host for the workstation.
* </pre>
*
* <code>
* .google.cloud.workstations.v1beta.WorkstationConfig.Host host = 12 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The host.
*/
com.google.cloud.workstations.v1beta.WorkstationConfig.Host getHost();
/**
*
*
* <pre>
* Optional. Runtime host for the workstation.
* </pre>
*
* <code>
* .google.cloud.workstations.v1beta.WorkstationConfig.Host host = 12 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
com.google.cloud.workstations.v1beta.WorkstationConfig.HostOrBuilder getHostOrBuilder();
/**
*
*
* <pre>
* Optional. Directories to persist across workstation sessions.
* </pre>
*
* <code>
* repeated .google.cloud.workstations.v1beta.WorkstationConfig.PersistentDirectory persistent_directories = 13 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
java.util.List<com.google.cloud.workstations.v1beta.WorkstationConfig.PersistentDirectory>
getPersistentDirectoriesList();
/**
*
*
* <pre>
* Optional. Directories to persist across workstation sessions.
* </pre>
*
* <code>
* repeated .google.cloud.workstations.v1beta.WorkstationConfig.PersistentDirectory persistent_directories = 13 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
com.google.cloud.workstations.v1beta.WorkstationConfig.PersistentDirectory
getPersistentDirectories(int index);
/**
*
*
* <pre>
* Optional. Directories to persist across workstation sessions.
* </pre>
*
* <code>
* repeated .google.cloud.workstations.v1beta.WorkstationConfig.PersistentDirectory persistent_directories = 13 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
int getPersistentDirectoriesCount();
/**
*
*
* <pre>
* Optional. Directories to persist across workstation sessions.
* </pre>
*
* <code>
* repeated .google.cloud.workstations.v1beta.WorkstationConfig.PersistentDirectory persistent_directories = 13 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
java.util.List<
? extends
com.google.cloud.workstations.v1beta.WorkstationConfig.PersistentDirectoryOrBuilder>
getPersistentDirectoriesOrBuilderList();
/**
*
*
* <pre>
* Optional. Directories to persist across workstation sessions.
* </pre>
*
* <code>
* repeated .google.cloud.workstations.v1beta.WorkstationConfig.PersistentDirectory persistent_directories = 13 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
com.google.cloud.workstations.v1beta.WorkstationConfig.PersistentDirectoryOrBuilder
getPersistentDirectoriesOrBuilder(int index);
/**
*
*
* <pre>
* Optional. Ephemeral directories which won't persist across workstation
* sessions.
* </pre>
*
* <code>
* repeated .google.cloud.workstations.v1beta.WorkstationConfig.EphemeralDirectory ephemeral_directories = 22 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
java.util.List<com.google.cloud.workstations.v1beta.WorkstationConfig.EphemeralDirectory>
getEphemeralDirectoriesList();
/**
*
*
* <pre>
* Optional. Ephemeral directories which won't persist across workstation
* sessions.
* </pre>
*
* <code>
* repeated .google.cloud.workstations.v1beta.WorkstationConfig.EphemeralDirectory ephemeral_directories = 22 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
com.google.cloud.workstations.v1beta.WorkstationConfig.EphemeralDirectory getEphemeralDirectories(
int index);
/**
*
*
* <pre>
* Optional. Ephemeral directories which won't persist across workstation
* sessions.
* </pre>
*
* <code>
* repeated .google.cloud.workstations.v1beta.WorkstationConfig.EphemeralDirectory ephemeral_directories = 22 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
int getEphemeralDirectoriesCount();
/**
*
*
* <pre>
* Optional. Ephemeral directories which won't persist across workstation
* sessions.
* </pre>
*
* <code>
* repeated .google.cloud.workstations.v1beta.WorkstationConfig.EphemeralDirectory ephemeral_directories = 22 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
java.util.List<
? extends
com.google.cloud.workstations.v1beta.WorkstationConfig.EphemeralDirectoryOrBuilder>
getEphemeralDirectoriesOrBuilderList();
/**
*
*
* <pre>
* Optional. Ephemeral directories which won't persist across workstation
* sessions.
* </pre>
*
* <code>
* repeated .google.cloud.workstations.v1beta.WorkstationConfig.EphemeralDirectory ephemeral_directories = 22 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
com.google.cloud.workstations.v1beta.WorkstationConfig.EphemeralDirectoryOrBuilder
getEphemeralDirectoriesOrBuilder(int index);
/**
*
*
* <pre>
* Optional. Container that runs upon startup for each workstation using this
* workstation configuration.
* </pre>
*
* <code>
* .google.cloud.workstations.v1beta.WorkstationConfig.Container container = 14 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the container field is set.
*/
boolean hasContainer();
/**
*
*
* <pre>
* Optional. Container that runs upon startup for each workstation using this
* workstation configuration.
* </pre>
*
* <code>
* .google.cloud.workstations.v1beta.WorkstationConfig.Container container = 14 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The container.
*/
com.google.cloud.workstations.v1beta.WorkstationConfig.Container getContainer();
/**
*
*
* <pre>
* Optional. Container that runs upon startup for each workstation using this
* workstation configuration.
* </pre>
*
* <code>
* .google.cloud.workstations.v1beta.WorkstationConfig.Container container = 14 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
com.google.cloud.workstations.v1beta.WorkstationConfig.ContainerOrBuilder getContainerOrBuilder();
/**
*
*
* <pre>
* Immutable. Encrypts resources of this workstation configuration using a
* customer-managed encryption key (CMEK).
*
* If specified, the boot disk of the Compute Engine instance and the
* persistent disk are encrypted using this encryption key. If
* this field is not set, the disks are encrypted using a generated
* key. Customer-managed encryption keys do not protect disk metadata.
*
* If the customer-managed encryption key is rotated, when the workstation
* instance is stopped, the system attempts to recreate the
* persistent disk with the new version of the key. Be sure to keep
* older versions of the key until the persistent disk is recreated.
* Otherwise, data on the persistent disk might be lost.
*
* If the encryption key is revoked, the workstation session automatically
* stops within 7 hours.
*
* Immutable after the workstation configuration is created.
* </pre>
*
* <code>
* .google.cloud.workstations.v1beta.WorkstationConfig.CustomerEncryptionKey encryption_key = 17 [(.google.api.field_behavior) = IMMUTABLE];
* </code>
*
* @return Whether the encryptionKey field is set.
*/
boolean hasEncryptionKey();
/**
*
*
* <pre>
* Immutable. Encrypts resources of this workstation configuration using a
* customer-managed encryption key (CMEK).
*
* If specified, the boot disk of the Compute Engine instance and the
* persistent disk are encrypted using this encryption key. If
* this field is not set, the disks are encrypted using a generated
* key. Customer-managed encryption keys do not protect disk metadata.
*
* If the customer-managed encryption key is rotated, when the workstation
* instance is stopped, the system attempts to recreate the
* persistent disk with the new version of the key. Be sure to keep
* older versions of the key until the persistent disk is recreated.
* Otherwise, data on the persistent disk might be lost.
*
* If the encryption key is revoked, the workstation session automatically
* stops within 7 hours.
*
* Immutable after the workstation configuration is created.
* </pre>
*
* <code>
* .google.cloud.workstations.v1beta.WorkstationConfig.CustomerEncryptionKey encryption_key = 17 [(.google.api.field_behavior) = IMMUTABLE];
* </code>
*
* @return The encryptionKey.
*/
com.google.cloud.workstations.v1beta.WorkstationConfig.CustomerEncryptionKey getEncryptionKey();
/**
*
*
* <pre>
* Immutable. Encrypts resources of this workstation configuration using a
* customer-managed encryption key (CMEK).
*
* If specified, the boot disk of the Compute Engine instance and the
* persistent disk are encrypted using this encryption key. If
* this field is not set, the disks are encrypted using a generated
* key. Customer-managed encryption keys do not protect disk metadata.
*
* If the customer-managed encryption key is rotated, when the workstation
* instance is stopped, the system attempts to recreate the
* persistent disk with the new version of the key. Be sure to keep
* older versions of the key until the persistent disk is recreated.
* Otherwise, data on the persistent disk might be lost.
*
* If the encryption key is revoked, the workstation session automatically
* stops within 7 hours.
*
* Immutable after the workstation configuration is created.
* </pre>
*
* <code>
* .google.cloud.workstations.v1beta.WorkstationConfig.CustomerEncryptionKey encryption_key = 17 [(.google.api.field_behavior) = IMMUTABLE];
* </code>
*/
com.google.cloud.workstations.v1beta.WorkstationConfig.CustomerEncryptionKeyOrBuilder
getEncryptionKeyOrBuilder();
/**
*
*
* <pre>
* Optional. Readiness checks to perform when starting a workstation using
* this workstation configuration. Mark a workstation as running only after
* all specified readiness checks return 200 status codes.
* </pre>
*
* <code>
* repeated .google.cloud.workstations.v1beta.WorkstationConfig.ReadinessCheck readiness_checks = 19 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
java.util.List<com.google.cloud.workstations.v1beta.WorkstationConfig.ReadinessCheck>
getReadinessChecksList();
/**
*
*
* <pre>
* Optional. Readiness checks to perform when starting a workstation using
* this workstation configuration. Mark a workstation as running only after
* all specified readiness checks return 200 status codes.
* </pre>
*
* <code>
* repeated .google.cloud.workstations.v1beta.WorkstationConfig.ReadinessCheck readiness_checks = 19 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
com.google.cloud.workstations.v1beta.WorkstationConfig.ReadinessCheck getReadinessChecks(
int index);
/**
*
*
* <pre>
* Optional. Readiness checks to perform when starting a workstation using
* this workstation configuration. Mark a workstation as running only after
* all specified readiness checks return 200 status codes.
* </pre>
*
* <code>
* repeated .google.cloud.workstations.v1beta.WorkstationConfig.ReadinessCheck readiness_checks = 19 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
int getReadinessChecksCount();
/**
*
*
* <pre>
* Optional. Readiness checks to perform when starting a workstation using
* this workstation configuration. Mark a workstation as running only after
* all specified readiness checks return 200 status codes.
* </pre>
*
* <code>
* repeated .google.cloud.workstations.v1beta.WorkstationConfig.ReadinessCheck readiness_checks = 19 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
java.util.List<
? extends com.google.cloud.workstations.v1beta.WorkstationConfig.ReadinessCheckOrBuilder>
getReadinessChecksOrBuilderList();
/**
*
*
* <pre>
* Optional. Readiness checks to perform when starting a workstation using
* this workstation configuration. Mark a workstation as running only after
* all specified readiness checks return 200 status codes.
* </pre>
*
* <code>
* repeated .google.cloud.workstations.v1beta.WorkstationConfig.ReadinessCheck readiness_checks = 19 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
com.google.cloud.workstations.v1beta.WorkstationConfig.ReadinessCheckOrBuilder
getReadinessChecksOrBuilder(int index);
/**
*
*
* <pre>
* Optional. Immutable. Specifies the zones used to replicate the VM and disk
* resources within the region. If set, exactly two zones within the
* workstation cluster's region must be specified—for example,
* `['us-central1-a', 'us-central1-f']`. If this field is empty, two default
* zones within the region are used.
*
* Immutable after the workstation configuration is created.
* </pre>
*
* <code>
* repeated string replica_zones = 23 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = IMMUTABLE];
* </code>
*
* @return A list containing the replicaZones.
*/
java.util.List<java.lang.String> getReplicaZonesList();
/**
*
*
* <pre>
* Optional. Immutable. Specifies the zones used to replicate the VM and disk
* resources within the region. If set, exactly two zones within the
* workstation cluster's region must be specified—for example,
* `['us-central1-a', 'us-central1-f']`. If this field is empty, two default
* zones within the region are used.
*
* Immutable after the workstation configuration is created.
* </pre>
*
* <code>
* repeated string replica_zones = 23 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = IMMUTABLE];
* </code>
*
* @return The count of replicaZones.
*/
int getReplicaZonesCount();
/**
*
*
* <pre>
* Optional. Immutable. Specifies the zones used to replicate the VM and disk
* resources within the region. If set, exactly two zones within the
* workstation cluster's region must be specified—for example,
* `['us-central1-a', 'us-central1-f']`. If this field is empty, two default
* zones within the region are used.
*
* Immutable after the workstation configuration is created.
* </pre>
*
* <code>
* repeated string replica_zones = 23 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = IMMUTABLE];
* </code>
*
* @param index The index of the element to return.
* @return The replicaZones at the given index.
*/
java.lang.String getReplicaZones(int index);
/**
*
*
* <pre>
* Optional. Immutable. Specifies the zones used to replicate the VM and disk
* resources within the region. If set, exactly two zones within the
* workstation cluster's region must be specified—for example,
* `['us-central1-a', 'us-central1-f']`. If this field is empty, two default
* zones within the region are used.
*
* Immutable after the workstation configuration is created.
* </pre>
*
* <code>
* repeated string replica_zones = 23 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = IMMUTABLE];
* </code>
*
* @param index The index of the value to return.
* @return The bytes of the replicaZones at the given index.
*/
com.google.protobuf.ByteString getReplicaZonesBytes(int index);
/**
*
*
* <pre>
* Output only. Whether this resource is degraded, in which case it may
* require user action to restore full functionality. See also the
* [conditions][google.cloud.workstations.v1beta.WorkstationConfig.conditions]
* field.
* </pre>
*
* <code>bool degraded = 15 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The degraded.
*/
boolean getDegraded();
/**
*
*
* <pre>
* Output only. Status conditions describing the current resource state.
* </pre>
*
* <code>repeated .google.rpc.Status conditions = 16 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
java.util.List<com.google.rpc.Status> getConditionsList();
/**
*
*
* <pre>
* Output only. Status conditions describing the current resource state.
* </pre>
*
* <code>repeated .google.rpc.Status conditions = 16 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
com.google.rpc.Status getConditions(int index);
/**
*
*
* <pre>
* Output only. Status conditions describing the current resource state.
* </pre>
*
* <code>repeated .google.rpc.Status conditions = 16 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
int getConditionsCount();
/**
*
*
* <pre>
* Output only. Status conditions describing the current resource state.
* </pre>
*
* <code>repeated .google.rpc.Status conditions = 16 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
java.util.List<? extends com.google.rpc.StatusOrBuilder> getConditionsOrBuilderList();
/**
*
*
* <pre>
* Output only. Status conditions describing the current resource state.
* </pre>
*
* <code>repeated .google.rpc.Status conditions = 16 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
com.google.rpc.StatusOrBuilder getConditionsOrBuilder(int index);
/**
*
*
* <pre>
* Optional. Whether to enable Linux `auditd` logging on the workstation. When
* enabled, a service account must also be specified that has
* `logging.buckets.write` permission on the project. Operating system audit
* logging is distinct from [Cloud Audit
* Logs](https://cloud.google.com/workstations/docs/audit-logging).
* </pre>
*
* <code>bool enable_audit_agent = 20 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The enableAuditAgent.
*/
boolean getEnableAuditAgent();
}
|
googleapis/google-cloud-java | 36,019 | java-shopping-merchant-lfp/proto-google-shopping-merchant-lfp-v1beta/src/main/java/com/google/shopping/merchant/lfp/v1beta/ListLfpStoresRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/shopping/merchant/lfp/v1beta/lfpstore.proto
// Protobuf Java Version: 3.25.8
package com.google.shopping.merchant.lfp.v1beta;
/**
*
*
* <pre>
* Request message for the ListLfpStores method.
* </pre>
*
* Protobuf type {@code google.shopping.merchant.lfp.v1beta.ListLfpStoresRequest}
*/
public final class ListLfpStoresRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.shopping.merchant.lfp.v1beta.ListLfpStoresRequest)
ListLfpStoresRequestOrBuilder {
private static final long serialVersionUID = 0L;
  // Instances are created via ListLfpStoresRequest.newBuilder(); this constructor is private
  // so the Builder API is the only way to populate a message.
  private ListLfpStoresRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default-instance constructor: proto3 string fields are initialized to the empty string
  // (never null); the numeric fields rely on their Java defaults (0 / 0L).
  private ListLfpStoresRequest() {
    parent_ = "";
    pageToken_ = "";
  }
  // Called by the protobuf runtime (via the generated-message machinery) to allocate a fresh
  // empty message, e.g. during parsing; the parameter exists only to select this overload.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListLfpStoresRequest();
  }
  // Returns this message type's protobuf descriptor, held by the generated LfpStoreProto
  // outer class (source proto: google/shopping/merchant/lfp/v1beta/lfpstore.proto).
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.shopping.merchant.lfp.v1beta.LfpStoreProto
        .internal_static_google_shopping_merchant_lfp_v1beta_ListLfpStoresRequest_descriptor;
  }
  // Wires the descriptor's fields to this class and its Builder so the reflection-based
  // accessors used by the protobuf runtime can read/write fields by name.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.shopping.merchant.lfp.v1beta.LfpStoreProto
        .internal_static_google_shopping_merchant_lfp_v1beta_ListLfpStoresRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.shopping.merchant.lfp.v1beta.ListLfpStoresRequest.class,
            com.google.shopping.merchant.lfp.v1beta.ListLfpStoresRequest.Builder.class);
  }
  public static final int PARENT_FIELD_NUMBER = 1;
  // Backing store for `parent`. Holds either a java.lang.String or a
  // com.google.protobuf.ByteString: parsing stores the raw ByteString, and the first
  // getParent() call decodes and caches the String form. Declared volatile so the racy
  // single-check caching below publishes the cached value safely across threads.
  @SuppressWarnings("serial")
  private volatile java.lang.Object parent_ = "";
  /**
   *
   *
   * <pre>
   * Required. The LFP partner.
   * Format: `accounts/{account}`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String; a concurrent race may decode twice, which is benign
      // because both results are equal and String is immutable.
      parent_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The LFP partner.
   * Format: `accounts/{account}`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      // Mirror of getParent(): encode the String form to UTF-8 and cache the ByteString
      // back into the same volatile field (racy single-check; benign on contention).
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int TARGET_ACCOUNT_FIELD_NUMBER = 2;
  // Proto3 scalar: 0 is the default and indicates the field was not set on the wire.
  private long targetAccount_ = 0L;
  /**
   *
   *
   * <pre>
   * Required. The Merchant Center id of the merchant to list stores for.
   * </pre>
   *
   * <code>int64 target_account = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The targetAccount.
   */
  @java.lang.Override
  public long getTargetAccount() {
    return targetAccount_;
  }
  public static final int PAGE_SIZE_FIELD_NUMBER = 3;
  // Proto3 scalar: 0 is the default and means "unset"; the service then applies its own
  // default page size (250 per the field documentation below).
  private int pageSize_ = 0;
  /**
   *
   *
   * <pre>
   * Optional. The maximum number of `LfpStore` resources for the given account
   * to return. The service returns fewer than this value if the number of
   * stores for the given account is less than the `pageSize`. The default value
   * is 250. The maximum value is 1000; If a value higher than the maximum is
   * specified, then the `pageSize` will default to the maximum.
   * </pre>
   *
   * <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The pageSize.
   */
  @java.lang.Override
  public int getPageSize() {
    return pageSize_;
  }
public static final int PAGE_TOKEN_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
// Holds either a java.lang.String or a com.google.protobuf.ByteString; decoded lazily.
private volatile java.lang.Object pageToken_ = "";
/**
 *
 *
 * <pre>
 * Optional. A page token, received from a previous `ListLfpStoresRequest`
 * call. Provide the page token to retrieve the subsequent page. When
 * paginating, all other parameters provided to `ListLfpStoresRequest` must
 * match the call that provided the page token. The token returned as
 * [nextPageToken][google.shopping.merchant.lfp.v1beta.ListLfpStoresResponse.next_page_token]
 * in the response to the previous request.
 * </pre>
 *
 * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return The pageToken.
 */
@java.lang.Override
public java.lang.String getPageToken() {
  java.lang.Object ref = pageToken_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // Decode the ByteString once and cache the String form for later calls.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    pageToken_ = s;
    return s;
  }
}
/**
 *
 *
 * <pre>
 * Optional. A page token, received from a previous `ListLfpStoresRequest`
 * call. Provide the page token to retrieve the subsequent page. When
 * paginating, all other parameters provided to `ListLfpStoresRequest` must
 * match the call that provided the page token. The token returned as
 * [nextPageToken][google.shopping.merchant.lfp.v1beta.ListLfpStoresResponse.next_page_token]
 * in the response to the previous request.
 * </pre>
 *
 * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return The bytes for pageToken.
 */
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
  java.lang.Object ref = pageToken_;
  if (ref instanceof java.lang.String) {
    // Encode once and cache the ByteString form.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    pageToken_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
// Memoized tri-state: -1 = not computed, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // Proto3 message with no required submessages: always initialized.
  memoizedIsInitialized = 1;
  return true;
}
@java.lang.Override
// Serializes only fields with non-default values, in field-number order,
// followed by any unknown fields retained during parsing.
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
  }
  if (targetAccount_ != 0L) {
    output.writeInt64(2, targetAccount_);
  }
  if (pageSize_ != 0) {
    output.writeInt32(3, pageSize_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 4, pageToken_);
  }
  getUnknownFields().writeTo(output);
}
@java.lang.Override
// Computes the exact wire size of writeTo()'s output; memoized in memoizedSize
// (-1 sentinel means "not yet computed").
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
  }
  if (targetAccount_ != 0L) {
    size += com.google.protobuf.CodedOutputStream.computeInt64Size(2, targetAccount_);
  }
  if (pageSize_ != 0) {
    size += com.google.protobuf.CodedOutputStream.computeInt32Size(3, pageSize_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, pageToken_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
@java.lang.Override
// Field-by-field value equality, including retained unknown fields.
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.shopping.merchant.lfp.v1beta.ListLfpStoresRequest)) {
    return super.equals(obj);
  }
  com.google.shopping.merchant.lfp.v1beta.ListLfpStoresRequest other =
      (com.google.shopping.merchant.lfp.v1beta.ListLfpStoresRequest) obj;
  if (!getParent().equals(other.getParent())) return false;
  if (getTargetAccount() != other.getTargetAccount()) return false;
  if (getPageSize() != other.getPageSize()) return false;
  if (!getPageToken().equals(other.getPageToken())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
@java.lang.Override
// Hash over descriptor + every field, consistent with equals(); memoized
// (0 sentinel means "not yet computed").
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + PARENT_FIELD_NUMBER;
  hash = (53 * hash) + getParent().hashCode();
  hash = (37 * hash) + TARGET_ACCOUNT_FIELD_NUMBER;
  hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getTargetAccount());
  hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
  hash = (53 * hash) + getPageSize();
  hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
  hash = (53 * hash) + getPageToken().hashCode();
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Standard generated parse entry points: one overload per input source
// (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with
// and without an ExtensionRegistryLite. All delegate to PARSER.
public static com.google.shopping.merchant.lfp.v1beta.ListLfpStoresRequest parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.shopping.merchant.lfp.v1beta.ListLfpStoresRequest parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.shopping.merchant.lfp.v1beta.ListLfpStoresRequest parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.shopping.merchant.lfp.v1beta.ListLfpStoresRequest parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.shopping.merchant.lfp.v1beta.ListLfpStoresRequest parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.shopping.merchant.lfp.v1beta.ListLfpStoresRequest parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.shopping.merchant.lfp.v1beta.ListLfpStoresRequest parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.shopping.merchant.lfp.v1beta.ListLfpStoresRequest parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
// Delimited variants read a varint length prefix before the message payload.
public static com.google.shopping.merchant.lfp.v1beta.ListLfpStoresRequest parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.shopping.merchant.lfp.v1beta.ListLfpStoresRequest parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}
public static com.google.shopping.merchant.lfp.v1beta.ListLfpStoresRequest parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.shopping.merchant.lfp.v1beta.ListLfpStoresRequest parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}
// Builders are created from the default instance so field defaults are shared.
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
    com.google.shopping.merchant.lfp.v1beta.ListLfpStoresRequest prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
  // Avoid a redundant merge when this IS the default instance.
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
 *
 *
 * <pre>
 * Request message for the ListLfpStores method.
 * </pre>
 *
 * Protobuf type {@code google.shopping.merchant.lfp.v1beta.ListLfpStoresRequest}
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:google.shopping.merchant.lfp.v1beta.ListLfpStoresRequest)
    com.google.shopping.merchant.lfp.v1beta.ListLfpStoresRequestOrBuilder {
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.shopping.merchant.lfp.v1beta.LfpStoreProto
        .internal_static_google_shopping_merchant_lfp_v1beta_ListLfpStoresRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.shopping.merchant.lfp.v1beta.LfpStoreProto
        .internal_static_google_shopping_merchant_lfp_v1beta_ListLfpStoresRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.shopping.merchant.lfp.v1beta.ListLfpStoresRequest.class,
            com.google.shopping.merchant.lfp.v1beta.ListLfpStoresRequest.Builder.class);
  }
  // Construct using com.google.shopping.merchant.lfp.v1beta.ListLfpStoresRequest.newBuilder()
  private Builder() {}
  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
  }
  @java.lang.Override
  // Resets every field to its proto3 default and clears the has-bits.
  public Builder clear() {
    super.clear();
    bitField0_ = 0;
    parent_ = "";
    targetAccount_ = 0L;
    pageSize_ = 0;
    pageToken_ = "";
    return this;
  }
  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.shopping.merchant.lfp.v1beta.LfpStoreProto
        .internal_static_google_shopping_merchant_lfp_v1beta_ListLfpStoresRequest_descriptor;
  }
  @java.lang.Override
  public com.google.shopping.merchant.lfp.v1beta.ListLfpStoresRequest
      getDefaultInstanceForType() {
    return com.google.shopping.merchant.lfp.v1beta.ListLfpStoresRequest.getDefaultInstance();
  }
  @java.lang.Override
  public com.google.shopping.merchant.lfp.v1beta.ListLfpStoresRequest build() {
    com.google.shopping.merchant.lfp.v1beta.ListLfpStoresRequest result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }
  @java.lang.Override
  public com.google.shopping.merchant.lfp.v1beta.ListLfpStoresRequest buildPartial() {
    com.google.shopping.merchant.lfp.v1beta.ListLfpStoresRequest result =
        new com.google.shopping.merchant.lfp.v1beta.ListLfpStoresRequest(this);
    if (bitField0_ != 0) {
      buildPartial0(result);
    }
    onBuilt();
    return result;
  }
  // Copies only fields whose has-bit is set, so untouched fields keep the
  // message's defaults.
  private void buildPartial0(
      com.google.shopping.merchant.lfp.v1beta.ListLfpStoresRequest result) {
    int from_bitField0_ = bitField0_;
    if (((from_bitField0_ & 0x00000001) != 0)) {
      result.parent_ = parent_;
    }
    if (((from_bitField0_ & 0x00000002) != 0)) {
      result.targetAccount_ = targetAccount_;
    }
    if (((from_bitField0_ & 0x00000004) != 0)) {
      result.pageSize_ = pageSize_;
    }
    if (((from_bitField0_ & 0x00000008) != 0)) {
      result.pageToken_ = pageToken_;
    }
  }
  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }
  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
  }
  @java.lang.Override
  public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }
  @java.lang.Override
  public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }
  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }
  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }
  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof com.google.shopping.merchant.lfp.v1beta.ListLfpStoresRequest) {
      return mergeFrom((com.google.shopping.merchant.lfp.v1beta.ListLfpStoresRequest) other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }
  // Merge semantics: only non-default fields of `other` overwrite this builder.
  public Builder mergeFrom(com.google.shopping.merchant.lfp.v1beta.ListLfpStoresRequest other) {
    if (other
        == com.google.shopping.merchant.lfp.v1beta.ListLfpStoresRequest.getDefaultInstance())
      return this;
    if (!other.getParent().isEmpty()) {
      parent_ = other.parent_;
      bitField0_ |= 0x00000001;
      onChanged();
    }
    if (other.getTargetAccount() != 0L) {
      setTargetAccount(other.getTargetAccount());
    }
    if (other.getPageSize() != 0) {
      setPageSize(other.getPageSize());
    }
    if (!other.getPageToken().isEmpty()) {
      pageToken_ = other.pageToken_;
      bitField0_ |= 0x00000008;
      onChanged();
    }
    this.mergeUnknownFields(other.getUnknownFields());
    onChanged();
    return this;
  }
  @java.lang.Override
  public final boolean isInitialized() {
    return true;
  }
  @java.lang.Override
  // Tag-dispatch parse loop; unrecognized tags are preserved as unknown fields.
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            {
              parent_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000001;
              break;
            } // case 10
          case 16:
            {
              targetAccount_ = input.readInt64();
              bitField0_ |= 0x00000002;
              break;
            } // case 16
          case 24:
            {
              pageSize_ = input.readInt32();
              bitField0_ |= 0x00000004;
              break;
            } // case 24
          case 34:
            {
              pageToken_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000008;
              break;
            } // case 34
          default:
            {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
        } // switch (tag)
      } // while (!done)
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.unwrapIOException();
    } finally {
      onChanged();
    } // finally
    return this;
  }
  // Has-bits: 0x1 parent, 0x2 target_account, 0x4 page_size, 0x8 page_token.
  private int bitField0_;
  private java.lang.Object parent_ = "";
  /**
   *
   *
   * <pre>
   * Required. The LFP partner.
   * Format: `accounts/{account}`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (!(ref instanceof java.lang.String)) {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The LFP partner.
   * Format: `accounts/{account}`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The LFP partner.
   * Format: `accounts/{account}`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @param value The parent to set.
   * @return This builder for chaining.
   */
  public Builder setParent(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    parent_ = value;
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Required. The LFP partner.
   * Format: `accounts/{account}`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return This builder for chaining.
   */
  public Builder clearParent() {
    parent_ = getDefaultInstance().getParent();
    bitField0_ = (bitField0_ & ~0x00000001);
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Required. The LFP partner.
   * Format: `accounts/{account}`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @param value The bytes for parent to set.
   * @return This builder for chaining.
   */
  public Builder setParentBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    parent_ = value;
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }
  private long targetAccount_;
  /**
   *
   *
   * <pre>
   * Required. The Merchant Center id of the merchant to list stores for.
   * </pre>
   *
   * <code>int64 target_account = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The targetAccount.
   */
  @java.lang.Override
  public long getTargetAccount() {
    return targetAccount_;
  }
  /**
   *
   *
   * <pre>
   * Required. The Merchant Center id of the merchant to list stores for.
   * </pre>
   *
   * <code>int64 target_account = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @param value The targetAccount to set.
   * @return This builder for chaining.
   */
  public Builder setTargetAccount(long value) {
    targetAccount_ = value;
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Required. The Merchant Center id of the merchant to list stores for.
   * </pre>
   *
   * <code>int64 target_account = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return This builder for chaining.
   */
  public Builder clearTargetAccount() {
    bitField0_ = (bitField0_ & ~0x00000002);
    targetAccount_ = 0L;
    onChanged();
    return this;
  }
  private int pageSize_;
  /**
   *
   *
   * <pre>
   * Optional. The maximum number of `LfpStore` resources for the given account
   * to return. The service returns fewer than this value if the number of
   * stores for the given account is less than the `pageSize`. The default value
   * is 250. The maximum value is 1000; If a value higher than the maximum is
   * specified, then the `pageSize` will default to the maximum.
   * </pre>
   *
   * <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The pageSize.
   */
  @java.lang.Override
  public int getPageSize() {
    return pageSize_;
  }
  /**
   *
   *
   * <pre>
   * Optional. The maximum number of `LfpStore` resources for the given account
   * to return. The service returns fewer than this value if the number of
   * stores for the given account is less than the `pageSize`. The default value
   * is 250. The maximum value is 1000; If a value higher than the maximum is
   * specified, then the `pageSize` will default to the maximum.
   * </pre>
   *
   * <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @param value The pageSize to set.
   * @return This builder for chaining.
   */
  public Builder setPageSize(int value) {
    pageSize_ = value;
    bitField0_ |= 0x00000004;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Optional. The maximum number of `LfpStore` resources for the given account
   * to return. The service returns fewer than this value if the number of
   * stores for the given account is less than the `pageSize`. The default value
   * is 250. The maximum value is 1000; If a value higher than the maximum is
   * specified, then the `pageSize` will default to the maximum.
   * </pre>
   *
   * <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return This builder for chaining.
   */
  public Builder clearPageSize() {
    bitField0_ = (bitField0_ & ~0x00000004);
    pageSize_ = 0;
    onChanged();
    return this;
  }
  private java.lang.Object pageToken_ = "";
  /**
   *
   *
   * <pre>
   * Optional. A page token, received from a previous `ListLfpStoresRequest`
   * call. Provide the page token to retrieve the subsequent page. When
   * paginating, all other parameters provided to `ListLfpStoresRequest` must
   * match the call that provided the page token. The token returned as
   * [nextPageToken][google.shopping.merchant.lfp.v1beta.ListLfpStoresResponse.next_page_token]
   * in the response to the previous request.
   * </pre>
   *
   * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The pageToken.
   */
  public java.lang.String getPageToken() {
    java.lang.Object ref = pageToken_;
    if (!(ref instanceof java.lang.String)) {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      pageToken_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }
  /**
   *
   *
   * <pre>
   * Optional. A page token, received from a previous `ListLfpStoresRequest`
   * call. Provide the page token to retrieve the subsequent page. When
   * paginating, all other parameters provided to `ListLfpStoresRequest` must
   * match the call that provided the page token. The token returned as
   * [nextPageToken][google.shopping.merchant.lfp.v1beta.ListLfpStoresResponse.next_page_token]
   * in the response to the previous request.
   * </pre>
   *
   * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The bytes for pageToken.
   */
  public com.google.protobuf.ByteString getPageTokenBytes() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      pageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  /**
   *
   *
   * <pre>
   * Optional. A page token, received from a previous `ListLfpStoresRequest`
   * call. Provide the page token to retrieve the subsequent page. When
   * paginating, all other parameters provided to `ListLfpStoresRequest` must
   * match the call that provided the page token. The token returned as
   * [nextPageToken][google.shopping.merchant.lfp.v1beta.ListLfpStoresResponse.next_page_token]
   * in the response to the previous request.
   * </pre>
   *
   * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @param value The pageToken to set.
   * @return This builder for chaining.
   */
  public Builder setPageToken(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    pageToken_ = value;
    bitField0_ |= 0x00000008;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Optional. A page token, received from a previous `ListLfpStoresRequest`
   * call. Provide the page token to retrieve the subsequent page. When
   * paginating, all other parameters provided to `ListLfpStoresRequest` must
   * match the call that provided the page token. The token returned as
   * [nextPageToken][google.shopping.merchant.lfp.v1beta.ListLfpStoresResponse.next_page_token]
   * in the response to the previous request.
   * </pre>
   *
   * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return This builder for chaining.
   */
  public Builder clearPageToken() {
    pageToken_ = getDefaultInstance().getPageToken();
    bitField0_ = (bitField0_ & ~0x00000008);
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Optional. A page token, received from a previous `ListLfpStoresRequest`
   * call. Provide the page token to retrieve the subsequent page. When
   * paginating, all other parameters provided to `ListLfpStoresRequest` must
   * match the call that provided the page token. The token returned as
   * [nextPageToken][google.shopping.merchant.lfp.v1beta.ListLfpStoresResponse.next_page_token]
   * in the response to the previous request.
   * </pre>
   *
   * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @param value The bytes for pageToken to set.
   * @return This builder for chaining.
   */
  public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    pageToken_ = value;
    bitField0_ |= 0x00000008;
    onChanged();
    return this;
  }
  @java.lang.Override
  public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }
  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }
  // @@protoc_insertion_point(builder_scope:google.shopping.merchant.lfp.v1beta.ListLfpStoresRequest)
}
// @@protoc_insertion_point(class_scope:google.shopping.merchant.lfp.v1beta.ListLfpStoresRequest)
// Shared singleton with all fields at their proto3 defaults.
private static final com.google.shopping.merchant.lfp.v1beta.ListLfpStoresRequest
    DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new com.google.shopping.merchant.lfp.v1beta.ListLfpStoresRequest();
}
public static com.google.shopping.merchant.lfp.v1beta.ListLfpStoresRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Parses by delegating to the Builder's tag-dispatch mergeFrom; on failure the
// partially-built message is attached to the thrown exception.
private static final com.google.protobuf.Parser<ListLfpStoresRequest> PARSER =
    new com.google.protobuf.AbstractParser<ListLfpStoresRequest>() {
      @java.lang.Override
      public ListLfpStoresRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
public static com.google.protobuf.Parser<ListLfpStoresRequest> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListLfpStoresRequest> getParserForType() {
  return PARSER;
}
@java.lang.Override
public com.google.shopping.merchant.lfp.v1beta.ListLfpStoresRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
hibernate/hibernate-orm | 34,221 | hibernate-core/src/main/java/org/hibernate/boot/model/internal/GeneratorBinder.java | /*
* SPDX-License-Identifier: Apache-2.0
* Copyright Red Hat Inc. and Hibernate Authors
*/
package org.hibernate.boot.model.internal;
import jakarta.persistence.GeneratedValue;
import jakarta.persistence.GenerationType;
import jakarta.persistence.SequenceGenerator;
import jakarta.persistence.TableGenerator;
import jakarta.persistence.Version;
import org.hibernate.AnnotationException;
import org.hibernate.AssertionFailure;
import org.hibernate.MappingException;
import org.hibernate.annotations.GenericGenerator;
import org.hibernate.annotations.IdGeneratorType;
import org.hibernate.annotations.ValueGenerationType;
import org.hibernate.boot.model.IdentifierGeneratorDefinition;
import org.hibernate.boot.model.relational.ExportableProducer;
import org.hibernate.boot.model.source.internal.hbm.MappingDocument;
import org.hibernate.boot.models.spi.GlobalRegistrar;
import org.hibernate.boot.spi.MetadataBuildingContext;
import org.hibernate.boot.spi.PropertyData;
import org.hibernate.engine.config.spi.ConfigurationService;
import org.hibernate.generator.AnnotationBasedGenerator;
import org.hibernate.generator.Assigned;
import org.hibernate.generator.BeforeExecutionGenerator;
import org.hibernate.generator.Generator;
import org.hibernate.generator.GeneratorCreationContext;
import org.hibernate.generator.OnExecutionGenerator;
import org.hibernate.id.Configurable;
import org.hibernate.id.IdentifierGenerator;
import org.hibernate.id.IdentityGenerator;
import org.hibernate.id.PersistentIdentifierGenerator;
import org.hibernate.id.enhanced.SequenceStyleGenerator;
import org.hibernate.id.uuid.UuidValueGenerator;
import org.hibernate.mapping.GeneratorCreator;
import org.hibernate.mapping.KeyValue;
import org.hibernate.mapping.PersistentClass;
import org.hibernate.mapping.SimpleValue;
import org.hibernate.mapping.Value;
import org.hibernate.models.spi.AnnotationTarget;
import org.hibernate.models.spi.MemberDetails;
import org.hibernate.resource.beans.container.spi.BeanContainer;
import org.hibernate.resource.beans.internal.Helper;
import java.lang.annotation.Annotation;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Member;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.Properties;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
import static jakarta.persistence.GenerationType.AUTO;
import static java.util.Collections.emptyMap;
import static org.hibernate.boot.model.internal.AnnotationHelper.extractParameterMap;
import static org.hibernate.boot.model.internal.BinderHelper.getPath;
import static org.hibernate.boot.model.internal.BinderHelper.isGlobalGeneratorNameGlobal;
import static org.hibernate.boot.model.internal.GeneratorParameters.collectParameters;
import static org.hibernate.boot.model.internal.GeneratorParameters.interpretSequenceGenerator;
import static org.hibernate.boot.model.internal.GeneratorParameters.interpretTableGenerator;
import static org.hibernate.boot.model.internal.GeneratorStrategies.generatorClass;
import static org.hibernate.id.IdentifierGenerator.GENERATOR_NAME;
import static org.hibernate.boot.BootLogging.BOOT_LOGGER;
import static org.hibernate.internal.util.NullnessUtil.castNonNull;
import static org.hibernate.internal.util.StringHelper.isNotEmpty;
import static org.hibernate.internal.util.StringHelper.qualify;
import static org.hibernate.internal.util.collections.CollectionHelper.combineUntyped;
import static org.hibernate.resource.beans.internal.Helper.getBean;
/**
* Responsible for configuring and instantiating {@link Generator}s.
*
* @author Gavin King
*/
public class GeneratorBinder {
// Well-known name for the "assigned" identifier strategy, where the
// application sets the id itself before persisting.
public static final String ASSIGNED_GENERATOR_NAME = "assigned";
// Creator producing an Assigned generator; isAssigned() reports true so the
// engine knows the id is supplied by the application rather than generated.
public static final GeneratorCreator ASSIGNED_IDENTIFIER_GENERATOR_CREATOR =
        new GeneratorCreator() {
            @Override
            public Generator createGenerator(GeneratorCreationContext context) {
                return new Assigned();
            }
            @Override
            public boolean isAssigned() {
                return true;
            }
        };
/**
 * Create a generator, based on a {@link GeneratedValue} annotation.
 *
 * @param identifierValue the identifier value whose generator creator is being set
 * @param idMember the persistent member mapped as the entity identifier
 * @param generatorType the strategy implied by {@link GeneratedValue#strategy}
 * @param generatorName the name from {@link GeneratedValue#generator}, possibly empty
 * @param context the current metadata building context
 * @param localGenerators generator definitions in the local scope, keyed by name
 *
 * @throws MappingException if the generator name is blank (whitespace-only)
 */
public static void makeIdGenerator(
        SimpleValue identifierValue,
        MemberDetails idMember,
        String generatorType,
        String generatorName,
        MetadataBuildingContext context,
        Map<String, ? extends IdentifierGeneratorDefinition> localGenerators) {
    //generator settings
    final var configuration = initializeGeneratorSettings( identifierValue, generatorName );
    final String generatorStrategy;
    if ( generatorName.isEmpty() ) {
        if ( idMember.hasDirectAnnotationUsage( GeneratedValue.class )
                && handleDefaultGenerator( identifierValue, context, localGenerators, idMember, configuration ) ) {
            // we found an appropriate "default" generator (as per JPA 3.2)
            return; // EARLY EXIT
        }
        else {
            generatorStrategy = generatorType;
        }
    }
    else if ( generatorName.isBlank() ) {
        // non-empty but whitespace-only: almost certainly a mapping mistake
        throw new MappingException( "Generator name cannot be blank" );
    }
    else {
        //we have a named generator
        generatorStrategy = determineStrategy(
                idMember,
                generatorType,
                generatorName,
                context,
                localGenerators,
                configuration
        );
    }
    setGeneratorCreator( identifierValue, configuration, generatorStrategy, context );
}
	/**
	 * Called if {@link GeneratedValue} specified no name.
	 * This is a new special case added in JPA 3.2.
	 * We look for an appropriate matching "default generator recipe"
	 * based on the {@link GenerationType}.
	 *
	 * @return {@code true} if a matching local generator definition was found
	 *         and instantiated; {@code false} if the caller must fall back to
	 *         the explicitly-specified strategy
	 */
	private static boolean handleDefaultGenerator(
			SimpleValue identifierValue,
			MetadataBuildingContext context,
			Map<String, ? extends IdentifierGeneratorDefinition> localGenerators,
			MemberDetails idMember,
			Map<String, Object> configuration) {
		// the GenerationType requested by @GeneratedValue on the id attribute
		final var strategy = idMember.getDirectAnnotationUsage( GeneratedValue.class ).strategy();
		final String strategyGeneratorClassName = correspondingGeneratorName( strategy );
		final var impliedGenerator =
				determineImpliedGenerator( strategy, strategyGeneratorClassName, localGenerators );
		if ( impliedGenerator != null ) {
			// adopt the matched definition's parameters and instantiate it directly
			configuration.putAll( impliedGenerator.getParameters() );
			instantiateNamedStrategyGenerator( identifierValue, strategyGeneratorClassName, configuration, context );
			return true;
		}
		else {
			return false;
		}
	}
	/**
	 * Seed the generator configuration map with the generator name, the table
	 * name, and (for single-column ids) the primary key column name.
	 */
	private static Map<String, Object> initializeGeneratorSettings(SimpleValue identifierValue, String generatorName) {
		final Map<String,Object> configuration = new HashMap<>();
		configuration.put( GENERATOR_NAME, generatorName );
		configuration.put( PersistentIdentifierGenerator.TABLE, identifierValue.getTable().getName() );
		if ( identifierValue.getColumnSpan() == 1 ) {
			// the PK column name is only well-defined for a single-column id
			configuration.put( PersistentIdentifierGenerator.PK, identifierValue.getColumns().get(0).getName() );
		}
		return configuration;
	}
	/**
	 * Find the local generator definition implied by the given strategy, if any.
	 * A single local definition is treated specially: it matches whenever the
	 * strategy is {@code AUTO} or its strategy class name matches.
	 *
	 * @return the implied definition, or {@code null} when there is no match
	 *         (or the match is ambiguous)
	 */
	private static IdentifierGeneratorDefinition determineImpliedGenerator(
			GenerationType strategy,
			String strategyGeneratorClassName,
			Map<String, ? extends IdentifierGeneratorDefinition> localGenerators) {
		if ( localGenerators == null ) {
			return null;
		}
		if ( localGenerators.size() == 1 ) {
			final var generatorDefinition = localGenerators.values().iterator().next();
			// NOTE: a little bit of a special rule here for the case of just one -
			// consider it a match, based on strategy, if the strategy is AUTO or matches
			if ( strategy == AUTO
					|| isImpliedGenerator( strategy, strategyGeneratorClassName, generatorDefinition ) ) {
				return generatorDefinition;
			}
		}
		return matchingLocalGenerator( strategy, strategyGeneratorClassName, localGenerators );
	}
	/**
	 * Find the unique local generator definition whose strategy matches the
	 * given generator class name.
	 *
	 * @return the single matching definition, or {@code null} when there is
	 *         no match or more than one match
	 */
	private static IdentifierGeneratorDefinition matchingLocalGenerator(
			GenerationType strategy,
			String strategyGeneratorClassName,
			Map<String, ? extends IdentifierGeneratorDefinition> localGenerators) {
		IdentifierGeneratorDefinition matching = null;
		for ( var localGenerator : localGenerators.values() ) {
			if ( isImpliedGenerator( strategy, strategyGeneratorClassName, localGenerator ) ) {
				if ( matching != null ) {
					// we found multiple matching generators
					return null;
				}
				matching = localGenerator;
			}
		}
		return matching;
	}
	/**
	 * Does the given definition's strategy equal the generator class name
	 * implied by the {@link GenerationType}?
	 */
	// NOTE(review): the 'strategy' parameter is currently unused here — the
	// comparison relies only on the implied class name; confirm this is intended
	private static boolean isImpliedGenerator(
			GenerationType strategy,
			String strategyGeneratorClassName,
			IdentifierGeneratorDefinition generatorDefinition) {
		return generatorDefinition.getStrategy().equals( strategyGeneratorClassName );
	}
	/**
	 * The generator class name corresponding to the given {@link GenerationType};
	 * {@code null} for {@code IDENTITY}, which is not mapped to a class here.
	 */
	private static String correspondingGeneratorName(GenerationType strategy) {
		return switch ( strategy ) {
//			case UUID -> org.hibernate.id.uuid.UuidGenerator.class.getName();
			case UUID -> UuidValueGenerator.class.getName();
			case TABLE -> org.hibernate.id.enhanced.TableGenerator.class.getName();
			case IDENTITY -> null;
			default -> SequenceStyleGenerator.class.getName();
		};
	}
	/**
	 * Resolve the strategy for a named generator, looking the name up among
	 * local and global definitions, and merging the definition's parameters
	 * into the given configuration map.
	 *
	 * @throws AnnotationException if no generator with the given name was declared
	 */
	private static String determineStrategy(
			MemberDetails idAttributeMember,
			String generatorType,
			String generatorName,
			MetadataBuildingContext context,
			Map<String, ? extends IdentifierGeneratorDefinition> localGenerators,
			Map<String, Object> configuration) {
		final var definition =
				makeIdentifierGeneratorDefinition( generatorName, idAttributeMember, localGenerators, context );
		if ( definition == null ) {
			throw new AnnotationException( "No id generator was declared with the name '" + generatorName
					+ "' specified by '@GeneratedValue'"
					+ " (define a named generator using '@SequenceGenerator', '@TableGenerator', or '@GenericGenerator')" );
		}
		// the named definition contributes its parameters to the configuration
		configuration.putAll( definition.getParameters() );
		// This is quite vague in the spec, but a generator could override the generator choice
		return generatorStrategy( generatorType, definition );
	}
	/**
	 * Prefer the annotation-specified strategy over the definition's, but
	 * only when the definition's strategy is the legacy "identity".
	 */
	private static String generatorStrategy(String generatorType, IdentifierGeneratorDefinition definition) {
		return generatorType != null
			// Yuck! this is a hack to not override 'AUTO',
			// even if GeneratedValue.generator is specified
			&& definition.getStrategy().equals( "identity" )
				? generatorType
				: definition.getStrategy();
	}
	/**
	 * Look up the named generator definition, first among the local
	 * definitions, then in the global registry; failing both, create an
	 * implicit definition from the {@link GeneratedValue} annotation.
	 *
	 * @throws AssertionFailure if both lookups miss and there is no
	 *         {@code @GeneratedValue} annotation to derive a definition from
	 */
	private static IdentifierGeneratorDefinition makeIdentifierGeneratorDefinition(
			String name,
			MemberDetails idAttributeMember,
			Map<String, ? extends IdentifierGeneratorDefinition> localGenerators,
			MetadataBuildingContext buildingContext) {
		// local definitions shadow global ones
		if ( localGenerators != null ) {
			final var result = localGenerators.get( name );
			if ( result != null ) {
				return result;
			}
		}
		final var globalDefinition =
				buildingContext.getMetadataCollector()
						.getIdentifierGenerator( name );
		if ( globalDefinition != null ) {
			return globalDefinition;
		}
		else {
			final var generatedValue = idAttributeMember.getDirectAnnotationUsage( GeneratedValue.class );
			if ( generatedValue == null ) {
				throw new AssertionFailure( "No @GeneratedValue annotation" );
			}
			return IdentifierGeneratorDefinition.createImplicit(
					name,
					idAttributeMember.getType(),
					generatedValue.generator(),
					interpretGenerationType( generatedValue )
			);
		}
	}
	/**
	 * The {@link GenerationType} from the annotation, defaulting to
	 * {@code AUTO} when absent.
	 */
	private static GenerationType interpretGenerationType(GeneratedValue generatedValueAnn) {
		// todo (jpa32) : when can this ever be null?
		final var strategy = generatedValueAnn.strategy();
		return strategy == null ? AUTO : strategy;
	}
	/**
	 * Visit each generator definition — {@link TableGenerator},
	 * {@link SequenceGenerator}, and {@link GenericGenerator} — declared
	 * on the given annotated element, feeding each to the consumer.
	 */
	public static void visitIdGeneratorDefinitions(
			AnnotationTarget annotatedElement,
			Consumer<IdentifierGeneratorDefinition> consumer,
			MetadataBuildingContext buildingContext) {
		final var modelsContext = buildingContext.getBootstrapContext().getModelsContext();
		annotatedElement.forEachAnnotationUsage( TableGenerator.class, modelsContext,
				usage -> consumer.accept( buildTableIdGenerator( usage ) ) );
		annotatedElement.forEachAnnotationUsage( SequenceGenerator.class, modelsContext,
				usage -> consumer.accept( buildSequenceIdGenerator( usage ) ) );
		annotatedElement.forEachAnnotationUsage( GenericGenerator.class, modelsContext,
				usage -> consumer.accept( buildIdGenerator( usage ) ) );
	}
	/**
	 * Register every named generator declared on the given element with the
	 * global registry, but only when JPA-compliant global generator scope
	 * is enabled.
	 */
	public static void registerGlobalGenerators(
			AnnotationTarget annotatedElement,
			MetadataBuildingContext context) {
		if ( context.getBootstrapContext().getJpaCompliance().isGlobalGeneratorScopeEnabled() ) {
			final var metadataCollector = context.getMetadataCollector();
			visitIdGeneratorDefinitions(
					annotatedElement,
					definition -> {
						// unnamed definitions cannot be registered globally
						if ( !definition.getName().isEmpty() ) {
							metadataCollector.addIdentifierGenerator( definition );
						}
					},
					context
			);
		}
	}
private static IdentifierGeneratorDefinition buildIdGenerator(GenericGenerator generatorAnnotation) {
final var definitionBuilder = genericDefinitionBuilder( generatorAnnotation );
if ( BOOT_LOGGER.isTraceEnabled() ) {
BOOT_LOGGER.addedGenerator( definitionBuilder.getName(), definitionBuilder.getStrategy() );
}
return definitionBuilder.build();
}
private static IdentifierGeneratorDefinition.Builder genericDefinitionBuilder(GenericGenerator generatorAnnotation) {
final var definitionBuilder = new IdentifierGeneratorDefinition.Builder();
definitionBuilder.setName( generatorAnnotation.name() );
final var generatorClass = generatorAnnotation.type();
final String strategy =
generatorClass.equals( Generator.class )
? generatorAnnotation.strategy()
: generatorClass.getName();
definitionBuilder.setStrategy( strategy );
definitionBuilder.addParams( extractParameterMap( generatorAnnotation.parameters() ) );
return definitionBuilder;
}
private static IdentifierGeneratorDefinition buildSequenceIdGenerator(SequenceGenerator generatorAnnotation) {
final var definitionBuilder = new IdentifierGeneratorDefinition.Builder();
interpretSequenceGenerator( generatorAnnotation, definitionBuilder );
if ( BOOT_LOGGER.isTraceEnabled() ) {
BOOT_LOGGER.addedSequenceGenerator( definitionBuilder.getName() );
}
return definitionBuilder.build();
}
private static IdentifierGeneratorDefinition buildTableIdGenerator(TableGenerator generatorAnnotation) {
final var definitionBuilder = new IdentifierGeneratorDefinition.Builder();
interpretTableGenerator( generatorAnnotation, definitionBuilder );
if ( BOOT_LOGGER.isTraceEnabled() ) {
BOOT_LOGGER.addedTableGenerator( definitionBuilder.getName() );
}
return definitionBuilder.build();
}
private static void checkGeneratorClass(Class<? extends Generator> generatorClass) {
if ( !BeforeExecutionGenerator.class.isAssignableFrom( generatorClass )
&& !OnExecutionGenerator.class.isAssignableFrom( generatorClass ) ) {
throw new MappingException( "Generator class '" + generatorClass.getName()
+ "' must implement either 'BeforeExecutionGenerator' or 'OnExecutionGenerator'" );
}
}
private static void checkGeneratorInterfaces(Class<? extends Generator> generatorClass) {
// A regular value generator should not implement legacy IdentifierGenerator
if ( IdentifierGenerator.class.isAssignableFrom( generatorClass ) ) {
throw new AnnotationException( "Generator class '" + generatorClass.getName()
+ "' implements 'IdentifierGenerator' and may not be used with '@ValueGenerationType'" );
}
}
	/**
	 * Return a {@link GeneratorCreator} for an attribute annotated
	 * with a {@linkplain ValueGenerationType generator annotation}.
	 *
	 * @param memberDetails the generated attribute
	 * @param value the mapping value of the attribute
	 * @param annotation the generator annotation found on the attribute
	 * @param beanContainer an optional {@code BeanContainer}
	 */
	private static GeneratorCreator generatorCreator(
			MemberDetails memberDetails,
			Value value,
			Annotation annotation,
			BeanContainer beanContainer) {
		final var annotationType = annotation.annotationType();
		final var generatorAnnotation = annotationType.getAnnotation( ValueGenerationType.class );
		assert generatorAnnotation != null;
		final var generatorClass = generatorAnnotation.generatedBy();
		// validate the generator class eagerly, before returning the lazy creator
		checkGeneratorClass( generatorClass );
		checkGeneratorInterfaces( generatorClass );
		return creationContext -> {
			final var generator =
					instantiateAndInitializeGenerator(
							value,
							annotation,
							beanContainer,
							creationContext,
							generatorClass,
							memberDetails,
							annotationType
					);
			// a generator on a @Version attribute must generate on insert and update
			checkVersionGenerationAlways( memberDetails, generator );
			return generator;
		};
	}
	/**
	 * Instantiate the generator, then run its initialization and
	 * configuration callbacks.
	 */
	private static Generator instantiateAndInitializeGenerator(
			Value value,
			Annotation annotation,
			BeanContainer beanContainer,
			GeneratorCreationContext creationContext,
			Class<? extends Generator> generatorClass,
			MemberDetails memberDetails,
			Class<? extends Annotation> annotationType) {
		final var generator = instantiateGenerator(
				annotation,
				beanContainer,
				creationContext,
				generatorClass,
				memberDetails,
				annotationType
		);
		callInitialize( annotation, memberDetails, creationContext, generator );
		// no extra parameters on this path, hence the empty configuration map
		callConfigure( creationContext, generator, emptyMap(), value );
		return generator;
	}
	/**
	 * Return a {@link GeneratorCreator} for an id attribute annotated
	 * with an {@linkplain IdGeneratorType id generator annotation}.
	 *
	 * @param idAttributeMember the identifier attribute
	 * @param annotation the id generator annotation found on the attribute
	 * @param identifierValue the mapping value of the identifier
	 * @param beanContainer an optional {@code BeanContainer}
	 */
	private static GeneratorCreator identifierGeneratorCreator(
			MemberDetails idAttributeMember,
			Annotation annotation,
			SimpleValue identifierValue,
			BeanContainer beanContainer) {
		final var annotationType = annotation.annotationType();
		final var idGeneratorAnnotation = annotationType.getAnnotation( IdGeneratorType.class );
		assert idGeneratorAnnotation != null;
		final var generatorClass = idGeneratorAnnotation.value();
		checkGeneratorClass( generatorClass );
		return creationContext -> {
			final Generator generator =
					instantiateAndInitializeGenerator(
							identifierValue,
							annotation,
							beanContainer,
							creationContext,
							generatorClass,
							idAttributeMember,
							annotationType
					);
			// id generators must generate on insert, and only on insert
			checkIdGeneratorTiming( annotationType, generator );
			return generator;
		};
	}
	/**
	 * Instantiate a {@link Generator}, using the given {@link BeanContainer} if any,
	 * for the case where the generator was specified using a generator annotation.
	 *
	 * @param annotation the generator annotation
	 * @param beanContainer an optional {@code BeanContainer}
	 * @param generatorClass a class which implements {@code Generator}
	 */
	private static Generator instantiateGenerator(
			Annotation annotation,
			BeanContainer beanContainer,
			GeneratorCreationContext creationContext,
			Class<? extends Generator> generatorClass,
			MemberDetails memberDetails,
			Class<? extends Annotation> annotationType) {
		if ( beanContainer != null ) {
			// delegate instantiation to the container (e.g. CDI/Spring)
			return instantiateGeneratorAsBean(
					annotation,
					beanContainer,
					creationContext,
					generatorClass,
					memberDetails,
					annotationType
			);
		}
		else {
			// no container: instantiate reflectively via a suitable constructor
			return instantiateGenerator(
					annotation,
					memberDetails,
					annotationType,
					creationContext,
					generatorClass
			);
		}
	}
	/**
	 * Instantiate a {@link Generator}, using the given {@link BeanContainer},
	 * for the case where the generator was specified using a generator annotation.
	 * A reflective fallback supplier is passed to {@code getBean}.
	 *
	 * @param annotation the generator annotation
	 * @param beanContainer an optional {@code BeanContainer}
	 * @param generatorClass a class which implements {@code Generator}
	 */
	private static <T extends Generator> Generator instantiateGeneratorAsBean(
			Annotation annotation,
			BeanContainer beanContainer,
			GeneratorCreationContext creationContext,
			Class<T> generatorClass,
			MemberDetails memberDetails,
			Class<? extends Annotation> annotationType) {
		return getBean(
				beanContainer,
				generatorClass,
				false,
				true,
				() -> instantiateGenerator(
						annotation,
						memberDetails,
						annotationType,
						creationContext,
						generatorClass
				)
		);
	}
	/**
	 * Instantiate a {@link Generator}, using the given {@link BeanContainer},
	 * for the case where no generator annotation is available.
	 * A default-constructor fallback supplier is passed to {@code getBean}.
	 *
	 * @param beanContainer an optional {@code BeanContainer}
	 * @param generatorClass a class which implements {@code Generator}
	 */
	private static <T extends Generator> T instantiateGeneratorAsBean(
			BeanContainer beanContainer,
			Class<T> generatorClass) {
		return getBean(
				beanContainer,
				generatorClass,
				false,
				true,
				() -> instantiateGeneratorViaDefaultConstructor( generatorClass )
		);
	}
	/**
	 * Instantiate a {@link Generator} by calling an appropriate constructor,
	 * for the case where the generator was specified using a generator annotation.
	 * We look for three possible signatures:
	 * <ol>
	 *     <li>{@code (Annotation, Member, GeneratorCreationContext)}</li>
	 *     <li>{@code (Annotation)}</li>
	 *     <li>{@code ()}</li>
	 * </ol>
	 * where {@code Annotation} is the generator annotation type.
	 *
	 * @param annotation the generator annotation
	 * @param generatorClass a class which implements {@code Generator}
	 * @throws org.hibernate.InstantiationException if construction fails
	 */
	private static <G extends Generator> G instantiateGenerator(
			Annotation annotation,
			MemberDetails memberDetails,
			Class<? extends Annotation> annotationType,
			GeneratorCreationContext creationContext,
			Class<? extends G> generatorClass) {
		try {
			try {
				// preferred: constructor taking annotation, member, and creation context
				return generatorClass.getConstructor( annotationType, Member.class, GeneratorCreationContext.class )
						.newInstance( annotation, memberDetails.toJavaMember(), creationContext);
			}
			catch (NoSuchMethodException ignore) {
				try {
					// next: constructor taking just the annotation
					return generatorClass.getConstructor( annotationType )
							.newInstance( annotation );
				}
				catch (NoSuchMethodException i) {
					// last resort: default constructor
					return instantiateGeneratorViaDefaultConstructor( generatorClass );
				}
			}
		}
		catch (InvocationTargetException | InstantiationException | IllegalAccessException | IllegalArgumentException e) {
			throw new org.hibernate.InstantiationException( "Could not instantiate id generator", generatorClass, e );
		}
	}
/**
* Instantiate a {@link Generator}, using the given {@link BeanContainer} if any,
* or by calling the default constructor otherwise.
*
* @param beanContainer an optional {@code BeanContainer}
* @param generatorClass a class which implements {@code Generator}
*/
public static <T extends Generator> T instantiateGenerator(BeanContainer beanContainer, Class<T> generatorClass) {
return beanContainer != null
? instantiateGeneratorAsBean( beanContainer, generatorClass )
: instantiateGeneratorViaDefaultConstructor( generatorClass );
}
/**
* Instantiate a {@link Generator} by calling the default constructor.
*/
private static <G extends Generator> G instantiateGeneratorViaDefaultConstructor(Class<? extends G> generatorClass) {
try {
return generatorClass.getDeclaredConstructor().newInstance();
}
catch (NoSuchMethodException e) {
throw new org.hibernate.InstantiationException( "No appropriate constructor for id generator class", generatorClass);
}
catch (Exception e) {
throw new org.hibernate.InstantiationException( "Could not instantiate id generator", generatorClass, e );
}
}
	/**
	 * If the given {@link Generator} is an {@link AnnotationBasedGenerator},
	 * call its {@code initialize()} method with the annotation instance.
	 */
	public static <A extends Annotation> void callInitialize(
			A annotation,
			MemberDetails memberDetails,
			GeneratorCreationContext creationContext,
			Generator generator) {
		if ( generator instanceof AnnotationBasedGenerator ) {
			// This will cause a CCE in case the generation type doesn't match the annotation type; As this would be
			// a programming error of the generation type developer and thus should show up during testing, we don't
			// check this explicitly; If required, this could be done e.g. using ClassMate
			@SuppressWarnings("unchecked")
			final var generation = (AnnotationBasedGenerator<A>) generator;
			generation.initialize( annotation, memberDetails.toJavaMember(), creationContext );
		}
	}
	/**
	 * A generator attached to a {@link Version} attribute must generate on
	 * every insert and every update.
	 */
	private static void checkVersionGenerationAlways(MemberDetails property, Generator generator) {
		if ( property.hasDirectAnnotationUsage( Version.class ) ) {
			if ( !generator.generatesOnInsert() ) {
				throw new AnnotationException("Property '" + property.getName()
						+ "' is annotated '@Version' but has a 'Generator' which does not generate on inserts"
				);
			}
			if ( !generator.generatesOnUpdate() ) {
				throw new AnnotationException("Property '" + property.getName()
						+ "' is annotated '@Version' but has a 'Generator' which does not generate on updates"
				);
			}
		}
	}
	/**
	 * If the given {@link Generator} also implements {@link Configurable},
	 * call its {@link Configurable#configure(GeneratorCreationContext, Properties)
	 * configure()} method.
	 */
	public static void callConfigure(
			GeneratorCreationContext creationContext,
			Generator generator,
			Map<String, Object> configuration,
			Value value) {
		if ( generator instanceof Configurable configurable ) {
			final var parameters = collectParameters(
					value,
					creationContext.getDatabase().getDialect(),
					creationContext.getRootClass(),
					configuration,
					creationContext.getServiceRegistry()
							.requireService( ConfigurationService.class )
			);
			configurable.configure( creationContext, parameters );
		}
		if ( generator instanceof ExportableProducer exportableProducer ) {
			// let the generator contribute its database objects (sequences, tables)
			exportableProducer.registerExportables( creationContext.getDatabase() );
		}
		// note: initialize() is deliberately invoked after registerExportables()
		if ( generator instanceof Configurable configurable ) {
			configurable.initialize( creationContext.getSqlStringGenerationContext() );
		}
	}
	/**
	 * An id generator must generate on insert, and must not generate on update.
	 */
	private static void checkIdGeneratorTiming(Class<? extends Annotation> annotationType, Generator generator) {
		if ( !generator.generatesOnInsert() ) {
			throw new MappingException( "Annotation '" + annotationType
					+ "' is annotated 'IdGeneratorType' but the given 'Generator' does not generate on inserts" );
		}
		if ( generator.generatesOnUpdate() ) {
			throw new MappingException( "Annotation '" + annotationType
					+ "' is annotated 'IdGeneratorType' but the given 'Generator' generates on updates (it must generate only on inserts)" );
		}
	}
	/**
	 * Create a generator, based on a {@link GeneratedValue} annotation.
	 * Resolution happens in a second pass; which pass depends on whether
	 * JPA-compliant global generator scope is enabled.
	 */
	private static void createIdGenerator(
			MemberDetails idMember,
			SimpleValue idValue,
			PersistentClass persistentClass,
			MetadataBuildingContext context) {
		// NOTE: `generatedValue` is never null here
		final var generatedValue = castNonNull( idMember.getDirectAnnotationUsage( GeneratedValue.class ) );
		final var metadataCollector = context.getMetadataCollector();
		if ( isGlobalGeneratorNameGlobal( context ) ) {
			// process and register any generators defined on the member.
			// according to JPA these are also global.
			metadataCollector.getGlobalRegistrations().as( GlobalRegistrar.class ).collectIdGenerators( idMember );
			metadataCollector.addSecondPass( new StrictIdGeneratorResolverSecondPass(
					persistentClass,
					idValue,
					idMember,
					generatedValue,
					context
			) );
		}
		else {
			metadataCollector.addSecondPass( new IdGeneratorResolverSecondPass(
					persistentClass,
					idValue,
					idMember,
					generatedValue,
					context
			) );
		}
	}
	/**
	 * Instantiate the generator described by the given definition, merging
	 * the definition's parameters into the given configuration map.
	 */
	public static void createGeneratorFrom(
			IdentifierGeneratorDefinition defaultedGenerator,
			SimpleValue idValue,
			Map<String, Object> configuration,
			MetadataBuildingContext context) {
		configuration.putAll( defaultedGenerator.getParameters() );
		instantiateNamedStrategyGenerator( idValue, defaultedGenerator.getStrategy(), configuration, context );
	}
	/**
	 * Instantiate the generator described by the given definition, with a
	 * freshly-built configuration map.
	 */
	public static void createGeneratorFrom(
			IdentifierGeneratorDefinition defaultedGenerator,
			SimpleValue idValue,
			MetadataBuildingContext context) {
		createGeneratorFrom( defaultedGenerator, idValue, buildConfigurationMap( idValue ), context );
	}
	/**
	 * Seed a configuration map with the table name and (for single-column
	 * ids) the primary key column name.
	 */
	private static Map<String, Object> buildConfigurationMap(KeyValue idValue) {
		final Map<String,Object> configuration = new HashMap<>();
		configuration.put( PersistentIdentifierGenerator.TABLE, idValue.getTable().getName() );
		if ( idValue.getColumnSpan() == 1 ) {
			// the PK column name is only well-defined for a single-column id
			configuration.put( PersistentIdentifierGenerator.PK, idValue.getColumns().get(0).getName() );
		}
		return configuration;
	}
	/**
	 * Set up the identifier generator for an id defined in a {@code hbm.xml} mapping.
	 *
	 * @see org.hibernate.boot.model.source.internal.hbm.ModelBinder
	 */
	public static void makeIdGenerator(
			final MappingDocument sourceDocument,
			IdentifierGeneratorDefinition definition,
			SimpleValue identifierValue,
			MetadataBuildingContext context) {
		if ( definition != null ) {
			// see if the specified generator name matches a registered <identifier-generator/>
			final var generatorDef =
					sourceDocument.getMetadataCollector()
							.getIdentifierGenerator( definition.getName() );
			final Map<String,Object> configuration = new HashMap<>();
			final String generatorStrategy;
			if ( generatorDef != null ) {
				generatorStrategy = generatorDef.getStrategy();
				configuration.putAll( generatorDef.getParameters() );
			}
			else {
				generatorStrategy = definition.getStrategy();
			}
			// the definition's own parameters win over the registered generator's
			configuration.putAll( definition.getParameters() );
			setGeneratorCreator( identifierValue, configuration, generatorStrategy, context );
		}
	}
	/**
	 * Obtain a {@link BeanContainer} to be used for instantiating generators.
	 */
	public static BeanContainer beanContainer(MetadataBuildingContext buildingContext) {
		return Helper.getBeanContainer( buildingContext.getBootstrapContext().getServiceRegistry() );
	}
	/**
	 * Set up the {@link GeneratorCreator} for a case where there is no
	 * generator annotation.
	 */
	private static void setGeneratorCreator(
			SimpleValue identifierValue,
			Map<String, Object> configuration,
			String generatorStrategy,
			MetadataBuildingContext context) {
		if ( ASSIGNED_GENERATOR_NAME.equals( generatorStrategy )
				|| org.hibernate.id.Assigned.class.getName().equals( generatorStrategy ) ) {
			// "assigned" (by either spelling) needs no instantiation machinery
			identifierValue.setCustomIdGeneratorCreator( ASSIGNED_IDENTIFIER_GENERATOR_CREATOR );
		}
		else {
			instantiateNamedStrategyGenerator( identifierValue, generatorStrategy, configuration, context );
		}
	}
	/**
	 * Install a lazy creator which instantiates and configures the generator
	 * named by the given strategy once the creation context is available.
	 */
	private static void instantiateNamedStrategyGenerator(
			SimpleValue identifierValue,
			String generatorStrategy,
			Map<String, Object> configuration,
			MetadataBuildingContext context) {
		final var beanContainer = beanContainer( context );
		identifierValue.setCustomIdGeneratorCreator( creationContext -> {
			final var identifierGenerator =
					instantiateGenerator( beanContainer, generatorClass( generatorStrategy, identifierValue ) );
			// in this code path, there's no generator annotation,
			// and therefore no need to call initialize()
			callConfigure( creationContext, identifierGenerator, configuration, identifierValue );
			if ( identifierGenerator instanceof IdentityGenerator) {
				// identity generation requires the id column be marked as identity
				identifierValue.setColumnToIdentity();
			}
			return identifierGenerator;
		} );
	}
	/**
	 * Set up the id generator by considering all annotations of the identifier
	 * field, including {@linkplain IdGeneratorType id generator annotations},
	 * and {@link GeneratedValue}.
	 */
	static void createIdGeneratorsFromGeneratorAnnotations(
			PropertyHolder propertyHolder,
			PropertyData inferredData,
			SimpleValue idValue,
			MetadataBuildingContext buildingContext) {
		final var modelsContext = buildingContext.getBootstrapContext().getModelsContext();
		final var idMemberDetails = inferredData.getAttributeMember();
		final var idGeneratorAnnotations = idMemberDetails.getMetaAnnotated( IdGeneratorType.class, modelsContext );
		final var generatorAnnotations = idMemberDetails.getMetaAnnotated( ValueGenerationType.class, modelsContext );
		// Since these collections may contain proxies created by common-annotations module, we cannot reliably use
		// simple remove/removeAll collection methods as those proxies do not implement hashcode/equals and even a
		// simple 'a.equals(a)' will return 'false'. Instead, we will check the annotation types. Since generator
		// annotations should not be "repeatable", we should have only at most one annotation for a generator.
		for ( var id : idGeneratorAnnotations ) {
			generatorAnnotations.removeIf( gen -> gen.annotationType().equals( id.annotationType() ) );
		}
		// at most one generator annotation of either kind is allowed on the id
		if ( idGeneratorAnnotations.size() + generatorAnnotations.size() > 1 ) {
			throw new AnnotationException( String.format(
					Locale.ROOT,
					"Identifier attribute '%s' has too many generator annotations: %s",
					getPath( propertyHolder, inferredData ),
					combineUntyped( idGeneratorAnnotations, generatorAnnotations )
			) );
		}
		if ( !idGeneratorAnnotations.isEmpty() ) {
			idValue.setCustomIdGeneratorCreator( identifierGeneratorCreator(
					idMemberDetails,
					idGeneratorAnnotations.get(0),
					idValue,
					beanContainer( buildingContext )
			) );
		}
		else if ( !generatorAnnotations.isEmpty() ) {
//			idValue.setCustomGeneratorCreator( generatorCreator( idMemberDetails, generatorAnnotation ) );
			throw new AnnotationException( String.format(
					Locale.ROOT,
					"Identifier attribute '%s' is annotated '%s' which is not an '@IdGeneratorType'",
					getPath( propertyHolder, inferredData ),
					generatorAnnotations.get(0).annotationType().getName()
			) );
		}
		else if ( idMemberDetails.hasDirectAnnotationUsage( GeneratedValue.class ) ) {
			// plain @GeneratedValue: resolve the generator in a second pass
			createIdGenerator( idMemberDetails, idValue, propertyHolder.getPersistentClass(), buildingContext );
		}
	}
	/**
	 * Returns the value generation strategy for the given property, if any, by
	 * considering {@linkplain ValueGenerationType generator type annotations}.
	 *
	 * @return a creator for the single generator annotation found, or
	 *         {@code null} when the property has none
	 */
	static GeneratorCreator createValueGeneratorFromAnnotations(
			PropertyHolder holder, String propertyName,
			Value value, MemberDetails property, MetadataBuildingContext buildingContext) {
		final var generatorAnnotations =
				property.getMetaAnnotated( ValueGenerationType.class,
						buildingContext.getBootstrapContext().getModelsContext() );
		return switch ( generatorAnnotations.size() ) {
			case 0 -> null;
			case 1 -> generatorCreator( property, value, generatorAnnotations.get(0), beanContainer( buildingContext ) );
			default -> throw new AnnotationException( "Property '" + qualify( holder.getPath(), propertyName )
					+ "' has too many generator annotations: " + generatorAnnotations );
		};
	}
public static void applyIfNotEmpty(String name, String value, BiConsumer<String,String> consumer) {
if ( isNotEmpty( value ) ) {
consumer.accept( name, value );
}
}
}
|
apache/rya | 35,830 | sail/src/test/java/org/apache/rya/rdftriplestore/inference/InferenceEngineTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.rya.rdftriplestore.inference;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.mock.MockInstance;
import org.apache.accumulo.core.client.security.tokens.PasswordToken;
import org.apache.rya.accumulo.AccumuloRdfConfiguration;
import org.apache.rya.accumulo.AccumuloRyaDAO;
import org.apache.rya.rdftriplestore.RdfCloudTripleStore;
import org.apache.tinkerpop.gremlin.structure.Graph;
import org.eclipse.rdf4j.model.IRI;
import org.eclipse.rdf4j.model.Resource;
import org.eclipse.rdf4j.model.Value;
import org.eclipse.rdf4j.model.ValueFactory;
import org.eclipse.rdf4j.model.impl.SimpleValueFactory;
import org.eclipse.rdf4j.query.QueryLanguage;
import org.eclipse.rdf4j.repository.sail.SailRepository;
import org.eclipse.rdf4j.repository.sail.SailRepositoryConnection;
import org.junit.After;
import org.junit.Assert;
import org.junit.Test;
import com.google.common.collect.Sets;
import junit.framework.TestCase;
public class InferenceEngineTest extends TestCase {
    private Connector connector;                // connection into the mock Accumulo instance
    private AccumuloRyaDAO dao;                 // Rya data-access object backing the store
    private static final ValueFactory VF = SimpleValueFactory.getInstance();
    private AccumuloRdfConfiguration conf;      // shared Rya/Accumulo configuration
    private RdfCloudTripleStore store;          // sail whose inference support is exercised
    private InferenceEngine inferenceEngine;    // engine under test
    private SailRepository repository;          // RDF4J wrapper around the store
    private SailRepositoryConnection conn;      // connection used to load test data via SPARQL
    @Override
    public void setUp() throws Exception {
        super.setUp();
        // In-memory (mock) Accumulo instance backing the Rya DAO; no real credentials.
        dao = new AccumuloRyaDAO();
        connector = new MockInstance().getConnector("", new PasswordToken(""));
        dao.setConnector(connector);
        conf = new AccumuloRdfConfiguration();
        dao.setConf(conf);
        dao.init();
        // Wire the triple store to the DAO and attach a fresh inference engine.
        store = new RdfCloudTripleStore();
        store.setConf(conf);
        store.setRyaDAO(dao);
        inferenceEngine = new InferenceEngine();
        inferenceEngine.setRyaDAO(dao);
        store.setInferenceEngine(inferenceEngine);
        inferenceEngine.refreshGraph();
        store.initialize();
        // RDF4J repository/connection used by each test to load data via SPARQL updates.
        repository = new SailRepository(store);
        conn = repository.getConnection();
    }
    @Override
    @After
    public void tearDown() throws Exception {
        // Release resources in reverse order of creation; purging the DAO wipes
        // the mock Accumulo tables so state never leaks between tests.
        conn.close();
        repository.shutDown();
        store.shutDown();
        dao.purge(conf);
        dao.destroy();
    }
@Test
public void testSubClassGraph() throws Exception {
final String insert = "INSERT DATA { GRAPH <http://updated/test> {\n"
+ " <urn:A> rdfs:subClassOf <urn:C> . \n"
+ " <urn:B> rdfs:subClassOf <urn:C> . \n"
+ " <urn:C> rdfs:subClassOf <urn:D> . \n"
+ " <urn:E> owl:equivalentClass <urn:D> . \n"
+ " <urn:E> rdfs:subClassOf <urn:G> . \n"
+ " <urn:Z> a owl:Class . \n"
+ " <urn:F> owl:equivalentClass <urn:G> . \n"
+ "}}";
conn.prepareUpdate(QueryLanguage.SPARQL, insert).execute();
inferenceEngine.refreshGraph();
final IRI a = VF.createIRI("urn:A");
final IRI b = VF.createIRI("urn:B");
final IRI c = VF.createIRI("urn:C");
final IRI d = VF.createIRI("urn:D");
final IRI e = VF.createIRI("urn:E");
final IRI f = VF.createIRI("urn:F");
final IRI g = VF.createIRI("urn:G");
final IRI z = VF.createIRI("urn:Z");
final IRI missing = VF.createIRI("urn:Missing");
final Set<IRI> empty = new HashSet<>();
final Set<IRI> belowLevel2 = new HashSet<>(Arrays.asList(a, b));
final Set<IRI> belowLevel3 = new HashSet<>(Arrays.asList(a, b, c, d, e));
final Set<IRI> belowLevel4 = new HashSet<>(Arrays.asList(a, b, c, d, e, f, g));
Assert.assertEquals(empty, inferenceEngine.getSubClasses(a));
Assert.assertEquals(empty, inferenceEngine.getSubClasses(b));
Assert.assertEquals(empty, inferenceEngine.getSubClasses(z));
Assert.assertEquals(empty, inferenceEngine.getSubClasses(missing));
Assert.assertEquals(belowLevel2, inferenceEngine.getSubClasses(c));
Assert.assertEquals(belowLevel3, inferenceEngine.getSubClasses(d));
Assert.assertEquals(belowLevel3, inferenceEngine.getSubClasses(e));
Assert.assertEquals(belowLevel4, inferenceEngine.getSubClasses(f));
Assert.assertEquals(belowLevel4, inferenceEngine.getSubClasses(g));
}
    @Test
    public void testSubPropertyGraph() throws Exception {
        // Load a property hierarchy mixing rdfs:subPropertyOf and
        // owl:equivalentProperty edges, plus one unrelated property (urn:w).
        final String insert = "INSERT DATA { GRAPH <http://updated/test> {\n"
                + "  <urn:p> rdfs:subPropertyOf <urn:q> . \n"
                + "  <urn:p> rdfs:subPropertyOf <urn:r> . \n"
                + "  <urn:r> owl:equivalentProperty <urn:s> . \n"
                + "  <urn:q> rdfs:subPropertyOf <urn:t> . \n"
                + "  <urn:t> rdfs:subPropertyOf <urn:u> . \n"
                + "  <urn:s> rdfs:subPropertyOf <urn:u> . \n"
                + "  <urn:v> owl:equivalentProperty <urn:u> . \n"
                + "  <urn:w> a owl:FunctionalProperty . \n"
                + "}}";
        conn.prepareUpdate(QueryLanguage.SPARQL, insert).execute();
        inferenceEngine.refreshGraph();
        final Graph graph = inferenceEngine.getSubPropertyOfGraph();
        final IRI p = VF.createIRI("urn:p");
        final IRI q = VF.createIRI("urn:q");
        final IRI r = VF.createIRI("urn:r");
        final IRI s = VF.createIRI("urn:s");
        final IRI t = VF.createIRI("urn:t");
        final IRI u = VF.createIRI("urn:u");
        final IRI v = VF.createIRI("urn:v");
        final IRI w = VF.createIRI("urn:w");
        final IRI missing = VF.createIRI("urn:Missing");
        // Expected transitive sub-property closures for each node in the graph.
        final Set<IRI> empty = new HashSet<>();
        final Set<IRI> belowQ = new HashSet<>(Arrays.asList(p));
        final Set<IRI> belowR = new HashSet<>(Arrays.asList(p, r, s));
        final Set<IRI> belowT = new HashSet<>(Arrays.asList(p, q));
        final Set<IRI> belowU = new HashSet<>(Arrays.asList(p, q, r, s, t, u, v));
        // Leaf, unrelated, and unknown properties have no sub-properties.
        Assert.assertEquals(empty, InferenceEngine.findParents(graph, p));
        Assert.assertEquals(empty, InferenceEngine.findParents(graph, w));
        Assert.assertEquals(empty, InferenceEngine.findParents(graph, missing));
        // The closure follows both subPropertyOf and equivalentProperty edges.
        Assert.assertEquals(belowQ, InferenceEngine.findParents(graph, q));
        Assert.assertEquals(belowR, InferenceEngine.findParents(graph, r));
        Assert.assertEquals(belowR, InferenceEngine.findParents(graph, s));
        Assert.assertEquals(belowT, InferenceEngine.findParents(graph, t));
        Assert.assertEquals(belowU, InferenceEngine.findParents(graph, u));
        Assert.assertEquals(belowU, InferenceEngine.findParents(graph, v));
    }
    /**
     * Verifies rdfs:domain / rdfs:range lookups. Per the assertions below:
     * a domain/range declared on urn:p2 applies to p2's sub-properties as well
     * (p1), and via owl:inverseOf the domain and range are swapped for the
     * inverse properties (i*/j* appear in the "inverse" expectation sets).
     * Classes above the declared domain/range in the hierarchy (D3, R3) inherit
     * the same property sets; classes below them (D1, R1) get nothing.
     */
    @Test
    public void testDomainRange() throws Exception {
        final String insert = "INSERT DATA { GRAPH <http://updated/test> {\n"
                + " <urn:p1> rdfs:subPropertyOf <urn:p2> . \n"
                + " <urn:p2> rdfs:subPropertyOf <urn:p3> . \n"
                + " <urn:q1> rdfs:subPropertyOf <urn:q2> . \n"
                + " <urn:q2> rdfs:subPropertyOf <urn:q3> . \n"
                + " <urn:i1> rdfs:subPropertyOf <urn:i2> . \n"
                + " <urn:i2> rdfs:subPropertyOf <urn:i3> . \n"
                + " <urn:j1> rdfs:subPropertyOf <urn:j2> . \n"
                + " <urn:j2> rdfs:subPropertyOf <urn:j3> . \n"
                + " <urn:p2> owl:inverseOf <urn:i2> . \n"
                + " <urn:i1> owl:inverseOf <urn:q2> . \n"
                + " <urn:q1> owl:inverseOf <urn:j2> . \n"
                + " <urn:D1> rdfs:subClassOf <urn:D2> . \n"
                + " <urn:D2> rdfs:subClassOf <urn:D3> . \n"
                + " <urn:R1> rdfs:subClassOf <urn:R2> . \n"
                + " <urn:R2> rdfs:subClassOf <urn:R3> . \n"
                + " <urn:p2> rdfs:domain <urn:D2> . \n"
                + " <urn:p2> rdfs:range <urn:R2> . \n"
                + "}}";
        conn.prepareUpdate(QueryLanguage.SPARQL, insert).execute();
        inferenceEngine.refreshGraph();
        final Set<IRI> hasDomainD1 = inferenceEngine.getPropertiesWithDomain(VF.createIRI("urn:D1"));
        final Set<IRI> hasDomainD2 = inferenceEngine.getPropertiesWithDomain(VF.createIRI("urn:D2"));
        final Set<IRI> hasDomainD3 = inferenceEngine.getPropertiesWithDomain(VF.createIRI("urn:D3"));
        final Set<IRI> hasRangeD1 = inferenceEngine.getPropertiesWithRange(VF.createIRI("urn:D1"));
        final Set<IRI> hasRangeD2 = inferenceEngine.getPropertiesWithRange(VF.createIRI("urn:D2"));
        final Set<IRI> hasRangeD3 = inferenceEngine.getPropertiesWithRange(VF.createIRI("urn:D3"));
        final Set<IRI> hasDomainR1 = inferenceEngine.getPropertiesWithDomain(VF.createIRI("urn:R1"));
        final Set<IRI> hasDomainR2 = inferenceEngine.getPropertiesWithDomain(VF.createIRI("urn:R2"));
        final Set<IRI> hasDomainR3 = inferenceEngine.getPropertiesWithDomain(VF.createIRI("urn:R3"));
        final Set<IRI> hasRangeR1 = inferenceEngine.getPropertiesWithRange(VF.createIRI("urn:R1"));
        final Set<IRI> hasRangeR2 = inferenceEngine.getPropertiesWithRange(VF.createIRI("urn:R2"));
        final Set<IRI> hasRangeR3 = inferenceEngine.getPropertiesWithRange(VF.createIRI("urn:R3"));
        final Set<IRI> empty = new HashSet<>();
        // "Forward" properties: p2 (declared) plus everything reachable by
        // sub-property and inverse-of-inverse relationships.
        final Set<IRI> expectedForward = new HashSet<>();
        expectedForward.add(VF.createIRI("urn:p2"));
        expectedForward.add(VF.createIRI("urn:p1"));
        expectedForward.add(VF.createIRI("urn:q2"));
        expectedForward.add(VF.createIRI("urn:q1"));
        // "Inverse" properties: inverses of the forward set; they carry the
        // swapped domain/range.
        final Set<IRI> expectedInverse = new HashSet<>();
        expectedInverse.add(VF.createIRI("urn:i1"));
        expectedInverse.add(VF.createIRI("urn:i2"));
        expectedInverse.add(VF.createIRI("urn:j1"));
        expectedInverse.add(VF.createIRI("urn:j2"));
        // Subclasses of the declared domain/range see nothing.
        Assert.assertEquals(empty, hasDomainD1);
        Assert.assertEquals(empty, hasRangeD1);
        Assert.assertEquals(empty, hasDomainR1);
        Assert.assertEquals(empty, hasRangeR1);
        // The declared classes themselves.
        Assert.assertEquals(expectedForward, hasDomainD2);
        Assert.assertEquals(expectedInverse, hasRangeD2);
        Assert.assertEquals(expectedInverse, hasDomainR2);
        Assert.assertEquals(expectedForward, hasRangeR2);
        // Superclasses inherit the same property sets.
        Assert.assertEquals(expectedForward, hasDomainD3);
        Assert.assertEquals(expectedInverse, hasRangeD3);
        Assert.assertEquals(expectedInverse, hasDomainR3);
        Assert.assertEquals(expectedForward, hasRangeR3);
    }
    /**
     * Verifies owl:someValuesFrom restriction lookup by restriction type.
     * The returned map associates each value class (the someValuesFrom target
     * or its subclasses) with the restriction property and its sub-properties.
     * Querying a supertype (urn:Person) combines the restrictions of all of
     * its restriction subtypes; querying a non-restriction subtype returns an
     * empty map.
     */
    @Test
    public void testSomeValuesFrom() throws Exception {
        final String insert = "INSERT DATA { GRAPH <http://updated/test> {\n"
                // base restrictions
                + " <urn:Chair> owl:onProperty <urn:headOf> ; owl:someValuesFrom <urn:Department> .\n"
                + " <urn:Dean> owl:onProperty <urn:headOf> ; owl:someValuesFrom <urn:College> .\n"
                // classes related to the restriction type
                + " <urn:ScienceDepartmentChair> rdfs:subClassOf <urn:Chair> .\n"
                + " <urn:Chair> rdfs:subClassOf <urn:Person> .\n"
                + " <urn:Dean> rdfs:subClassOf <urn:Person> .\n"
                + " <urn:Student> rdfs:subClassOf <urn:Person> .\n"
                // classes related to the value type
                + " <urn:ScienceDepartment> rdfs:subClassOf <urn:Department> .\n"
                + " <urn:HumanitiesDepartment> rdfs:subClassOf <urn:Department> .\n"
                + " <urn:Department> rdfs:subClassOf <urn:Organization> .\n"
                + " <urn:College> rdfs:subClassOf <urn:Organization> .\n"
                // properties related to the restriction property
                + " <urn:temporaryHeadOf> rdfs:subPropertyOf <urn:headOf> .\n"
                + " <urn:headOf> rdfs:subPropertyOf <urn:worksFor> .\n"
                + "}}";
        conn.prepareUpdate(QueryLanguage.SPARQL, insert).execute();
        inferenceEngine.refreshGraph();
        // The restriction property and its sub-properties (but NOT its
        // super-property worksFor) imply the restriction.
        final Set<IRI> properties = new HashSet<>();
        properties.add(VF.createIRI("urn:headOf"));
        properties.add(VF.createIRI("urn:temporaryHeadOf"));
        // Chair: Department plus each of Department's subclasses maps to the
        // restriction properties.
        final Map<Resource, Set<IRI>> chairDerivations = new HashMap<>();
        chairDerivations.put(VF.createIRI("urn:Department"), properties);
        chairDerivations.put(VF.createIRI("urn:ScienceDepartment"), properties);
        chairDerivations.put(VF.createIRI("urn:HumanitiesDepartment"), properties);
        final Map<Resource, Set<IRI>> deanDerivations = new HashMap<>();
        deanDerivations.put(VF.createIRI("urn:College"), properties);
        final Map<Resource, Set<IRI>> combinedDerivations = new HashMap<>(chairDerivations);
        combinedDerivations.put(VF.createIRI("urn:College"), properties);
        // Get someValuesFrom restrictions given the direct types
        Assert.assertEquals(deanDerivations, inferenceEngine.getSomeValuesFromByRestrictionType(VF.createIRI("urn:Dean")));
        Assert.assertEquals(chairDerivations, inferenceEngine.getSomeValuesFromByRestrictionType(VF.createIRI("urn:Chair")));
        // Finds the subtype's restrictions given the supertype
        Assert.assertEquals(combinedDerivations, inferenceEngine.getSomeValuesFromByRestrictionType(VF.createIRI("urn:Person")));
        // Finds nothing if given a subtype which is not a restriction
        Assert.assertEquals(new HashMap<>(), inferenceEngine.getSomeValuesFromByRestrictionType(VF.createIRI("urn:ScienceDepartmentChair")));
    }
    /**
     * Verifies owl:allValuesFrom restriction lookup by value type. The map
     * returned for a value class associates each restriction class (and its
     * subclasses) with the restriction property and its sub-properties.
     * Querying progressively broader value types (Terrier, Dog, Mammal) yields
     * progressively larger maps, each a superset of the previous one.
     */
    @Test
    public void testAllValuesFrom() throws Exception {
        final String insert = "INSERT DATA { GRAPH <http://updated/test> {\n"
                + " <urn:Dog> owl:onProperty <urn:relative> ; owl:allValuesFrom <urn:Dog> .\n"
                + " <urn:Retriever> rdfs:subClassOf <urn:Dog> .\n"
                + " <urn:Terrier> rdfs:subClassOf <urn:Dog> .\n"
                + " <urn:Terrier> owl:onProperty <urn:relative> ; owl:allValuesFrom <urn:Terrier> .\n"
                + " <urn:Cairn_Terrier> rdfs:subClassOf <urn:Terrier> .\n"
                + " <urn:parent> rdfs:subPropertyOf <urn:relative> .\n"
                + " <urn:Dog> rdfs:subClassOf <urn:Mammal> .\n"
                + " <urn:Person> rdfs:subClassOf <urn:Mammal> .\n"
                + " <urn:Person> owl:onProperty <urn:relative> ; owl:allValuesFrom <urn:Person> .\n"
                + "}}";
        conn.prepareUpdate(QueryLanguage.SPARQL, insert).execute();
        inferenceEngine.refreshGraph();
        final Map<Resource, Set<IRI>> restrictionsImplyingTerrier = new HashMap<>();
        // The restriction property and its sub-property both trigger the restriction.
        final Set<IRI> properties = new HashSet<>();
        properties.add(VF.createIRI("urn:parent"));
        properties.add(VF.createIRI("urn:relative"));
        restrictionsImplyingTerrier.put(VF.createIRI("urn:Terrier"), properties);
        restrictionsImplyingTerrier.put(VF.createIRI("urn:Cairn_Terrier"), properties);
        Assert.assertEquals(restrictionsImplyingTerrier, inferenceEngine.getAllValuesFromByValueType(VF.createIRI("urn:Terrier")));
        // Dog's map is Terrier's map plus the Dog restriction and its subclass Retriever.
        final Map<Resource, Set<IRI>> restrictionsImplyingDog = new HashMap<>(restrictionsImplyingTerrier);
        restrictionsImplyingDog.put(VF.createIRI("urn:Dog"), properties);
        restrictionsImplyingDog.put(VF.createIRI("urn:Retriever"), properties);
        Assert.assertEquals(restrictionsImplyingDog, inferenceEngine.getAllValuesFromByValueType(VF.createIRI("urn:Dog")));
        // Mammal additionally picks up the Person restriction.
        final Map<Resource, Set<IRI>> restrictionsImplyingMammal = new HashMap<>(restrictionsImplyingDog);
        restrictionsImplyingMammal.put(VF.createIRI("urn:Person"), properties);
        Assert.assertEquals(restrictionsImplyingMammal, inferenceEngine.getAllValuesFromByValueType(VF.createIRI("urn:Mammal")));
    }
    /**
     * Verifies owl:hasValue lookup keyed by property: for urn:taxon, the
     * result maps each restriction class to the set of values implying it.
     * A subclass relationship between restriction classes merges values
     * upward: Mammal (subclass of Vertebrate) is implied by both Mammalia
     * and Vertebrata. Restrictions on other properties (urn:walksUsingLegs)
     * do not appear in the result.
     */
    @Test
    public void testHasValueGivenProperty() throws Exception {
        final String insert = "INSERT DATA { GRAPH <http://updated/test> {\n"
                + " <urn:Biped> owl:onProperty <urn:walksUsingLegs> . \n"
                + " <urn:Biped> owl:hasValue \"2\" . \n"
                + " <urn:Mammal> owl:onProperty <urn:taxon> . \n"
                + " <urn:Mammal> owl:hasValue <urn:Mammalia> . \n"
                + " <urn:Vertebrate> owl:onProperty <urn:taxon> . \n"
                + " <urn:Vertebrate> owl:hasValue <urn:Vertebrata> . \n"
                + " <urn:Tunicate> owl:onProperty <urn:taxon> . \n"
                + " <urn:Tunicate> owl:hasValue <urn:Tunicata> . \n"
                + " <urn:Mammal> rdfs:subClassOf <urn:Vertebrate> . \n"
                + " <urn:Vertebrate> rdfs:subClassOf <urn:Animal> . \n"
                + " <urn:Tunicate> rdfs:subClassOf <urn:Animal> . \n"
                + " <urn:Biped> rdfs:subClassOf <urn:Animal> . \n"
                + "}}";
        conn.prepareUpdate(QueryLanguage.SPARQL, insert).execute();
        inferenceEngine.refreshGraph();
        final Map<Resource, Set<Value>> typeToValueImplications = new HashMap<>();
        final Set<Value> vertebrateTaxa = new HashSet<>();
        final Set<Value> tunicateTaxa = new HashSet<>();
        vertebrateTaxa.add(VF.createIRI("urn:Vertebrata"));
        tunicateTaxa.add(VF.createIRI("urn:Tunicata"));
        // Mammal is a subclass of Vertebrate, so Vertebrate's value also implies Mammal.
        final Set<Value> mammalTaxa = new HashSet<>(vertebrateTaxa);
        mammalTaxa.add(VF.createIRI("urn:Mammalia"));
        typeToValueImplications.put(VF.createIRI("urn:Vertebrate"), vertebrateTaxa);
        typeToValueImplications.put(VF.createIRI("urn:Tunicate"), tunicateTaxa);
        typeToValueImplications.put(VF.createIRI("urn:Mammal"), mammalTaxa);
        Assert.assertEquals(typeToValueImplications, inferenceEngine.getHasValueByProperty(VF.createIRI("urn:taxon")));
    }
    /**
     * Verifies owl:hasValue lookup keyed by type: for a given class, the
     * result maps each restricted property to the values implying membership.
     * Direct restrictions return just the declared value; querying a
     * supertype aggregates the values of all restriction subclasses, across
     * multiple properties where applicable (urn:Animal collects both the
     * taxon values and the walksUsingLegs literal).
     */
    @Test
    public void testHasValueGivenType() throws Exception {
        final String insert = "INSERT DATA { GRAPH <http://updated/test> {\n"
                + " <urn:Biped> owl:onProperty <urn:walksUsingLegs> . \n"
                + " <urn:Biped> owl:hasValue \"2\" . \n"
                + " <urn:Mammal> owl:onProperty <urn:taxon> . \n"
                + " <urn:Mammal> owl:hasValue <urn:Mammalia> . \n"
                + " <urn:Vertebrate> owl:onProperty <urn:taxon> . \n"
                + " <urn:Vertebrate> owl:hasValue <urn:Vertebrata> . \n"
                + " <urn:Tunicate> owl:onProperty <urn:taxon> . \n"
                + " <urn:Tunicate> owl:hasValue <urn:Tunicata> . \n"
                + " <urn:Plant> owl:onProperty <urn:taxon> . \n"
                + " <urn:Plant> owl:hasValue <urn:Plantae> . \n"
                + " <urn:Mammal> rdfs:subClassOf <urn:Vertebrate> . \n"
                + " <urn:Vertebrate> rdfs:subClassOf <urn:Animal> . \n"
                + " <urn:Tunicate> rdfs:subClassOf <urn:Animal> . \n"
                + " <urn:Biped> rdfs:subClassOf <urn:Animal> . \n"
                + "}}";
        conn.prepareUpdate(QueryLanguage.SPARQL, insert).execute();
        inferenceEngine.refreshGraph();
        final IRI legs = VF.createIRI("urn:walksUsingLegs");
        final IRI taxon = VF.createIRI("urn:taxon");
        // Verify direct restrictions:
        final Map<IRI, Set<Value>> valuesImplyingBiped = new HashMap<>();
        valuesImplyingBiped.put(legs, new HashSet<>());
        valuesImplyingBiped.get(legs).add(VF.createLiteral("2"));
        Assert.assertEquals(valuesImplyingBiped, inferenceEngine.getHasValueByType(VF.createIRI("urn:Biped")));
        final Map<IRI, Set<Value>> valuesImplyingMammal = new HashMap<>();
        valuesImplyingMammal.put(taxon, new HashSet<>());
        valuesImplyingMammal.get(taxon).add(VF.createIRI("urn:Mammalia"));
        Assert.assertEquals(valuesImplyingMammal, inferenceEngine.getHasValueByType(VF.createIRI("urn:Mammal")));
        final Map<IRI, Set<Value>> valuesImplyingTunicate = new HashMap<>();
        valuesImplyingTunicate.put(taxon, new HashSet<>());
        valuesImplyingTunicate.get(taxon).add(VF.createIRI("urn:Tunicata"));
        Assert.assertEquals(valuesImplyingTunicate, inferenceEngine.getHasValueByType(VF.createIRI("urn:Tunicate")));
        final Map<IRI, Set<Value>> valuesImplyingPlant = new HashMap<>();
        valuesImplyingPlant.put(taxon, new HashSet<>());
        valuesImplyingPlant.get(taxon).add(VF.createIRI("urn:Plantae"));
        Assert.assertEquals(valuesImplyingPlant, inferenceEngine.getHasValueByType(VF.createIRI("urn:Plant")));
        // Verify indirect restrictions given a supertype, including multiple properties where relevant:
        final Map<IRI, Set<Value>> valuesImplyingVertebrate = new HashMap<>();
        // Copy Mammal's set before adding so the earlier expectation isn't mutated.
        valuesImplyingVertebrate.put(taxon, new HashSet<>(valuesImplyingMammal.get(taxon)));
        valuesImplyingVertebrate.get(taxon).add(VF.createIRI("urn:Vertebrata"));
        Assert.assertEquals(valuesImplyingVertebrate, inferenceEngine.getHasValueByType(VF.createIRI("urn:Vertebrate")));
        final Map<IRI, Set<Value>> valuesImplyingAnimal = new HashMap<>();
        // Reusing Biped's set directly is safe here since it is not modified below.
        valuesImplyingAnimal.put(legs, valuesImplyingBiped.get(legs));
        valuesImplyingAnimal.put(taxon, new HashSet<>(valuesImplyingVertebrate.get(taxon)));
        valuesImplyingAnimal.get(taxon).addAll(valuesImplyingTunicate.get(taxon));
        Assert.assertEquals(valuesImplyingAnimal, inferenceEngine.getHasValueByType(VF.createIRI("urn:Animal")));
    }
@Test
public void testUnionOf() throws Exception {
final String ontology = "INSERT DATA { GRAPH <http://updated/test> {\n"
+ " <urn:A> owl:unionOf <urn:list1> . \n"
+ " <urn:B> owl:unionOf <urn:list2> . \n"
+ " <urn:list1> rdf:first <urn:X> . \n"
+ " <urn:list1> rdf:rest <urn:list2> . \n"
+ " <urn:list2> rdf:first <urn:Y> . \n"
+ " <urn:list2> rdf:rest <urn:list3> . \n"
+ " <urn:list3> rdf:first <urn:Z> . \n"
+ " <urn:Y> rdfs:subClassOf <urn:SuperY> . \n"
+ " <urn:SubY> rdfs:subClassOf <urn:Y> . \n"
+ "}}";
conn.prepareUpdate(QueryLanguage.SPARQL, ontology).execute();
inferenceEngine.refreshGraph();
final Set<IRI> subClassesA = inferenceEngine.getSubClasses(VF.createIRI("urn:A"));
final Set<IRI> subClassesB = inferenceEngine.getSubClasses(VF.createIRI("urn:B"));
final Set<IRI> expectedA = new HashSet<>();
final Set<IRI> expectedB = new HashSet<>();
expectedB.add(VF.createIRI("urn:Y"));
expectedB.add(VF.createIRI("urn:SubY"));
expectedB.add(VF.createIRI("urn:Z"));
expectedA.addAll(expectedB);
expectedA.add(VF.createIRI("urn:X"));
Assert.assertEquals(expectedA, subClassesA);
Assert.assertEquals(expectedB, subClassesB);
}
public void testIntersectionOf() throws Exception {
final String ontology = "INSERT DATA { GRAPH <http://updated/test> {\n"
+ " <urn:Mother> owl:intersectionOf _:bnode1 . \n"
+ " _:bnode1 rdf:first <urn:Woman> . \n"
+ " _:bnode1 rdf:rest _:bnode2 . \n"
+ " _:bnode2 rdf:first <urn:Parent> . \n"
+ " _:bnode2 rdf:rest rdf:nil . \n"
+ " <urn:Father> owl:intersectionOf _:bnode3 . \n"
+ " _:bnode3 rdf:first <urn:Man> . \n"
+ " _:bnode3 rdf:rest _:bnode4 . \n"
+ " _:bnode4 rdf:first <urn:Parent> . \n"
+ " _:bnode4 rdf:rest rdf:nil . \n"
+ " <urn:Mom> owl:intersectionOf _:bnode5 . \n"
+ " _:bnode5 rdf:first <urn:Woman> . \n"
+ " _:bnode5 rdf:rest _:bnode6 . \n"
+ " _:bnode6 rdf:first <urn:Parent> . \n"
+ " _:bnode6 rdf:rest rdf:nil . \n"
+ " <urn:Mother> rdfs:subClassOf <urn:ImmediateFamilyMember> . \n"
+ " <urn:ImmediateFamilyMember> rdfs:subClassOf <urn:Relative> . \n"
+ "}}";
conn.prepareUpdate(QueryLanguage.SPARQL, ontology).execute();
inferenceEngine.refreshGraph();
final IRI mother = VF.createIRI("urn:Mother");
final IRI father = VF.createIRI("urn:Father");
final IRI woman = VF.createIRI("urn:Woman");
final IRI parent = VF.createIRI("urn:Parent");
final IRI man = VF.createIRI("urn:Man");
final IRI mom = VF.createIRI("urn:Mom");
final IRI immediateFamilyMember = VF.createIRI("urn:ImmediateFamilyMember");
final IRI relative = VF.createIRI("urn:Relative");
final List<Set<Resource>> intersectionsImplyingMother = Arrays.asList(Sets.newHashSet(woman, parent));
Assert.assertEquals(intersectionsImplyingMother, inferenceEngine.getIntersectionsImplying(mother));
final List<Set<Resource>> intersectionsImplyingFather = Arrays.asList(Sets.newHashSet(man, parent));
Assert.assertEquals(intersectionsImplyingFather, inferenceEngine.getIntersectionsImplying(father));
// Check that Mother is a subclassOf Parent and Woman and
// ImmediateFamilyMember and Relative. Also, Mother is a subclassOf
// Mother and Mom through inferring equivalentClass.
final Set<IRI> motherSuperClassUris = inferenceEngine.getSuperClasses(mother);
Assert.assertNotNull(motherSuperClassUris);
Assert.assertEquals(6, motherSuperClassUris.size());
Assert.assertTrue(motherSuperClassUris.contains(parent));
Assert.assertTrue(motherSuperClassUris.contains(woman));
Assert.assertTrue(motherSuperClassUris.contains(immediateFamilyMember));
Assert.assertTrue(motherSuperClassUris.contains(relative));
Assert.assertTrue(motherSuperClassUris.contains(mother));
Assert.assertTrue(motherSuperClassUris.contains(mom));
// Check that Father is a subclassOf Parent and Man
final Set<IRI> fatherSuperClassUris = inferenceEngine.getSuperClasses(father);
Assert.assertNotNull(fatherSuperClassUris);
Assert.assertEquals(2, fatherSuperClassUris.size());
Assert.assertTrue(fatherSuperClassUris.contains(parent));
Assert.assertTrue(fatherSuperClassUris.contains(man));
// Check that Mom is a subclassOf Parent and Woman and
// ImmediateFamilyMember and Relative. The last 2 should be inferred
// from having the same intersection as Mother. Also, Mom is a
// subclassOf Mother and Mom through inferring equivalentClass.
final Set<IRI> momSuperClassUris = inferenceEngine.getSuperClasses(mom);
Assert.assertNotNull(momSuperClassUris);
Assert.assertEquals(6, momSuperClassUris.size());
Assert.assertTrue(momSuperClassUris.contains(parent));
Assert.assertTrue(momSuperClassUris.contains(woman));
Assert.assertTrue(momSuperClassUris.contains(immediateFamilyMember));
Assert.assertTrue(momSuperClassUris.contains(relative));
Assert.assertTrue(momSuperClassUris.contains(mother));
Assert.assertTrue(momSuperClassUris.contains(mom));
}
    /**
     * Verifies owl:oneOf enumeration support: a class defined by an RDF list
     * of individuals is recognized as an enumerated type, and
     * {@code getEnumeration} returns exactly the listed members. An IRI with
     * no oneOf definition (urn:Joker) is not an enumerated type. Two lists
     * are exercised: the four card suits and the thirteen card ranks.
     */
    @Test
    public void testOneOf() throws Exception {
        final String ontology = "INSERT DATA { GRAPH <http://updated/test> {\n"
                + " <urn:Suits> owl:oneOf _:bnodeS1 . \n"
                + " _:bnodeS1 rdf:first <urn:Clubs> . \n"
                + " _:bnodeS1 rdf:rest _:bnodeS2 . \n"
                + " _:bnodeS2 rdf:first <urn:Diamonds> . \n"
                + " _:bnodeS2 rdf:rest _:bnodeS3 . \n"
                + " _:bnodeS3 rdf:first <urn:Hearts> . \n"
                + " _:bnodeS3 rdf:rest _:bnodeS4 . \n"
                + " _:bnodeS4 rdf:first <urn:Spades> . \n"
                + " _:bnodeS4 rdf:rest rdf:nil . \n"
                + " <urn:Ranks> owl:oneOf _:bnodeR1 . \n"
                + " _:bnodeR1 rdf:first <urn:Ace> . \n"
                + " _:bnodeR1 rdf:rest _:bnodeR2 . \n"
                + " _:bnodeR2 rdf:first <urn:2> . \n"
                + " _:bnodeR2 rdf:rest _:bnodeR3 . \n"
                + " _:bnodeR3 rdf:first <urn:3> . \n"
                + " _:bnodeR3 rdf:rest _:bnodeR4 . \n"
                + " _:bnodeR4 rdf:first <urn:4> . \n"
                + " _:bnodeR4 rdf:rest _:bnodeR5 . \n"
                + " _:bnodeR5 rdf:first <urn:5> . \n"
                + " _:bnodeR5 rdf:rest _:bnodeR6 . \n"
                + " _:bnodeR6 rdf:first <urn:6> . \n"
                + " _:bnodeR6 rdf:rest _:bnodeR7 . \n"
                + " _:bnodeR7 rdf:first <urn:7> . \n"
                + " _:bnodeR7 rdf:rest _:bnodeR8 . \n"
                + " _:bnodeR8 rdf:first <urn:8> . \n"
                + " _:bnodeR8 rdf:rest _:bnodeR9 . \n"
                + " _:bnodeR9 rdf:first <urn:9> . \n"
                + " _:bnodeR9 rdf:rest _:bnodeR10 . \n"
                + " _:bnodeR10 rdf:first <urn:10> . \n"
                + " _:bnodeR10 rdf:rest _:bnodeR11 . \n"
                + " _:bnodeR11 rdf:first <urn:Jack> . \n"
                + " _:bnodeR11 rdf:rest _:bnodeR12 . \n"
                + " _:bnodeR12 rdf:first <urn:Queen> . \n"
                + " _:bnodeR12 rdf:rest _:bnodeR13 . \n"
                + " _:bnodeR13 rdf:first <urn:King> . \n"
                + " _:bnodeR13 rdf:rest rdf:nil . \n"
                + "}}";
        conn.prepareUpdate(QueryLanguage.SPARQL, ontology).execute();
        inferenceEngine.refreshGraph();
        final IRI suits = VF.createIRI("urn:Suits");
        final IRI ranks = VF.createIRI("urn:Ranks");
        final IRI clubs = VF.createIRI("urn:Clubs");
        final IRI diamonds = VF.createIRI("urn:Diamonds");
        final IRI hearts = VF.createIRI("urn:Hearts");
        final IRI spades = VF.createIRI("urn:Spades");
        final IRI ace = VF.createIRI("urn:Ace");
        final IRI two = VF.createIRI("urn:2");
        final IRI three = VF.createIRI("urn:3");
        final IRI four = VF.createIRI("urn:4");
        final IRI five = VF.createIRI("urn:5");
        final IRI six = VF.createIRI("urn:6");
        final IRI seven = VF.createIRI("urn:7");
        final IRI eight = VF.createIRI("urn:8");
        final IRI nine = VF.createIRI("urn:9");
        final IRI ten = VF.createIRI("urn:10");
        final IRI jack = VF.createIRI("urn:Jack");
        final IRI queen = VF.createIRI("urn:Queen");
        final IRI king = VF.createIRI("urn:King");
        final IRI joker = VF.createIRI("urn:Joker");
        // joker was never defined with owl:oneOf, so it is not enumerated.
        final boolean isJokerEnumeratedType = inferenceEngine.isEnumeratedType(joker);
        Assert.assertFalse(isJokerEnumeratedType);
        final boolean isSuitsEnumeratedType = inferenceEngine.isEnumeratedType(suits);
        Assert.assertTrue(isSuitsEnumeratedType);
        final Set<Resource> enumerationImplyingSuits = Sets.newHashSet(clubs, diamonds, hearts, spades);
        final Set<Resource> actualCardSuits = inferenceEngine.getEnumeration(suits);
        Assert.assertEquals(enumerationImplyingSuits, actualCardSuits);
        final boolean isRanksEnumeratedType = inferenceEngine.isEnumeratedType(ranks);
        Assert.assertTrue(isRanksEnumeratedType);
        final Set<Resource> enumerationImplyingRanks = Sets.newHashSet(ace, two, three, four, five, six, seven, eight, nine, ten, jack, queen, king);
        final Set<Resource> actualCardRanks = inferenceEngine.getEnumeration(ranks);
        Assert.assertEquals(enumerationImplyingRanks, actualCardRanks);
    }
@Test
public void hasSelfTest() throws Exception {
final String ontology = "INSERT DATA { GRAPH <http://updated/test> {\n"
+ " <urn:Narcissist> owl:onProperty <urn:love> ; owl:hasSelf \"true\" . \n" + "}}";
conn.prepareUpdate(QueryLanguage.SPARQL, ontology).execute();
inferenceEngine.refreshGraph();
final Set<Resource> expectedTypes = new HashSet<>();
expectedTypes.add(VF.createIRI("urn:Narcissist"));
Assert.assertEquals(expectedTypes, inferenceEngine.getHasSelfImplyingProperty(VF.createIRI("urn:love")));
final Set<IRI> expectedProperties = new HashSet<>();
expectedProperties.add(VF.createIRI("urn:love"));
Assert.assertEquals(expectedProperties, inferenceEngine.getHasSelfImplyingType(VF.createIRI("urn:Narcissist")));
}
    /**
     * Verifies detection of the three supported property characteristics --
     * owl:SymmetricProperty, owl:TransitiveProperty, owl:ReflexiveProperty --
     * across properties declared with every combination of them. Other
     * declared types (AnnotationProperty, Irreflexive, Asymmetric,
     * Functional, ObjectProperty) must not trigger false positives.
     */
    @Test
    public void testPropertyTypes() throws Exception {
        final String ontology = "INSERT DATA { GRAPH <http://updated/test> {\n"
                + " <urn:comment> a owl:AnnotationProperty .\n"
                + " <urn:olderThan> a owl:TransitiveProperty, owl:IrreflexiveProperty, owl:AsymmetricProperty .\n"
                + " <urn:notYoungerThan> a owl:TransitiveProperty, owl:ReflexiveProperty .\n"
                + " <urn:related> a owl:Property, owl:SymmetricProperty, owl:TransitiveProperty .\n"
                + " <urn:knows> a owl:SymmetricProperty, owl:ObjectProperty, owl:ReflexiveProperty .\n"
                + " <urn:sameAgeAs> a owl:SymmetricProperty, owl:ReflexiveProperty, owl:TransitiveProperty .\n"
                + "}}";
        conn.prepareUpdate(QueryLanguage.SPARQL, ontology).execute();
        inferenceEngine.refreshGraph();
        final IRI comment = VF.createIRI("urn:comment"); // none of the three supported types
        final IRI older = VF.createIRI("urn:olderThan"); // transitive only
        final IRI notYounger = VF.createIRI("urn:notYoungerThan"); // transitive and reflexive
        final IRI related = VF.createIRI("urn:related"); // transitive and symmetric
        final IRI knows = VF.createIRI("urn:knows"); // reflexive and symmetric
        final IRI sameAge = VF.createIRI("urn:sameAgeAs"); // all three
        // symmetry
        Assert.assertFalse(inferenceEngine.isSymmetricProperty(comment));
        Assert.assertFalse(inferenceEngine.isSymmetricProperty(older));
        Assert.assertFalse(inferenceEngine.isSymmetricProperty(notYounger));
        Assert.assertTrue(inferenceEngine.isSymmetricProperty(related));
        Assert.assertTrue(inferenceEngine.isSymmetricProperty(knows));
        Assert.assertTrue(inferenceEngine.isSymmetricProperty(sameAge));
        // transitivity
        Assert.assertFalse(inferenceEngine.isTransitiveProperty(comment));
        Assert.assertTrue(inferenceEngine.isTransitiveProperty(older));
        Assert.assertTrue(inferenceEngine.isTransitiveProperty(notYounger));
        Assert.assertTrue(inferenceEngine.isTransitiveProperty(related));
        Assert.assertFalse(inferenceEngine.isTransitiveProperty(knows));
        Assert.assertTrue(inferenceEngine.isTransitiveProperty(sameAge));
        // reflexivity
        Assert.assertFalse(inferenceEngine.isReflexiveProperty(comment));
        Assert.assertFalse(inferenceEngine.isReflexiveProperty(older));
        Assert.assertTrue(inferenceEngine.isReflexiveProperty(notYounger));
        Assert.assertFalse(inferenceEngine.isReflexiveProperty(related));
        Assert.assertTrue(inferenceEngine.isReflexiveProperty(knows));
        Assert.assertTrue(inferenceEngine.isReflexiveProperty(sameAge));
    }
}
|
apache/trafodion | 36,074 | tests/phx/src/test/java/org/trafodion/phoenix/end2end/ArithmeticQueryTest.java | /*******************************************************************************
* Copyright (c) 2013, Salesforce.com, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of Salesforce.com nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
******************************************************************************/
/**********************************
*
* Later modifications to test Trafodion instead of Phoenix were granted to ASF.
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*************************************/
package test.java.org.trafodion.phoenix.end2end;
import static org.junit.Assert.*;
import org.junit.*;
import java.sql.*;
import java.util.*;
import java.math.*;
public class ArithmeticQueryTest extends BaseTest {
    /**
     * One-time suite setup: registers every object name used by this test
     * class so the base setup can drop them (errors ignored if an object
     * does not exist), then delegates to {@code doBaseTestSuiteSetup()}.
     */
    @BeforeClass
    public static void doTestSuiteSetup() throws Exception {
        /* List all of the object names being used in this entire class.
         * The objects are dropped with errors ignored, so it is OK if the
         * object does not exist for a particular test.
         */
        // NOTE(review): "testDecimalArithmatic" (misspelled) is listed alongside
        // "testDecimalArithmetic" -- presumably to clean up a legacy mistyped
        // table; confirm before removing either entry.
        objDropList = new ArrayList<String>(
                Arrays.asList("table testDecimalArithmetic", "table testDecimalArithmatic", "table source", "table target"));
        doBaseTestSuiteSetup();
    }
/* @AfterClass, @Before, @After are defined in BaseTest */
    /**
     * Exercises DECIMAL upsert/insert behavior across the three supported
     * targets (Phoenix, Trafodion, SQL/MX -- selected via tgtPH/tgtTR/tgtSQ):
     * values within precision/scale are stored (with target-specific rounding
     * or truncation of excess scale), and values exceeding the declared
     * precision raise an exception on execute/commit.
     */
    @Test
    public void testDecimalUpsertValue() throws Exception {
        printTestDescription();
        try {
            // Each target gets a DDL variant: Phoenix supports DECIMAL(31,0)
            // and an unsized VARCHAR key; Trafodion/SQL/MX cap at DECIMAL(18,0)
            // and need a sized VARCHAR(128) key.
            String ddl = null;
            if (tgtPH()) ddl = "CREATE TABLE IF NOT EXISTS testDecimalArithmetic" +
                              " (pk VARCHAR NOT NULL PRIMARY KEY, " +
                              "col1 DECIMAL(31,0), col2 DECIMAL(5), col3 DECIMAL(5,2), col4 DECIMAL)";
            else if (tgtTR()) ddl = "CREATE TABLE IF NOT EXISTS testDecimalArithmetic" +
                              " (pk VARCHAR(128) NOT NULL PRIMARY KEY, " +
                              "col1 DECIMAL(18,0), col2 DECIMAL(5), col3 DECIMAL(5,2), col4 DECIMAL)";
            else if (tgtSQ()) ddl = "CREATE TABLE testDecimalArithmetic" +
                              " (pk VARCHAR(128) NOT NULL PRIMARY KEY, " +
                              "col1 DECIMAL(18,0), col2 DECIMAL(5), col3 DECIMAL(5,2), col4 DECIMAL)";
            conn.createStatement().execute(ddl);
            conn.setAutoCommit(false);
            // Test upsert correct values
            // Phoenix/Trafodion use UPSERT; SQL/MX only supports INSERT.
            String query = null;
            if (tgtPH()||tgtTR()) query = "UPSERT INTO testDecimalArithmetic(pk, col1, col2, col3, col4) VALUES(?,?,?,?,?)";
            else if (tgtSQ()) query = "INSERT INTO testDecimalArithmetic(pk, col1, col2, col3, col4) VALUES(?,?,?,?,?)";
            PreparedStatement stmt = conn.prepareStatement(query);
            stmt.setString(1, "valueOne");
            stmt.setBigDecimal(2, new BigDecimal("123456789123456789"));
            stmt.setBigDecimal(3, new BigDecimal("12345"));
            stmt.setBigDecimal(4, new BigDecimal("12.34"));
            stmt.setBigDecimal(5, new BigDecimal("12345.6789"));
            stmt.execute();
            conn.commit();
            query = "SELECT col1, col2, col3, col4 FROM testDecimalArithmetic WHERE pk = 'valueOne'";
            stmt = conn.prepareStatement(query);
            ResultSet rs = stmt.executeQuery();
            assertTrue(rs.next());
            assertEquals(new BigDecimal("123456789123456789"), rs.getBigDecimal(1));
            assertEquals(new BigDecimal("12345"), rs.getBigDecimal(2));
            assertEquals(new BigDecimal("12.34"), rs.getBigDecimal(3));
            // col4 is unscaled DECIMAL: Phoenix keeps the fraction; TR/SQ
            // round or truncate to an integer (either 12345 or 12346 accepted).
            if (tgtPH()) assertEquals(new BigDecimal("12345.6789"), rs.getBigDecimal(4));
            else if (tgtSQ()||tgtTR()) assertTrue(rs.getBigDecimal(4).equals(new BigDecimal("12345")) || rs.getBigDecimal(4).equals(new BigDecimal("12346")));
            assertFalse(rs.next());
            if (tgtPH()||tgtTR()) query = "UPSERT INTO testDecimalArithmetic(pk, col1, col2, col3) VALUES(?,?,?,?)";
            else if (tgtSQ()) query = "INSERT INTO testDecimalArithmetic(pk, col1, col2, col3) VALUES(?,?,?,?)";
            stmt = conn.prepareStatement(query);
            stmt.setString(1, "valueTwo");
            // Values at max precision with excess scale: fractional digits are
            // dropped/rounded on store, per the assertions below.
            if (tgtPH()) stmt.setBigDecimal(2, new BigDecimal("1234567890123456789012345678901.12345"));
            else if (tgtSQ()||tgtTR()) stmt.setBigDecimal(2, new BigDecimal("123456789012345678.12345"));
            stmt.setBigDecimal(3, new BigDecimal("12345.6789"));
            stmt.setBigDecimal(4, new BigDecimal("123.45678"));
            stmt.execute();
            conn.commit();
            query = "SELECT col1, col2, col3 FROM testDecimalArithmetic WHERE pk = 'valueTwo'";
            stmt = conn.prepareStatement(query);
            rs = stmt.executeQuery();
            assertTrue(rs.next());
            if (tgtPH()) assertEquals(new BigDecimal("1234567890123456789012345678901"), rs.getBigDecimal(1));
            else if (tgtSQ()||tgtTR()) assertEquals(new BigDecimal("123456789012345678"), rs.getBigDecimal(1));
            // Excess scale: accept either truncation or round-up depending on target.
            assertTrue(rs.getBigDecimal(2).equals(new BigDecimal("12345")) || rs.getBigDecimal(2).equals(new BigDecimal("12346")));
            assertTrue(rs.getBigDecimal(3).equals(new BigDecimal("123.45")) || rs.getBigDecimal(3).equals(new BigDecimal("123.46")));
            assertFalse(rs.next());
            // Test upsert incorrect values and confirm exceptions would be thrown.
            try {
                if (tgtPH()||tgtTR()) query = "UPSERT INTO testDecimalArithmetic(pk, col1, col2, col3) VALUES(?,?,?,?)";
                else if (tgtSQ()) query = "INSERT INTO testDecimalArithmetic(pk, col1, col2, col3) VALUES(?,?,?,?)";
                stmt = conn.prepareStatement(query);
                stmt.setString(1, "badValues");
                // one more than max_precision
                stmt.setBigDecimal(2, new BigDecimal("12345678901234567890123456789012"));
                stmt.setBigDecimal(3, new BigDecimal("12345"));
                stmt.setBigDecimal(4, new BigDecimal("123.45"));
                stmt.execute();
                conn.commit();
                fail("Should have caught bad values.");
            } catch (Exception e) {
                // Only Phoenix's error text is stable enough to assert on.
                if (tgtPH()) assertTrue(e.getMessage(), e.getMessage().contains("ERROR 206 (22003): The value is outside the range for the data type. value=12345678901234567890123456789012 columnName=COL1"));
                // This error message is different between T2 and T4.
                // Only make sure that we get an exception now.
                // else if (tgtSQ()||tgtTR()) assertTrue(e.getMessage(), e.getMessage().contains("*** ERROR[29188]"));
            }
            try {
                if (tgtPH()||tgtTR()) query = "UPSERT INTO testDecimalArithmetic(pk, col1, col2, col3) VALUES(?,?,?,?)";
                else if (tgtSQ()) query = "INSERT INTO testDecimalArithmetic(pk, col1, col2, col3) VALUES(?,?,?,?)";
                stmt = conn.prepareStatement(query);
                stmt.setString(1, "badValues");
                stmt.setBigDecimal(2, new BigDecimal("123456"));
                // Exceeds specified precision by 1
                stmt.setBigDecimal(3, new BigDecimal("123456"));
                stmt.setBigDecimal(4, new BigDecimal("123.45"));
                stmt.execute();
                conn.commit();
                fail("Should have caught bad values.");
            } catch (Exception e) {
                if (tgtPH()) assertTrue(e.getMessage(), e.getMessage().contains("ERROR 206 (22003): The value is outside the range for the data type. value=123456 columnName=COL2"));
                // This error message is different between T2 and T4.
                // Only make sure that we get an exception now.
                // else if (tgtSQ()||tgtTR()) assertTrue(e.getMessage(), e.getMessage().contains("*** ERROR[29188]"));
            }
        } finally {
            // Intentionally empty: table cleanup is handled by the suite-level
            // drop list in doTestSuiteSetup/doBaseTestSuiteSetup.
        }
    }
@Test
public void testDecimalUpsertSelect() throws Exception {
// Verifies UPSERT/INSERT ... SELECT between DECIMAL columns of differing
// precision/scale: compatible schemes copy values through unchanged, a
// narrower target scale rounds/chops the value, and an incompatible scheme
// raises an out-of-range error.
// NOTE(review): tgtPH() appears to select a Phoenix target (UPSERT syntax,
// "ERROR 206 (22003)" messages) while tgtTR()/tgtSQ() select other SQL
// targets -- confirm the helpers' semantics against the base class.
printTestDescription();
try {
// Create "source" with four DECIMAL columns of varying precision/scale.
String ddl = null;
if (tgtPH()) ddl = "CREATE TABLE IF NOT EXISTS source" +
" (pk VARCHAR NOT NULL PRIMARY KEY, col1 DECIMAL(5,2), col2 DECIMAL(5,1), col3 DECIMAL(5,2), col4 DECIMAL(4,4))";
else if (tgtTR()) ddl = "CREATE TABLE IF NOT EXISTS source" +
" (pk VARCHAR(128) NOT NULL PRIMARY KEY, col1 DECIMAL(5,2), col2 DECIMAL(5,1), col3 DECIMAL(5,2), col4 DECIMAL(4,4))";
else if (tgtSQ()) ddl = "CREATE TABLE source" +
" (pk VARCHAR(128) NOT NULL PRIMARY KEY, col1 DECIMAL(5,2), col2 DECIMAL(5,1), col3 DECIMAL(5,2), col4 DECIMAL(4,4))";
conn.createStatement().execute(ddl);
// Create "target"; note col3 DECIMAL(4,4) cannot hold values >= 1, which
// drives the "scheme incompatible" failure cases below.
if (tgtPH()) ddl = "CREATE TABLE IF NOT EXISTS target" +
" (pk VARCHAR NOT NULL PRIMARY KEY, col1 DECIMAL(5,1), col2 DECIMAL(5,2), col3 DECIMAL(4,4))";
else if (tgtTR()) ddl = "CREATE TABLE IF NOT EXISTS target" +
" (pk VARCHAR(128) NOT NULL PRIMARY KEY, col1 DECIMAL(5,1), col2 DECIMAL(5,2), col3 DECIMAL(4,4))";
else if (tgtSQ()) ddl = "CREATE TABLE target" +
" (pk VARCHAR(128) NOT NULL PRIMARY KEY, col1 DECIMAL(5,1), col2 DECIMAL(5,2), col3 DECIMAL(4,4))";
conn.createStatement().execute(ddl);
conn.setAutoCommit(false);
// Seed source with two rows: col1 = 100.12 and 100.34.
String query = null;
if (tgtPH()||tgtTR()) query = "UPSERT INTO source(pk, col1) VALUES(?,?)";
else if (tgtSQ()) query = "INSERT INTO source(pk, col1) VALUES(?,?)";
PreparedStatement stmt = conn.prepareStatement(query);
stmt.setString(1, "1");
stmt.setBigDecimal(2, new BigDecimal("100.12"));
stmt.execute();
conn.commit();
stmt.setString(1, "2");
stmt.setBigDecimal(2, new BigDecimal("100.34"));
stmt.execute();
conn.commit();
// Evaluated on client side.
// source and target in different tables, values scheme compatible.
// DECIMAL(5,2) -> DECIMAL(5,2): values copied verbatim.
if (tgtPH()||tgtTR()) query = "UPSERT INTO target(pk, col2) SELECT pk, col1 from source";
else if (tgtSQ()) query = "INSERT INTO target(pk, col2) SELECT pk, col1 from source";
stmt = conn.prepareStatement(query);
stmt.execute();
conn.commit();
if (tgtPH()) query = "SELECT col2 FROM target";
else if (tgtSQ()||tgtTR()) query = "SELECT col2 FROM target order by 1";
stmt = conn.prepareStatement(query);
ResultSet rs = stmt.executeQuery();
assertTrue(rs.next());
assertEquals(new BigDecimal("100.12"), rs.getBigDecimal(1));
assertTrue(rs.next());
assertEquals(new BigDecimal("100.34"), rs.getBigDecimal(1));
assertFalse(rs.next());
// source and target in different tables, values requires scale chopping.
// DECIMAL(5,2) -> DECIMAL(5,1): second fractional digit is dropped.
if (tgtPH()||tgtTR()) query = "UPSERT INTO target(pk, col1) SELECT pk, col1 from source";
else if (tgtSQ()) query = "UPDATE target SET col1=(select col1 from source where source.pk=target.pk)";
stmt = conn.prepareStatement(query);
stmt.execute();
conn.commit();
if (tgtPH()) query = "SELECT col1 FROM target";
else if (tgtSQ()||tgtTR()) query = "SELECT col1 FROM target order by 1";
stmt = conn.prepareStatement(query);
rs = stmt.executeQuery();
assertTrue(rs.next());
assertEquals(new BigDecimal("100.1"), rs.getBigDecimal(1));
assertTrue(rs.next());
assertEquals(new BigDecimal("100.3"), rs.getBigDecimal(1));
assertFalse(rs.next());
// source and target in different tables, values scheme incompatible.
// DECIMAL(5,2) value ~100 cannot fit DECIMAL(4,4); expect an error.
try {
if (tgtPH()||tgtTR()) query = "UPSERT INTO target(pk, col3) SELECT pk, col1 from source";
else if (tgtSQ()) query = "INSERT INTO target(pk, col3) SELECT pk, col1 from source";
stmt = conn.prepareStatement(query);
stmt.execute();
conn.commit();
fail("Should have caught bad upsert.");
} catch (Exception e) {
if (tgtPH()) assertTrue(e.getMessage(), e.getMessage().contains("ERROR 206 (22003): The value is outside the range for the data type. columnName=COL3"));
else if (tgtSQ()||tgtTR()) assertTrue(e.getMessage(), e.getMessage().contains("*** ERROR[8411]"));
}
// NOTE(review): the whole block below only runs for tgtPH(), so the nested
// "else if (tgtSQ())" branches inside it are dead code.
if (tgtPH()) {
// Evaluate on server side.
conn.setAutoCommit(true);
// source and target in same table, values scheme compatible.
if (tgtPH()||tgtTR()) query = "UPSERT INTO source(pk, col3) SELECT pk, col1 from source";
else if (tgtSQ()) query = "UPDATE target SET col3=(select col1 from source where source.pk=target.pk)";
stmt = conn.prepareStatement(query);
stmt.execute();
if (tgtPH()) query = "SELECT col3 FROM source";
else if (tgtSQ()||tgtTR()) query = "SELECT col3 FROM source order by 1";
stmt = conn.prepareStatement(query);
rs = stmt.executeQuery();
assertTrue(rs.next());
assertEquals(new BigDecimal("100.12"), rs.getBigDecimal(1));
assertTrue(rs.next());
assertEquals(new BigDecimal("100.34"), rs.getBigDecimal(1));
assertFalse(rs.next());
// source and target in same table, values requires scale chopping.
if (tgtPH()||tgtTR()) query = "UPSERT INTO source(pk, col2) SELECT pk, col1 from source";
else if (tgtSQ()) query = "UPDATE target SET col2=(select col1 from source where source.pk=target.pk)";
stmt = conn.prepareStatement(query);
stmt.execute();
if (tgtPH()) query = "SELECT col2 FROM source";
else if (tgtSQ()||tgtTR()) query = "SELECT col2 FROM source order by 1";
stmt = conn.prepareStatement(query);
rs = stmt.executeQuery();
assertTrue(rs.next());
assertEquals(new BigDecimal("100.1"), rs.getBigDecimal(1));
assertTrue(rs.next());
assertEquals(new BigDecimal("100.3"), rs.getBigDecimal(1));
assertFalse(rs.next());
// source and target in same table, values scheme incompatible.
// Server-side evaluation stores NULL instead of raising an error here.
if (tgtPH()||tgtTR()) query = "UPSERT INTO source(pk, col4) SELECT pk, col1 from source";
else if (tgtSQ()) query = "UPDATE target SET col4=(select col1 from source where source.pk=target.pk)";
stmt = conn.prepareStatement(query);
stmt.execute();
query = "SELECT col4 FROM source";
stmt = conn.prepareStatement(query);
rs = stmt.executeQuery();
assertTrue(rs.next());
assertNull(rs.getBigDecimal(1));
assertTrue(rs.next());
assertNull(rs.getBigDecimal(1));
assertFalse(rs.next());
}
// NOTE(review): empty finally -- "source"/"target" tables are never dropped;
// tgtSQ() targets use plain CREATE TABLE, so a rerun on the same schema may
// fail. Consider dropping the tables here (cleanup helper unknown from this
// chunk; verify base-class convention).
} finally {
}
}
@Test
public void testDecimalAveraging() throws Exception {
// Verifies AVG() over DECIMAL columns: the result scale is max(input scale, 4)
// and the precision is capped (at 38 for the tgtPH() target, per the
// assertions below). Three rows are inserted -- one large value and two
// zeros -- so the average is value/3, exercising repeating decimals.
printTestDescription();
try {
String ddl = null;
// Column widths differ per target: tgtPH() supports precision up to 38,
// the other targets max out at 18.
if (tgtPH()) ddl = "CREATE TABLE IF NOT EXISTS testDecimalArithmatic" +
" (pk VARCHAR NOT NULL PRIMARY KEY, col1 DECIMAL(31, 11), col2 DECIMAL(31,1), col3 DECIMAL(38,1))";
else if (tgtTR()) ddl = "CREATE TABLE IF NOT EXISTS testDecimalArithmatic" +
" (pk VARCHAR(128) NOT NULL PRIMARY KEY, col1 DECIMAL(18, 11), col2 DECIMAL(18,1), col3 DECIMAL(18,1))";
else if (tgtSQ()) ddl = "CREATE TABLE testDecimalArithmatic" +
" (pk VARCHAR(128) NOT NULL PRIMARY KEY, col1 DECIMAL(18, 11), col2 DECIMAL(18,1), col3 DECIMAL(18,1))";
conn.createStatement().execute(ddl);
conn.setAutoCommit(false);
String query = null;
if (tgtPH()||tgtTR()) query = "UPSERT INTO testDecimalArithmatic(pk, col1, col2, col3) VALUES(?,?,?,?)";
else if (tgtSQ()) query = "INSERT INTO testDecimalArithmatic(pk, col1, col2, col3) VALUES(?,?,?,?)";
PreparedStatement stmt = conn.prepareStatement(query);
// Row 1: near-maximum values for each column's precision.
stmt.setString(1, "1");
if (tgtPH()) {
stmt.setBigDecimal(2, new BigDecimal("99999999999999999999.1"));
stmt.setBigDecimal(3, new BigDecimal("99999999999999999999.1"));
stmt.setBigDecimal(4, new BigDecimal("9999999999999999999999999999999999999.1"));
} else if (tgtSQ()||tgtTR()) {
stmt.setBigDecimal(2, new BigDecimal("9999999.11111111111"));
stmt.setBigDecimal(3, new BigDecimal("99999999999999999.1"));
stmt.setBigDecimal(4, new BigDecimal("99999999999999999.1"));
}
stmt.execute();
conn.commit();
// Rows 2 and 3: all zeros, so each AVG is row1/3.
stmt.setString(1, "2");
stmt.setBigDecimal(2, new BigDecimal("0"));
stmt.setBigDecimal(3, new BigDecimal("0"));
stmt.setBigDecimal(4, new BigDecimal("0"));
stmt.execute();
conn.commit();
stmt.setString(1, "3");
stmt.setBigDecimal(2, new BigDecimal("0"));
stmt.setBigDecimal(3, new BigDecimal("0"));
stmt.setBigDecimal(4, new BigDecimal("0"));
stmt.execute();
conn.commit();
// Averaging
// result scale should be: max(max(ls, rs), 4).
// We are not imposing restriction on precision.
query = "SELECT avg(col1) FROM testDecimalArithmatic";
stmt = conn.prepareStatement(query);
ResultSet rs = stmt.executeQuery();
assertTrue(rs.next());
BigDecimal result = rs.getBigDecimal(1);
if (tgtPH()) assertEquals(new BigDecimal("33333333333333333333.03333333333"), result);
else if (tgtSQ()||tgtTR()) assertEquals(new BigDecimal("3333333.03703703703"), result);
// col2 has scale 1, so the tgtPH() result scale is the floor of 4.
query = "SELECT avg(col2) FROM testDecimalArithmatic";
stmt = conn.prepareStatement(query);
rs = stmt.executeQuery();
assertTrue(rs.next());
result = rs.getBigDecimal(1);
if (tgtPH()) assertEquals(new BigDecimal("33333333333333333333.0333"), result);
else if (tgtSQ()||tgtTR()) assertEquals(new BigDecimal("33333333333333333.0"), result);
// We cap our decimal to a precision of 38.
query = "SELECT avg(col3) FROM testDecimalArithmatic";
stmt = conn.prepareStatement(query);
rs = stmt.executeQuery();
assertTrue(rs.next());
result = rs.getBigDecimal(1);
if (tgtPH()) assertEquals(new BigDecimal("3333333333333333333333333333333333333"), result);
else if (tgtSQ()||tgtTR()) assertEquals(new BigDecimal("33333333333333333.0"), result);
// NOTE(review): empty finally -- table "testDecimalArithmatic" is never
// dropped (and the name is misspelled but consistently so; do not "fix" it
// without updating every query).
} finally {
}
}
@Test
public void testDecimalArithmeticWithIntAndLong() throws Exception {
// Verifies mixed-type arithmetic (+, -, *, /) between DECIMAL columns and
// INTEGER/BIGINT columns, including the precision/scale of the results and
// (for the tgtPH() target) overflow errors on multiplication of
// near-maximum values. Row "testValueOne" holds representative values; row
// "testValueTwo" holds values sized to overflow DECIMAL(38,0) on multiply.
printTestDescription();
try {
String ddl = null;
// col5 is declared DECIMAL with no precision/scale ("NO_SCALE" below).
if (tgtPH()) ddl = "CREATE TABLE IF NOT EXISTS testDecimalArithmatic" +
" (pk VARCHAR NOT NULL PRIMARY KEY, " +
"col1 DECIMAL(38,0), col2 DECIMAL(5, 2), col3 INTEGER, col4 BIGINT, col5 DECIMAL)";
else if (tgtTR()) ddl = "CREATE TABLE IF NOT EXISTS testDecimalArithmatic" +
" (pk VARCHAR(128) NOT NULL PRIMARY KEY, " +
"col1 DECIMAL(18,0), col2 DECIMAL(5, 2), col3 INTEGER, col4 BIGINT, col5 DECIMAL)";
else if (tgtSQ()) ddl = "CREATE TABLE testDecimalArithmatic" +
" (pk VARCHAR(128) NOT NULL PRIMARY KEY, " +
"col1 DECIMAL(18,0), col2 DECIMAL(5, 2), col3 INTEGER, col4 BIGINT, col5 DECIMAL)";
conn.createStatement().execute(ddl);
conn.setAutoCommit(false);
String query = null;
if (tgtPH()||tgtTR()) query = "UPSERT INTO testDecimalArithmatic(pk, col1, col2, col3, col4, col5) VALUES(?,?,?,?,?,?)";
else if (tgtSQ()) query = "INSERT INTO testDecimalArithmatic(pk, col1, col2, col3, col4, col5) VALUES(?,?,?,?,?,?)";
PreparedStatement stmt = conn.prepareStatement(query);
stmt.setString(1, "testValueOne");
if (tgtPH()) stmt.setBigDecimal(2, new BigDecimal("1234567890123456789012345678901"));
else if (tgtSQ()||tgtTR()) stmt.setBigDecimal(2, new BigDecimal("123456789012345678"));
stmt.setBigDecimal(3, new BigDecimal("123.45"));
stmt.setInt(4, 10);
stmt.setLong(5, 10L);
stmt.setBigDecimal(6, new BigDecimal("111.111"));
stmt.execute();
conn.commit();
// "testValueTwo": col1 at full 38-digit precision so that col1 * col3 /
// col1 * col4 overflow DECIMAL(38,0) on the tgtPH() target.
stmt.setString(1, "testValueTwo");
if (tgtPH()) stmt.setBigDecimal(2, new BigDecimal("12345678901234567890123456789012345678"));
else if (tgtSQ()||tgtTR()) stmt.setBigDecimal(2, new BigDecimal("123456789012345678"));
stmt.setBigDecimal(3, new BigDecimal("123.45"));
stmt.setInt(4, 10);
stmt.setLong(5, 10L);
stmt.setBigDecimal(6, new BigDecimal("123456789.0123456789"));
stmt.execute();
conn.commit();
// INT has a default precision and scale of (10, 0)
// LONG has a default precision and scale of (19, 0)
// --- addition ---
query = "SELECT col1 + col3 FROM testDecimalArithmatic WHERE pk='testValueOne'";
stmt = conn.prepareStatement(query);
ResultSet rs = stmt.executeQuery();
assertTrue(rs.next());
BigDecimal result = rs.getBigDecimal(1);
if (tgtPH()) assertEquals(new BigDecimal("1234567890123456789012345678911"), result);
else if (tgtSQ()||tgtTR()) assertEquals(new BigDecimal("123456789012345688"), result);
query = "SELECT col1 + col4 FROM testDecimalArithmatic WHERE pk='testValueOne'";
stmt = conn.prepareStatement(query);
rs = stmt.executeQuery();
assertTrue(rs.next());
result = rs.getBigDecimal(1);
if (tgtPH()) assertEquals(new BigDecimal("1234567890123456789012345678911"), result);
else if (tgtSQ()||tgtTR()) assertEquals(new BigDecimal("123456789012345688"), result);
query = "SELECT col2 + col3 FROM testDecimalArithmatic WHERE pk='testValueOne'";
stmt = conn.prepareStatement(query);
rs = stmt.executeQuery();
assertTrue(rs.next());
result = rs.getBigDecimal(1);
assertEquals(new BigDecimal("133.45"), result);
query = "SELECT col2 + col4 FROM testDecimalArithmatic WHERE pk='testValueOne'";
stmt = conn.prepareStatement(query);
rs = stmt.executeQuery();
assertTrue(rs.next());
result = rs.getBigDecimal(1);
if (tgtPH()) assertEquals(new BigDecimal("133.45"), result);
else if (tgtSQ()||tgtTR()) assertEquals(new BigDecimal("133"), result);
query = "SELECT col5 + col3 FROM testDecimalArithmatic WHERE pk='testValueOne'";
stmt = conn.prepareStatement(query);
rs = stmt.executeQuery();
assertTrue(rs.next());
result = rs.getBigDecimal(1);
if (tgtPH()) assertEquals(new BigDecimal("121.111"), result);
else if (tgtSQ()||tgtTR()) assertEquals(new BigDecimal("121"), result);
query = "SELECT col5 + col4 FROM testDecimalArithmatic WHERE pk='testValueOne'";
stmt = conn.prepareStatement(query);
rs = stmt.executeQuery();
assertTrue(rs.next());
result = rs.getBigDecimal(1);
if (tgtPH()) assertEquals(new BigDecimal("121.111"), result);
else if (tgtSQ()||tgtTR()) assertEquals(new BigDecimal("121"), result);
// --- subtraction ---
query = "SELECT col1 - col3 FROM testDecimalArithmatic WHERE pk='testValueOne'";
stmt = conn.prepareStatement(query);
rs = stmt.executeQuery();
assertTrue(rs.next());
result = rs.getBigDecimal(1);
if (tgtPH()) assertEquals(new BigDecimal("1234567890123456789012345678891"), result);
else if (tgtSQ()||tgtTR()) assertEquals(new BigDecimal("123456789012345668"), result);
query = "SELECT col1 - col4 FROM testDecimalArithmatic WHERE pk='testValueOne'";
stmt = conn.prepareStatement(query);
rs = stmt.executeQuery();
assertTrue(rs.next());
result = rs.getBigDecimal(1);
if (tgtPH()) assertEquals(new BigDecimal("1234567890123456789012345678891"), result);
else if (tgtSQ()||tgtTR()) assertEquals(new BigDecimal("123456789012345668"), result);
query = "SELECT col2 - col3 FROM testDecimalArithmatic WHERE pk='testValueOne'";
stmt = conn.prepareStatement(query);
rs = stmt.executeQuery();
assertTrue(rs.next());
result = rs.getBigDecimal(1);
assertEquals(new BigDecimal("113.45"), result);
query = "SELECT col2 - col4 FROM testDecimalArithmatic WHERE pk='testValueOne'";
stmt = conn.prepareStatement(query);
rs = stmt.executeQuery();
assertTrue(rs.next());
result = rs.getBigDecimal(1);
if (tgtPH()) assertEquals(new BigDecimal("113.45"), result);
else if (tgtSQ()||tgtTR()) assertEquals(new BigDecimal("113"), result);
query = "SELECT col5 - col3 FROM testDecimalArithmatic WHERE pk='testValueOne'";
stmt = conn.prepareStatement(query);
rs = stmt.executeQuery();
assertTrue(rs.next());
result = rs.getBigDecimal(1);
if (tgtPH()) assertEquals(new BigDecimal("101.111"), result);
else if (tgtSQ()||tgtTR()) assertEquals(new BigDecimal("101"), result);
query = "SELECT col5 - col4 FROM testDecimalArithmatic WHERE pk='testValueOne'";
stmt = conn.prepareStatement(query);
rs = stmt.executeQuery();
assertTrue(rs.next());
result = rs.getBigDecimal(1);
if (tgtPH()) assertEquals(new BigDecimal("101.111"), result);
else if (tgtSQ()||tgtTR()) assertEquals(new BigDecimal("101"), result);
// --- multiplication ---
query = "SELECT col1 * col3 FROM testDecimalArithmatic WHERE pk='testValueOne'";
stmt = conn.prepareStatement(query);
rs = stmt.executeQuery();
assertTrue(rs.next());
result = rs.getBigDecimal(1);
if (tgtPH()) assertEquals(new BigDecimal("1.234567890123456789012345678901E+31"), result);
else if (tgtSQ()||tgtTR()) assertEquals(new BigDecimal("1234567890123456780"), result);
query = "SELECT col1 * col4 FROM testDecimalArithmatic WHERE pk='testValueOne'";
stmt = conn.prepareStatement(query);
rs = stmt.executeQuery();
assertTrue(rs.next());
result = rs.getBigDecimal(1);
if (tgtPH()) assertEquals(new BigDecimal("1.234567890123456789012345678901E+31"), result);
else if (tgtSQ()||tgtTR()) assertEquals(new BigDecimal("1234567890123456780"), result);
// NOTE(review): this query duplicates the "col1 * col3" case above verbatim
// -- likely a copy-paste slip (perhaps "col2 * col3" was intended). Left
// as-is since the intended expression/expected value is unknown.
query = "SELECT col1 * col3 FROM testDecimalArithmatic WHERE pk='testValueOne'";
stmt = conn.prepareStatement(query);
rs = stmt.executeQuery();
assertTrue(rs.next());
result = rs.getBigDecimal(1);
if (tgtPH()) assertEquals(new BigDecimal("1.234567890123456789012345678901E+31"), result);
else if (tgtSQ()||tgtTR()) assertEquals(new BigDecimal("1234567890123456780"), result);
// Overflow cases: 38-digit col1 times INT/BIGINT exceeds DECIMAL(38,0).
if (tgtPH()) {
try {
query = "SELECT col1 * col3 FROM testDecimalArithmatic WHERE pk='testValueTwo'";
stmt = conn.prepareStatement(query);
rs = stmt.executeQuery();
assertTrue(rs.next());
result = rs.getBigDecimal(1);
fail("Should have caught error.");
} catch (Exception e) {
assertTrue(e.getMessage(), e.getMessage().contains("ERROR 206 (22003): The value is outside the range for the data type. DECIMAL(38,0)"));
}
try {
query = "SELECT col1 * col4 FROM testDecimalArithmatic WHERE pk='testValueTwo'";
stmt = conn.prepareStatement(query);
rs = stmt.executeQuery();
assertTrue(rs.next());
result = rs.getBigDecimal(1);
fail("Should have caught error.");
} catch (Exception e) {
assertTrue(e.getMessage(), e.getMessage().contains("ERROR 206 (22003): The value is outside the range for the data type. DECIMAL(38,0)"));
}
}
// compareTo (not equals) is used below, so trailing-zero/scale differences
// in the returned value are tolerated.
query = "SELECT col4 * col5 FROM testDecimalArithmatic WHERE pk='testValueOne'";
stmt = conn.prepareStatement(query);
rs = stmt.executeQuery();
assertTrue(rs.next());
result = rs.getBigDecimal(1);
if (tgtPH()) assertEquals(0, result.compareTo(new BigDecimal("1111.11")));
else if (tgtSQ()||tgtTR()) assertEquals(0, result.compareTo(new BigDecimal("1110")));
query = "SELECT col3 * col5 FROM testDecimalArithmatic WHERE pk='testValueOne'";
stmt = conn.prepareStatement(query);
rs = stmt.executeQuery();
assertTrue(rs.next());
result = rs.getBigDecimal(1);
if (tgtPH()) assertEquals(0, result.compareTo(new BigDecimal("1111.11")));
else if (tgtSQ()||tgtTR()) assertEquals(0, result.compareTo(new BigDecimal("1110")));
query = "SELECT col2 * col4 FROM testDecimalArithmatic WHERE pk='testValueOne'";
stmt = conn.prepareStatement(query);
rs = stmt.executeQuery();
assertTrue(rs.next());
result = rs.getBigDecimal(1);
if (tgtPH()) assertEquals(new BigDecimal("1234.5"), result);
else if (tgtSQ()||tgtTR()) assertEquals(new BigDecimal("1234"), result);
// --- division ---
// Result scale has value of 0
query = "SELECT col1 / col3 FROM testDecimalArithmatic WHERE pk='testValueOne'";
stmt = conn.prepareStatement(query);
rs = stmt.executeQuery();
assertTrue(rs.next());
result = rs.getBigDecimal(1);
if (tgtPH()) assertEquals(new BigDecimal("1.2345678901234567890123456789E+29"), result);
else if (tgtSQ()||tgtTR()) assertEquals(new BigDecimal("12345678901234567"), result);
query = "SELECT col1 / col4 FROM testDecimalArithmatic WHERE pk='testValueOne'";
stmt = conn.prepareStatement(query);
rs = stmt.executeQuery();
assertTrue(rs.next());
result = rs.getBigDecimal(1);
if (tgtPH()) assertEquals(new BigDecimal("1.2345678901234567890123456789E+29"), result);
else if (tgtSQ()||tgtTR()) assertEquals(new BigDecimal("12345678901234567"), result);
// Result scale is 2.
query = "SELECT col2 / col3 FROM testDecimalArithmatic WHERE pk='testValueOne'";
stmt = conn.prepareStatement(query);
rs = stmt.executeQuery();
assertTrue(rs.next());
result = rs.getBigDecimal(1);
if (tgtPH()) assertEquals(new BigDecimal("12.34"), result);
else if (tgtSQ()||tgtTR()) assertEquals(new BigDecimal("12.345000000000"), result);
query = "SELECT col2 / col4 FROM testDecimalArithmatic WHERE pk='testValueOne'";
stmt = conn.prepareStatement(query);
rs = stmt.executeQuery();
assertTrue(rs.next());
result = rs.getBigDecimal(1);
if (tgtPH()) assertEquals(new BigDecimal("12.34"), result);
else if (tgtSQ()||tgtTR()) assertEquals(new BigDecimal("12.345000000000000"), result);
// col5 has NO_SCALE, so the result's scale is not expected to be truncated to col5 value's scale of 4
query = "SELECT col5 / col3 FROM testDecimalArithmatic WHERE pk='testValueOne'";
stmt = conn.prepareStatement(query);
rs = stmt.executeQuery();
assertTrue(rs.next());
result = rs.getBigDecimal(1);
if (tgtPH()) assertEquals(new BigDecimal("11.1111"), result);
else if (tgtSQ()||tgtTR()) assertEquals(new BigDecimal("11.100000000"), result);
query = "SELECT col5 / col4 FROM testDecimalArithmatic WHERE pk='testValueOne'";
stmt = conn.prepareStatement(query);
rs = stmt.executeQuery();
assertTrue(rs.next());
result = rs.getBigDecimal(1);
if (tgtPH()) assertEquals(new BigDecimal("11.1111"), result);
else if (tgtSQ()||tgtTR()) assertEquals(new BigDecimal("11.100000000"), result);
// NOTE(review): empty finally -- table is never dropped; see sibling tests.
} finally {
}
}
}
|
apache/uniffle | 36,111 | integration-test/common/src/test/java/org/apache/uniffle/test/RemoteMergeShuffleWithRssClientTestWhenShuffleFlushed.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.uniffle.test;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import io.netty.buffer.ByteBuf;
import org.apache.hadoop.io.IntWritable;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Assumptions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Timeout;
import org.junit.jupiter.api.io.TempDir;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
import org.roaringbitmap.longlong.Roaring64NavigableMap;
import org.apache.uniffle.client.factory.ShuffleClientFactory;
import org.apache.uniffle.client.impl.ShuffleWriteClientImpl;
import org.apache.uniffle.client.record.Record;
import org.apache.uniffle.client.record.reader.KeyValueReader;
import org.apache.uniffle.client.record.reader.RMRecordsReader;
import org.apache.uniffle.client.record.writer.Combiner;
import org.apache.uniffle.client.record.writer.SumByKeyCombiner;
import org.apache.uniffle.common.PartitionRange;
import org.apache.uniffle.common.RemoteStorageInfo;
import org.apache.uniffle.common.ShuffleBlockInfo;
import org.apache.uniffle.common.ShuffleDataDistributionType;
import org.apache.uniffle.common.ShuffleServerInfo;
import org.apache.uniffle.common.config.RssConf;
import org.apache.uniffle.common.rpc.ServerType;
import org.apache.uniffle.common.serializer.Serializer;
import org.apache.uniffle.common.serializer.SerializerFactory;
import org.apache.uniffle.common.serializer.SerializerInstance;
import org.apache.uniffle.common.serializer.SerializerUtils;
import org.apache.uniffle.common.util.BlockIdLayout;
import org.apache.uniffle.common.util.ChecksumUtils;
import org.apache.uniffle.coordinator.CoordinatorConf;
import org.apache.uniffle.proto.RssProtos;
import org.apache.uniffle.server.ShuffleServerConf;
import org.apache.uniffle.server.buffer.ShuffleBufferType;
import org.apache.uniffle.server.storage.MultiPartLocalStorageManager;
import org.apache.uniffle.storage.util.StorageType;
import static org.apache.uniffle.coordinator.CoordinatorConf.COORDINATOR_DYNAMIC_CLIENT_CONF_ENABLED;
import static org.apache.uniffle.server.ShuffleServerConf.SERVER_LOCAL_STORAGE_MANAGER_CLASS;
import static org.apache.uniffle.server.ShuffleServerConf.SERVER_MEMORY_SHUFFLE_HIGHWATERMARK_PERCENTAGE;
import static org.apache.uniffle.server.ShuffleServerConf.SERVER_MEMORY_SHUFFLE_LOWWATERMARK_PERCENTAGE;
import static org.junit.jupiter.api.Assertions.assertEquals;
// Integration test for the remote-merge (server-side sort/merge) feature in
// the case where every shuffle buffer is flushed to local storage: the
// memory high/low watermarks are forced to 0 in setupServers().
public class RemoteMergeShuffleWithRssClientTestWhenShuffleFlushed extends ShuffleReadWriteBase {
// Shuffle and partition under test (single shuffle, single partition).
private static final int SHUFFLE_ID = 0;
private static final int PARTITION_ID = 0;
// Number of records produced per generated shuffle block.
private static final int RECORD_NUMBER = 1009;
// The single shuffle server started in setupServers() (GRPC + Netty ports).
private static ShuffleServerInfo shuffleServerInfo;
// Per-test write client; created via createClient(), closed in closeClient().
private ShuffleWriteClientImpl shuffleWriteClientImpl;
@BeforeAll
public static void setupServers(@TempDir File tmpDir) throws Exception {
  // Coordinator: disable dynamic client conf so the tests fully control
  // client-side settings.
  CoordinatorConf cConf = coordinatorConfWithoutPort();
  cConf.setBoolean(COORDINATOR_DYNAMIC_CLIENT_CONF_ENABLED, false);
  storeCoordinatorConf(cConf);

  ShuffleServerConf sConf = shuffleServerConfWithoutPort(0, tmpDir, ServerType.GRPC_NETTY);
  // Remote merge does not work with MultiPartLocalStorageManager; skip the
  // whole class when that manager is configured.
  String unsupportedManager = MultiPartLocalStorageManager.class.getName();
  Assumptions.assumeTrue(
      !sConf.get(SERVER_LOCAL_STORAGE_MANAGER_CLASS).equals(unsupportedManager),
      unsupportedManager + " is not working with remote merge feature");

  // Enable server-side merge with a tiny merged-block size and the
  // skip-list shuffle buffer.
  sConf.set(ShuffleServerConf.SERVER_MERGE_ENABLE, true);
  sConf.set(ShuffleServerConf.SERVER_MERGE_DEFAULT_MERGED_BLOCK_SIZE, "1k");
  sConf.set(ShuffleServerConf.SERVER_SHUFFLE_BUFFER_TYPE, ShuffleBufferType.SKIP_LIST);
  // Each shuffle data will be flushed!
  sConf.set(SERVER_MEMORY_SHUFFLE_HIGHWATERMARK_PERCENTAGE, 0.0);
  sConf.set(SERVER_MEMORY_SHUFFLE_LOWWATERMARK_PERCENTAGE, 0.0);
  // Keep the app alive for the whole run regardless of heartbeats.
  sConf.setLong("rss.server.app.expired.withoutHeartbeat", 10000000);
  sConf.setString("rss.storage.type", StorageType.LOCALFILE.name());
  storeShuffleServerConf(sConf);

  startServersWithRandomPorts();
  ShuffleServer server = nettyShuffleServers.get(0);
  shuffleServerInfo =
      new ShuffleServerInfo(LOCALHOST, server.getGrpcPort(), server.getNettyPort());
}
public void createClient(String clientType) {
// Builds the write client used by a single test case.
// clientType: "GRPC" or "GRPC_NETTY", taken from the parameterized test
// string. Single-replica read/write with replica-skip enabled, and minimal
// thread pools since each test drives the client from one thread.
shuffleWriteClientImpl =
new ShuffleWriteClientImpl(
ShuffleClientFactory.newWriteBuilder()
.clientType(clientType)
.retryMax(3)
.retryIntervalMax(1000)
.heartBeatThreadNum(1)
.replica(1)
.replicaWrite(1)
.replicaRead(1)
.replicaSkipEnabled(true)
.dataTransferPoolSize(1)
.dataCommitPoolSize(1)
.unregisterThreadPoolSize(10)
.unregisterRequestTimeSec(10));
}
@AfterEach
public void closeClient() {
// Releases the per-test write client created by createClient().
shuffleWriteClientImpl.close();
}
@ParameterizedTest
@ValueSource(
strings = {
"org.apache.hadoop.io.Text,org.apache.hadoop.io.IntWritable,GRPC,true",
"org.apache.hadoop.io.Text,org.apache.hadoop.io.IntWritable,GRPC_NETTY,true",
"org.apache.hadoop.io.Text,org.apache.hadoop.io.IntWritable,GRPC,false",
"org.apache.hadoop.io.Text,org.apache.hadoop.io.IntWritable,GRPC_NETTY,false",
"java.lang.String,java.lang.Integer,GRPC",
"java.lang.String,java.lang.Integer,GRPC_NETTY",
"org.apache.uniffle.common.serializer.SerializerUtils$SomeClass,java.lang.Integer,GRPC",
"org.apache.uniffle.common.serializer.SerializerUtils$SomeClass,java.lang.Integer,GRPC_NETTY",
})
@Timeout(10)
public void remoteMergeWriteReadTest(String classes) throws Exception {
// End-to-end remote-merge round trip: two simulated map tasks write
// interleaved key ranges to one partition, the server sort-merges the
// reported blocks, and the reader must observe every key in strictly
// ascending order. The parameter string encodes
// "keyClass,valueClass,clientType[,raw]".
// 1 basic parameter
final String[] classArray = classes.split(",");
final String keyClassName = classArray[0];
final String valueClassName = classArray[1];
final Class keyClass = SerializerUtils.getClassByName(keyClassName);
final Class valueClass = SerializerUtils.getClassByName(valueClassName);
final String clientType = classArray[2];
// "raw" (optional 4th token) selects raw byte-level reading; only set for
// the Writable cases above.
final boolean raw = classArray.length > 3 ? Boolean.parseBoolean(classArray[3]) : false;
final Comparator comparator = SerializerUtils.getComparator(keyClass);
final RssConf rssConf = new RssConf();
// 2 register shuffle
// The merge context tells the server how to sort: key/value classes and
// comparator. mergedBlockSize -1 falls back to the server default.
createClient(clientType);
String testAppId = "remoteMergeWriteReadTest" + classes;
shuffleWriteClientImpl.registerShuffle(
shuffleServerInfo,
testAppId,
SHUFFLE_ID,
Lists.newArrayList(new PartitionRange(0, 0)),
new RemoteStorageInfo(""),
ShuffleDataDistributionType.NORMAL,
-1,
RssProtos.MergeContext.newBuilder()
.setKeyClass(keyClass.getName())
.setValueClass(valueClass.getName())
.setComparatorClass(comparator.getClass().getName())
.setMergedBlockSize(-1)
.setMergeClassLoader("")
.build());
// 3 report shuffle result
// task 0 attempt 0 generate three blocks
// NOTE(review): the trailing args (start, interval, RECORD_NUMBER, 1)
// presumably generate RECORD_NUMBER records with keys start, start+5,
// start+10, ... so tasks 0 and 1 together cover keys 0..5*RECORD_NUMBER-1
// exactly once -- confirm against the base-class helper.
BlockIdLayout layout = BlockIdLayout.from(rssConf);
List<ShuffleBlockInfo> blocks1 = new ArrayList<>();
blocks1.add(
createShuffleBlockForRemoteMerge(
rssConf,
layout,
0,
PARTITION_ID,
Lists.newArrayList(shuffleServerInfo),
keyClass,
valueClass,
0,
5,
RECORD_NUMBER,
1));
blocks1.add(
createShuffleBlockForRemoteMerge(
rssConf,
layout,
0,
PARTITION_ID,
Lists.newArrayList(shuffleServerInfo),
keyClass,
valueClass,
2,
5,
RECORD_NUMBER,
1));
blocks1.add(
createShuffleBlockForRemoteMerge(
rssConf,
layout,
0,
PARTITION_ID,
Lists.newArrayList(shuffleServerInfo),
keyClass,
valueClass,
4,
5,
RECORD_NUMBER,
1));
shuffleWriteClientImpl.sendShuffleData(testAppId, blocks1, () -> false);
// task 1 attempt 0 generate two blocks
List<ShuffleBlockInfo> blocks2 = new ArrayList<>();
blocks2.add(
createShuffleBlockForRemoteMerge(
rssConf,
layout,
1,
PARTITION_ID,
Lists.newArrayList(shuffleServerInfo),
keyClass,
valueClass,
1,
5,
RECORD_NUMBER,
1));
blocks2.add(
createShuffleBlockForRemoteMerge(
rssConf,
layout,
1,
PARTITION_ID,
Lists.newArrayList(shuffleServerInfo),
keyClass,
valueClass,
3,
5,
RECORD_NUMBER,
1));
shuffleWriteClientImpl.sendShuffleData(testAppId, blocks2, () -> false);
Map<Integer, List<ShuffleServerInfo>> partitionToServers =
ImmutableMap.of(PARTITION_ID, Lists.newArrayList(shuffleServerInfo));
// 4 report shuffle result
// All five block ids from both tasks are reported for the one partition.
Map<Integer, Set<Long>> ptb = ImmutableMap.of(PARTITION_ID, new HashSet());
ptb.get(PARTITION_ID)
.addAll(blocks1.stream().map(s -> s.getBlockId()).collect(Collectors.toList()));
ptb.get(PARTITION_ID)
.addAll(blocks2.stream().map(s -> s.getBlockId()).collect(Collectors.toList()));
Map<ShuffleServerInfo, Map<Integer, Set<Long>>> serverToPartitionToBlockIds = new HashMap();
serverToPartitionToBlockIds.put(shuffleServerInfo, ptb);
shuffleWriteClientImpl.reportShuffleResult(
serverToPartitionToBlockIds, testAppId, SHUFFLE_ID, 0, 1);
shuffleWriteClientImpl.reportShuffleResult(
serverToPartitionToBlockIds, testAppId, SHUFFLE_ID, 1, 1);
// 5 report unique blocks
// Triggers the server-side sort/merge over exactly these block ids.
Roaring64NavigableMap uniqueBlockIds = Roaring64NavigableMap.bitmapOf();
ptb.get(PARTITION_ID).stream().forEach(block -> uniqueBlockIds.add(block));
shuffleWriteClientImpl.startSortMerge(
Sets.newHashSet(shuffleServerInfo), testAppId, SHUFFLE_ID, PARTITION_ID, uniqueBlockIds);
// 6 read result
// No combiner (null): the reader should yield every record, globally
// sorted, i.e. keys 0..5*RECORD_NUMBER-1 in order.
Map<Integer, List<ShuffleServerInfo>> serverInfoMap =
ImmutableMap.of(PARTITION_ID, Lists.newArrayList(shuffleServerInfo));
RMRecordsReader reader =
new RMRecordsReader(
testAppId,
SHUFFLE_ID,
Sets.newHashSet(PARTITION_ID),
serverInfoMap,
rssConf,
keyClass,
valueClass,
comparator,
raw,
null,
false,
null,
clientType);
reader.start();
int index = 0;
KeyValueReader keyValueReader = reader.keyValueReader();
while (keyValueReader.hasNext()) {
Record record = keyValueReader.next();
assertEquals(SerializerUtils.genData(keyClass, index), record.getKey());
assertEquals(SerializerUtils.genData(valueClass, index), record.getValue());
index++;
}
assertEquals(5 * RECORD_NUMBER, index);
shuffleWriteClientImpl.unregisterShuffle(testAppId);
}
@ParameterizedTest
@ValueSource(
strings = {
"org.apache.hadoop.io.Text,org.apache.hadoop.io.IntWritable,GRPC,true",
"org.apache.hadoop.io.Text,org.apache.hadoop.io.IntWritable,GRPC_NETTY,true",
"org.apache.hadoop.io.Text,org.apache.hadoop.io.IntWritable,GRPC,false",
"org.apache.hadoop.io.Text,org.apache.hadoop.io.IntWritable,GRPC_NETTY,false",
"java.lang.String,java.lang.Integer,GRPC",
"java.lang.String,java.lang.Integer,GRPC_NETTY",
"org.apache.uniffle.common.serializer.SerializerUtils$SomeClass,java.lang.Integer,GRPC",
"org.apache.uniffle.common.serializer.SerializerUtils$SomeClass,java.lang.Integer,GRPC_NETTY",
})
@Timeout(10)
public void remoteMergeWriteReadTestWithCombine(String classes) throws Exception {
// 1 basic parameter
final String[] classArray = classes.split(",");
final String keyClassName = classArray[0];
final String valueClassName = classArray[1];
final Class keyClass = SerializerUtils.getClassByName(keyClassName);
final Class valueClass = SerializerUtils.getClassByName(valueClassName);
final String clientType = classArray[2];
final boolean raw = classArray.length > 3 ? Boolean.parseBoolean(classArray[3]) : false;
final Comparator comparator = SerializerUtils.getComparator(keyClass);
final RssConf rssConf = new RssConf();
SerializerFactory factory = new SerializerFactory(rssConf);
Serializer serializer = factory.getSerializer(keyClass);
SerializerInstance serializerInstance = serializer.newInstance();
final Combiner combiner = new SumByKeyCombiner(raw, serializerInstance, keyClass, valueClass);
// 2 register shuffle
createClient(clientType);
String testAppId = "remoteMergeWriteReadTestWithCombine" + classes;
shuffleWriteClientImpl.registerShuffle(
shuffleServerInfo,
testAppId,
SHUFFLE_ID,
Lists.newArrayList(new PartitionRange(0, 0)),
new RemoteStorageInfo(""),
ShuffleDataDistributionType.NORMAL,
-1,
RssProtos.MergeContext.newBuilder()
.setKeyClass(keyClass.getName())
.setValueClass(valueClass.getName())
.setComparatorClass(comparator.getClass().getName())
.setMergedBlockSize(-1)
.setMergeClassLoader("")
.build());
Roaring64NavigableMap blockIdBitmap = Roaring64NavigableMap.bitmapOf();
// 3 report shuffle result
// task 0 attempt 0 generate three blocks
BlockIdLayout layout = BlockIdLayout.from(rssConf);
List<ShuffleBlockInfo> blocks1 = new ArrayList<>();
blocks1.add(
createShuffleBlockForRemoteMerge(
rssConf,
layout,
0,
PARTITION_ID,
Lists.newArrayList(shuffleServerInfo),
keyClass,
valueClass,
0,
3,
RECORD_NUMBER,
1));
blocks1.add(
createShuffleBlockForRemoteMerge(
rssConf,
layout,
0,
PARTITION_ID,
Lists.newArrayList(shuffleServerInfo),
keyClass,
valueClass,
1,
3,
RECORD_NUMBER,
1));
blocks1.add(
createShuffleBlockForRemoteMerge(
rssConf,
layout,
0,
PARTITION_ID,
Lists.newArrayList(shuffleServerInfo),
keyClass,
valueClass,
2,
3,
RECORD_NUMBER,
1));
shuffleWriteClientImpl.sendShuffleData(testAppId, blocks1, () -> false);
// task 1 attempt 0 generate two blocks
List<ShuffleBlockInfo> blocks2 = new ArrayList<>();
blocks2.add(
createShuffleBlockForRemoteMerge(
rssConf,
layout,
1,
PARTITION_ID,
Lists.newArrayList(shuffleServerInfo),
keyClass,
valueClass,
0,
3,
RECORD_NUMBER,
1));
blocks2.add(
createShuffleBlockForRemoteMerge(
rssConf,
layout,
1,
PARTITION_ID,
Lists.newArrayList(shuffleServerInfo),
keyClass,
valueClass,
2,
3,
RECORD_NUMBER,
1));
shuffleWriteClientImpl.sendShuffleData(testAppId, blocks2, () -> false);
Map<Integer, List<ShuffleServerInfo>> partitionToServers =
ImmutableMap.of(PARTITION_ID, Lists.newArrayList(shuffleServerInfo));
// 4 report shuffle result
Map<Integer, Set<Long>> ptb = ImmutableMap.of(PARTITION_ID, new HashSet());
ptb.get(PARTITION_ID)
.addAll(blocks1.stream().map(s -> s.getBlockId()).collect(Collectors.toList()));
ptb.get(PARTITION_ID)
.addAll(blocks2.stream().map(s -> s.getBlockId()).collect(Collectors.toList()));
Map<ShuffleServerInfo, Map<Integer, Set<Long>>> serverToPartitionToBlockIds = new HashMap();
serverToPartitionToBlockIds.put(shuffleServerInfo, ptb);
shuffleWriteClientImpl.reportShuffleResult(
serverToPartitionToBlockIds, testAppId, SHUFFLE_ID, 0, 1);
shuffleWriteClientImpl.reportShuffleResult(
serverToPartitionToBlockIds, testAppId, SHUFFLE_ID, 1, 1);
// 5 report unique blocks
Roaring64NavigableMap uniqueBlockIds = Roaring64NavigableMap.bitmapOf();
ptb.get(PARTITION_ID).stream().forEach(block -> uniqueBlockIds.add(block));
shuffleWriteClientImpl.startSortMerge(
Sets.newHashSet(shuffleServerInfo), testAppId, SHUFFLE_ID, PARTITION_ID, uniqueBlockIds);
// 6 read result
Map<Integer, List<ShuffleServerInfo>> serverInfoMap =
ImmutableMap.of(PARTITION_ID, Lists.newArrayList(shuffleServerInfo));
RMRecordsReader reader =
new RMRecordsReader(
testAppId,
SHUFFLE_ID,
Sets.newHashSet(PARTITION_ID),
serverInfoMap,
rssConf,
keyClass,
valueClass,
comparator,
raw,
combiner,
false,
null,
clientType);
reader.start();
int index = 0;
KeyValueReader keyValueReader = reader.keyValueReader();
while (keyValueReader.hasNext()) {
Record record = keyValueReader.next();
assertEquals(SerializerUtils.genData(keyClass, index), record.getKey());
Object value = SerializerUtils.genData(valueClass, index);
Object newValue = value;
if (index % 3 != 1) {
if (value instanceof IntWritable) {
newValue = new IntWritable(((IntWritable) value).get() * 2);
} else {
newValue = (int) value * 2;
}
}
assertEquals(newValue, record.getValue());
index++;
}
assertEquals(3 * RECORD_NUMBER, index);
shuffleWriteClientImpl.unregisterShuffle(testAppId);
}
@ParameterizedTest
@ValueSource(
strings = {
"org.apache.hadoop.io.Text,org.apache.hadoop.io.IntWritable,GRPC,true",
"org.apache.hadoop.io.Text,org.apache.hadoop.io.IntWritable,GRPC_NETTY,true",
"org.apache.hadoop.io.Text,org.apache.hadoop.io.IntWritable,GRPC,false",
"org.apache.hadoop.io.Text,org.apache.hadoop.io.IntWritable,GRPC_NETTY,false",
"java.lang.String,java.lang.Integer,GRPC",
"java.lang.String,java.lang.Integer,GRPC_NETTY",
"org.apache.uniffle.common.serializer.SerializerUtils$SomeClass,java.lang.Integer,GRPC",
"org.apache.uniffle.common.serializer.SerializerUtils$SomeClass,java.lang.Integer,GRPC_NETTY",
})
@Timeout(10)
public void remoteMergeWriteReadTestMultiPartition(String classes) throws Exception {
// 1 basic parameter
final String[] classArray = classes.split(",");
final String keyClassName = classArray[0];
final String valueClassName = classArray[1];
final Class keyClass = SerializerUtils.getClassByName(keyClassName);
final Class valueClass = SerializerUtils.getClassByName(valueClassName);
final String clientType = classArray[2];
final boolean raw = classArray.length > 3 ? Boolean.parseBoolean(classArray[3]) : false;
final Comparator comparator = SerializerUtils.getComparator(keyClass);
final RssConf rssConf = new RssConf();
// 2 register shuffle
createClient(clientType);
String testAppId = "remoteMergeWriteReadTestMultiPartition" + classes;
shuffleWriteClientImpl.registerShuffle(
shuffleServerInfo,
testAppId,
SHUFFLE_ID,
Lists.newArrayList(
new PartitionRange(PARTITION_ID, PARTITION_ID),
new PartitionRange(PARTITION_ID + 1, PARTITION_ID + 1),
new PartitionRange(PARTITION_ID + 2, PARTITION_ID + 2)),
new RemoteStorageInfo(""),
ShuffleDataDistributionType.NORMAL,
-1,
RssProtos.MergeContext.newBuilder()
.setKeyClass(keyClass.getName())
.setValueClass(valueClass.getName())
.setComparatorClass(comparator.getClass().getName())
.setMergedBlockSize(-1)
.setMergeClassLoader("")
.build());
// 3 report shuffle result
// this shuffle have three partition, which is hash by key index mode 3
// task 0 attempt 0 generate three blocks
BlockIdLayout layout = BlockIdLayout.from(rssConf);
List<ShuffleBlockInfo> blocks1 = new ArrayList<>();
blocks1.add(
createShuffleBlockForRemoteMerge(
rssConf,
layout,
0,
0,
Lists.newArrayList(shuffleServerInfo),
keyClass,
valueClass,
0,
6,
RECORD_NUMBER,
1));
blocks1.add(
createShuffleBlockForRemoteMerge(
rssConf,
layout,
0,
2,
Lists.newArrayList(shuffleServerInfo),
keyClass,
valueClass,
2,
6,
RECORD_NUMBER,
1));
blocks1.add(
createShuffleBlockForRemoteMerge(
rssConf,
layout,
0,
1,
Lists.newArrayList(shuffleServerInfo),
keyClass,
valueClass,
4,
6,
RECORD_NUMBER,
1));
shuffleWriteClientImpl.sendShuffleData(testAppId, blocks1, () -> false);
// task 1 attempt 0 generate two blocks
List<ShuffleBlockInfo> blocks2 = new ArrayList<>();
blocks2.add(
createShuffleBlockForRemoteMerge(
rssConf,
layout,
1,
1,
Lists.newArrayList(shuffleServerInfo),
keyClass,
valueClass,
1,
6,
RECORD_NUMBER,
1));
blocks2.add(
createShuffleBlockForRemoteMerge(
rssConf,
layout,
1,
0,
Lists.newArrayList(shuffleServerInfo),
keyClass,
valueClass,
3,
6,
RECORD_NUMBER,
1));
blocks2.add(
createShuffleBlockForRemoteMerge(
rssConf,
layout,
1,
2,
Lists.newArrayList(shuffleServerInfo),
keyClass,
valueClass,
5,
6,
RECORD_NUMBER,
1));
shuffleWriteClientImpl.sendShuffleData(testAppId, blocks2, () -> false);
Map<Integer, List<ShuffleServerInfo>> partitionToServers =
ImmutableMap.of(
PARTITION_ID,
Lists.newArrayList(shuffleServerInfo),
PARTITION_ID + 1,
Lists.newArrayList(shuffleServerInfo),
PARTITION_ID + 2,
Lists.newArrayList(shuffleServerInfo));
// 4 report shuffle result
Map<Integer, Set<Long>> ptb = new HashMap<>();
for (int i = PARTITION_ID; i < PARTITION_ID + 3; i++) {
final int partitionId = i;
ptb.put(partitionId, new HashSet<>());
ptb.get(partitionId)
.addAll(
blocks1.stream()
.filter(s -> s.getPartitionId() == partitionId)
.map(s -> s.getBlockId())
.collect(Collectors.toList()));
ptb.get(partitionId)
.addAll(
blocks2.stream()
.filter(s -> s.getPartitionId() == partitionId)
.map(s -> s.getBlockId())
.collect(Collectors.toList()));
}
Map<ShuffleServerInfo, Map<Integer, Set<Long>>> serverToPartitionToBlockIds = new HashMap();
serverToPartitionToBlockIds.put(shuffleServerInfo, ptb);
shuffleWriteClientImpl.reportShuffleResult(
serverToPartitionToBlockIds, testAppId, SHUFFLE_ID, 0, 1);
shuffleWriteClientImpl.reportShuffleResult(
serverToPartitionToBlockIds, testAppId, SHUFFLE_ID, 1, 1);
// 5 report unique blocks
for (int i = PARTITION_ID; i < PARTITION_ID + 3; i++) {
Roaring64NavigableMap uniqueBlockIds = Roaring64NavigableMap.bitmapOf();
ptb.get(i).stream().forEach(block -> uniqueBlockIds.add(block));
shuffleWriteClientImpl.startSortMerge(
Sets.newHashSet(shuffleServerInfo), testAppId, SHUFFLE_ID, i, uniqueBlockIds);
}
// 6 read result
Map<Integer, List<ShuffleServerInfo>> serverInfoMap =
ImmutableMap.of(
PARTITION_ID,
Lists.newArrayList(shuffleServerInfo),
PARTITION_ID + 1,
Lists.newArrayList(shuffleServerInfo),
PARTITION_ID + 2,
Lists.newArrayList(shuffleServerInfo));
RMRecordsReader reader =
new RMRecordsReader(
testAppId,
SHUFFLE_ID,
Sets.newHashSet(PARTITION_ID, PARTITION_ID + 1, PARTITION_ID + 2),
serverInfoMap,
rssConf,
keyClass,
valueClass,
comparator,
raw,
null,
false,
null,
clientType);
reader.start();
int index = 0;
KeyValueReader keyValueReader = reader.keyValueReader();
while (keyValueReader.hasNext()) {
Record record = keyValueReader.next();
assertEquals(SerializerUtils.genData(keyClass, index), record.getKey());
assertEquals(SerializerUtils.genData(valueClass, index), record.getValue());
index++;
}
assertEquals(6 * RECORD_NUMBER, index);
shuffleWriteClientImpl.unregisterShuffle(testAppId);
}
@ParameterizedTest
@ValueSource(
strings = {
"org.apache.hadoop.io.Text,org.apache.hadoop.io.IntWritable,GRPC,true",
"org.apache.hadoop.io.Text,org.apache.hadoop.io.IntWritable,GRPC_NETTY,true",
"org.apache.hadoop.io.Text,org.apache.hadoop.io.IntWritable,GRPC,false",
"org.apache.hadoop.io.Text,org.apache.hadoop.io.IntWritable,GRPC_NETTY,false",
"java.lang.String,java.lang.Integer,GRPC",
"java.lang.String,java.lang.Integer,GRPC_NETTY",
"org.apache.uniffle.common.serializer.SerializerUtils$SomeClass,java.lang.Integer,GRPC",
"org.apache.uniffle.common.serializer.SerializerUtils$SomeClass,java.lang.Integer,GRPC_NETTY",
})
@Timeout(10)
public void remoteMergeWriteReadTestMultiPartitionWithCombine(String classes) throws Exception {
// 1 basic parameter
final String[] classArray = classes.split(",");
final String keyClassName = classArray[0];
final String valueClassName = classArray[1];
final Class keyClass = SerializerUtils.getClassByName(keyClassName);
final Class valueClass = SerializerUtils.getClassByName(valueClassName);
final String clientType = classArray[2];
final boolean raw = classArray.length > 3 ? Boolean.parseBoolean(classArray[3]) : false;
final Comparator comparator = SerializerUtils.getComparator(keyClass);
final RssConf rssConf = new RssConf();
SerializerFactory factory = new SerializerFactory(rssConf);
Serializer serializer = factory.getSerializer(keyClass);
SerializerInstance serializerInstance = serializer.newInstance();
final Combiner combiner = new SumByKeyCombiner(raw, serializerInstance, keyClass, valueClass);
// 2 register shuffle
createClient(clientType);
String testAppId = "remoteMergeWriteReadTestMultiPartitionWithCombine" + classes;
shuffleWriteClientImpl.registerShuffle(
shuffleServerInfo,
testAppId,
SHUFFLE_ID,
Lists.newArrayList(
Lists.newArrayList(
new PartitionRange(PARTITION_ID, PARTITION_ID),
new PartitionRange(PARTITION_ID + 1, PARTITION_ID + 1),
new PartitionRange(PARTITION_ID + 2, PARTITION_ID + 2))),
new RemoteStorageInfo(""),
ShuffleDataDistributionType.NORMAL,
-1,
RssProtos.MergeContext.newBuilder()
.setKeyClass(keyClass.getName())
.setValueClass(valueClass.getName())
.setComparatorClass(comparator.getClass().getName())
.setMergedBlockSize(-1)
.setMergeClassLoader("")
.build());
// 3 report shuffle result
// this shuffle have three partition, which is hash by key index mode 3
// task 0 attempt 0 generate three blocks
BlockIdLayout layout = BlockIdLayout.from(rssConf);
List<ShuffleBlockInfo> blocks1 = new ArrayList<>();
blocks1.add(
createShuffleBlockForRemoteMerge(
rssConf,
layout,
0,
0,
Lists.newArrayList(shuffleServerInfo),
keyClass,
valueClass,
0,
6,
RECORD_NUMBER,
2));
blocks1.add(
createShuffleBlockForRemoteMerge(
rssConf,
layout,
0,
2,
Lists.newArrayList(shuffleServerInfo),
keyClass,
valueClass,
2,
6,
RECORD_NUMBER,
2));
blocks1.add(
createShuffleBlockForRemoteMerge(
rssConf,
layout,
0,
1,
Lists.newArrayList(shuffleServerInfo),
keyClass,
valueClass,
4,
6,
RECORD_NUMBER,
2));
shuffleWriteClientImpl.sendShuffleData(testAppId, blocks1, () -> false);
// task 1 attempt 0 generate two blocks
List<ShuffleBlockInfo> blocks2 = new ArrayList<>();
blocks2.add(
createShuffleBlockForRemoteMerge(
rssConf,
layout,
1,
1,
Lists.newArrayList(shuffleServerInfo),
keyClass,
valueClass,
1,
6,
RECORD_NUMBER,
2));
blocks2.add(
createShuffleBlockForRemoteMerge(
rssConf,
layout,
1,
0,
Lists.newArrayList(shuffleServerInfo),
keyClass,
valueClass,
3,
6,
RECORD_NUMBER,
2));
blocks2.add(
createShuffleBlockForRemoteMerge(
rssConf,
layout,
1,
2,
Lists.newArrayList(shuffleServerInfo),
keyClass,
valueClass,
5,
6,
RECORD_NUMBER,
2));
shuffleWriteClientImpl.sendShuffleData(testAppId, blocks2, () -> false);
Map<Integer, List<ShuffleServerInfo>> partitionToServers =
ImmutableMap.of(
PARTITION_ID,
Lists.newArrayList(shuffleServerInfo),
PARTITION_ID + 1,
Lists.newArrayList(shuffleServerInfo),
PARTITION_ID + 2,
Lists.newArrayList(shuffleServerInfo));
// 4 report shuffle result
Map<Integer, Set<Long>> ptb = new HashMap<>();
for (int i = PARTITION_ID; i < PARTITION_ID + 3; i++) {
final int partitionId = i;
ptb.put(partitionId, new HashSet<>());
ptb.get(partitionId)
.addAll(
blocks1.stream()
.filter(s -> s.getPartitionId() == partitionId)
.map(s -> s.getBlockId())
.collect(Collectors.toList()));
ptb.get(partitionId)
.addAll(
blocks2.stream()
.filter(s -> s.getPartitionId() == partitionId)
.map(s -> s.getBlockId())
.collect(Collectors.toList()));
}
Map<ShuffleServerInfo, Map<Integer, Set<Long>>> serverToPartitionToBlockIds = new HashMap();
serverToPartitionToBlockIds.put(shuffleServerInfo, ptb);
shuffleWriteClientImpl.reportShuffleResult(
serverToPartitionToBlockIds, testAppId, SHUFFLE_ID, 0, 1);
shuffleWriteClientImpl.reportShuffleResult(
serverToPartitionToBlockIds, testAppId, SHUFFLE_ID, 1, 1);
// 5 report unique blocks
for (int i = PARTITION_ID; i < PARTITION_ID + 3; i++) {
Roaring64NavigableMap uniqueBlockIds = Roaring64NavigableMap.bitmapOf();
ptb.get(i).stream().forEach(block -> uniqueBlockIds.add(block));
shuffleWriteClientImpl.startSortMerge(
new HashSet<>(partitionToServers.get(i)), testAppId, SHUFFLE_ID, i, uniqueBlockIds);
}
// 6 read result
Map<Integer, List<ShuffleServerInfo>> serverInfoMap =
ImmutableMap.of(
PARTITION_ID,
Lists.newArrayList(shuffleServerInfo),
PARTITION_ID + 1,
Lists.newArrayList(shuffleServerInfo),
PARTITION_ID + 2,
Lists.newArrayList(shuffleServerInfo));
RMRecordsReader reader =
new RMRecordsReader(
testAppId,
SHUFFLE_ID,
Sets.newHashSet(PARTITION_ID, PARTITION_ID + 1, PARTITION_ID + 2),
serverInfoMap,
rssConf,
keyClass,
valueClass,
comparator,
raw,
combiner,
false,
null,
clientType);
reader.start();
int index = 0;
KeyValueReader keyValueReader = reader.keyValueReader();
while (keyValueReader.hasNext()) {
Record record = keyValueReader.next();
assertEquals(SerializerUtils.genData(keyClass, index), record.getKey());
assertEquals(SerializerUtils.genData(valueClass, index * 2), record.getValue());
index++;
}
assertEquals(6 * RECORD_NUMBER, index);
shuffleWriteClientImpl.unregisterShuffle(testAppId);
}
private static final AtomicInteger ATOMIC_INT_SORTED = new AtomicInteger(0);
public static ShuffleBlockInfo createShuffleBlockForRemoteMerge(
RssConf rssConf,
BlockIdLayout blockIdLayout,
int taskAttemptId,
int partitionId,
List<ShuffleServerInfo> shuffleServerInfoList,
Class keyClass,
Class valueClass,
int start,
int interval,
int samples,
int duplicated)
throws IOException {
long blockId =
blockIdLayout.getBlockId(ATOMIC_INT_SORTED.getAndIncrement(), PARTITION_ID, taskAttemptId);
ByteBuf byteBuf =
SerializerUtils.genSortedRecordBuffer(
rssConf, keyClass, valueClass, start, interval, samples, duplicated);
ByteBuffer byteBuffer = byteBuf.nioBuffer();
return new ShuffleBlockInfo(
SHUFFLE_ID,
partitionId,
blockId,
byteBuf.readableBytes(),
ChecksumUtils.getCrc32(byteBuffer),
byteBuffer.array(),
shuffleServerInfoList,
byteBuf.readableBytes(),
0,
taskAttemptId);
}
}
|
google/j2objc | 33,829 | jre_emul/android/platform/libcore/harmony-tests/src/test/java/org/apache/harmony/tests/java/nio/charset/CharsetEncoderTest.java | /* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.harmony.tests.java.nio.charset;
import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.nio.charset.CharacterCodingException;
import java.nio.charset.Charset;
import java.nio.charset.CharsetDecoder;
import java.nio.charset.CharsetEncoder;
import java.nio.charset.CoderResult;
import java.nio.charset.CodingErrorAction;
import java.nio.charset.MalformedInputException;
import java.nio.charset.UnmappableCharacterException;
import java.nio.charset.UnsupportedCharsetException;
import java.util.Arrays;
import junit.framework.TestCase;
/**
* API unit test for java.nio.charset.CharsetEncoder
*/
public class CharsetEncoderTest extends TestCase {
    // Max and average bytes-per-char values the mock encoder is constructed with.
    static final int MAX_BYTES = 3;

    static final float AVER_BYTES = 0.5f;

    // charset for mock class
    private static final Charset MOCKCS = new MockCharset("CharsetEncoderTest_mock", new String[0]);

    // charset whose encoder is exercised by every test
    Charset cs = MOCKCS;

    // default encoder; (re)created from cs in setUp()
    CharsetEncoder encoder;

    // default for Charset abstract class
    byte[] defaultReplacement = new byte[] { 63 };

    // specific for Charset implementation subclass
    byte[] specifiedReplacement = new byte[] { 63 };

    static final String unistr = " buffer";// \u8000\u8001\u00a5\u3000\r\n";

    // expected encoded form of unistr
    byte[] unibytes = new byte[] { 32, 98, 117, 102, 102, 101, 114 };

    // unibytes prefixed with the encoder's replacement bytes; built lazily in setUp()
    byte[] unibytesWithRep = null;

    byte[] surrogate = new byte[0];
protected void setUp() throws Exception {
super.setUp();
encoder = cs.newEncoder();
if (null == unibytesWithRep) {
byte[] replacement = encoder.replacement();
unibytesWithRep = new byte[replacement.length + unibytes.length];
System.arraycopy(replacement, 0, unibytesWithRep, 0,
replacement.length);
System.arraycopy(unibytes, 0, unibytesWithRep, replacement.length,
unibytes.length);
}
}
    /*
     * @see TestCase#tearDown()
     */
    protected void tearDown() throws Exception {
        // No per-test cleanup needed; delegate to the framework.
        super.tearDown();
    }
public void testSpecificDefaultValue() {
assertTrue(encoder.averageBytesPerChar() == AVER_BYTES);
assertTrue(encoder.maxBytesPerChar() == MAX_BYTES);
}
public void testDefaultValue() {
assertEquals(CodingErrorAction.REPORT, encoder.malformedInputAction());
assertEquals(CodingErrorAction.REPORT, encoder.unmappableCharacterAction());
assertSame(encoder, encoder.onMalformedInput(CodingErrorAction.IGNORE));
assertSame(encoder, encoder.onUnmappableCharacter(CodingErrorAction.IGNORE));
if (encoder instanceof MockCharsetEncoder) {
assertTrue(Arrays.equals(encoder.replacement(), defaultReplacement));
} else {
assertTrue(Arrays.equals(encoder.replacement(), specifiedReplacement));
}
}
    /*
     * Class under test for constructor CharsetEncoder(Charset, float, float).
     * Covers the default-configured instance plus the null-charset and
     * zero/negative length failure modes.
     */
    public void testCharsetEncoderCharsetfloatfloat() {
        // default value
        encoder = new MockCharsetEncoder(cs, (float) AVER_BYTES, MAX_BYTES);
        assertSame(encoder.charset(), cs);
        assertTrue(encoder.averageBytesPerChar() == AVER_BYTES);
        assertTrue(encoder.maxBytesPerChar() == MAX_BYTES);
        assertEquals(CodingErrorAction.REPORT, encoder.malformedInputAction());
        assertEquals(CodingErrorAction.REPORT, encoder
                .unmappableCharacterAction());
        assertEquals(new String(encoder.replacement()), new String(
                defaultReplacement));
        assertSame(encoder, encoder.onMalformedInput(CodingErrorAction.IGNORE));
        assertSame(encoder, encoder
                .onUnmappableCharacter(CodingErrorAction.IGNORE));

        // normal case
        CharsetEncoder ec = new MockCharsetEncoder(cs, 1, MAX_BYTES);
        assertSame(ec.charset(), cs);
        assertEquals(1.0, ec.averageBytesPerChar(), 0);
        assertTrue(ec.maxBytesPerChar() == MAX_BYTES);

        /*
         * ------------------------ Exceptional cases -------------------------
         */
        // NullPointerException: null charset
        try {
            ec = new MockCharsetEncoder(null, 1, MAX_BYTES);
            fail("should throw null pointer exception");
        } catch (NullPointerException e) {
            // expected
        }
        ec = new MockCharsetEncoder(new MockCharset("mock", new String[0]), 1,
                MAX_BYTES);

        // Commented out since the comment is wrong since MAX_BYTES > 1
        // // OK: average length less than max length
        // ec = new MockCharsetEncoder(cs, MAX_BYTES, 1);
        // assertTrue(ec.averageBytesPerChar() == MAX_BYTES);
        // assertTrue(ec.maxBytesPerChar() == 1);

        // Illegal Argument: zero length
        try {
            ec = new MockCharsetEncoder(cs, 0, MAX_BYTES);
            fail("should throw IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            // expected
        }
        try {
            ec = new MockCharsetEncoder(cs, 1, 0);
            fail("should throw IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            // expected
        }

        // Illegal Argument: negative length
        try {
            ec = new MockCharsetEncoder(cs, -1, MAX_BYTES);
            fail("should throw IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            // expected
        }
        try {
            ec = new MockCharsetEncoder(cs, 1, -1);
            fail("should throw IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            // expected
        }
    }
    /*
     * Class under test for constructor CharsetEncoder(Charset, float, float,
     * byte[]). Covers the replacement-array variant: null/empty/oversized
     * replacement arrays plus the same length failure modes as above.
     */
    public void testCharsetEncoderCharsetfloatfloatbyteArray() {
        byte[] ba = getLegalByteArray();
        // normal case
        CharsetEncoder ec = new MockCharsetEncoder(cs, 1, MAX_BYTES, ba);
        assertSame(ec.charset(), cs);
        assertEquals(1.0, ec.averageBytesPerChar(), 0.0);
        assertTrue(ec.maxBytesPerChar() == MAX_BYTES);
        assertTrue(Arrays.equals(ba, ec.replacement()));

        /*
         * ------------------------ Exceptional cases -------------------------
         */
        // NullPointerException: null charset
        try {
            ec = new MockCharsetEncoder(null, 1, MAX_BYTES, ba);
            fail("should throw null pointer exception");
        } catch (NullPointerException e) {
            // expected
        }
        // Illegal Argument: null byte array
        try {
            ec = new MockCharsetEncoder(cs, 1, MAX_BYTES, null);
            fail("should throw IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            // expected
        }
        // Illegal Argument: empty byte array
        try {
            ec = new MockCharsetEncoder(cs, 1, MAX_BYTES, new byte[0]);
            fail("should throw IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            // expected
        }
        // Illegal Argument: byte array is longer than max length
        try {
            ec = new MockCharsetEncoder(cs, 1, MAX_BYTES, new byte[] { 1, 2,
                    MAX_BYTES, 4 });
            fail("should throw IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            // expected
        }

        // Commented out since the comment is wrong since MAX_BYTES > 1
        // This test throws IllegalArgumentException on Harmony and RI
        // // OK: average length less than max length
        // ec = new MockCharsetEncoder(cs, MAX_BYTES, ba.length, ba);
        // assertTrue(ec.averageBytesPerChar() == MAX_BYTES);
        // assertTrue(ec.maxBytesPerChar() == ba.length);

        // Illegal Argument: zero length
        try {
            ec = new MockCharsetEncoder(cs, 0, MAX_BYTES, ba);
            fail("should throw IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            // expected
        }
        try {
            ec = new MockCharsetEncoder(cs, 1, 0, ba);
            fail("should throw IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            // expected
        }

        // Illegal Argument: negative length
        try {
            ec = new MockCharsetEncoder(cs, -1, MAX_BYTES, ba);
            fail("should throw IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            // expected
        }
        try {
            ec = new MockCharsetEncoder(cs, 1, -1, ba);
            fail("should throw IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            // expected
        }
    }
/*
* Class under test for boolean canEncode(char)
*/
public void testCanEncodechar() throws CharacterCodingException {
// for non-mapped char
assertTrue(encoder.canEncode('\uc2c0'));
// surrogate char for unicode
// 1st byte: d800-dbff
// 2nd byte: dc00-dfff
assertTrue(encoder.canEncode('\ud800'));
// valid surrogate pair
assertTrue(encoder.canEncode('\udc00'));
}
    /*-----------------------------------------
     * Class under test for illegal state case
     * methods which can change internal states are two encode, flush, two canEncode, reset
     * -----------------------------------------
     */

    // Normal case: just after reset, and it also means reset can be done
    // anywhere
    public void testResetIllegalState() throws CharacterCodingException {
        // reset() must be legal from every encoder state and return the receiver
        assertSame(encoder, encoder.reset());
        encoder.canEncode('\ud901');
        assertSame(encoder, encoder.reset());
        encoder.canEncode("\ud901\udc00");
        assertSame(encoder, encoder.reset());
        encoder.encode(CharBuffer.wrap("aaa"));
        assertSame(encoder, encoder.reset());
        encoder.encode(CharBuffer.wrap("aaa"), ByteBuffer.allocate(3), false);
        assertSame(encoder, encoder.reset());
        encoder.encode(CharBuffer.wrap("aaa"), ByteBuffer.allocate(3), true);
        assertSame(encoder, encoder.reset());
    }
public void testFlushIllegalState() throws CharacterCodingException {
CharBuffer in = CharBuffer.wrap("aaa");
ByteBuffer out = ByteBuffer.allocate(5);
// Illegal state: after reset.
encoder.reset();
try {
encoder.flush(out);
fail();
} catch (IllegalStateException expected) {
}
// Normal case: after encode with endOfInput is true
assertSame(encoder, encoder.reset());
encoder.encode(in, out, true);
out.rewind();
CoderResult result = encoder.flush(out);
// Good state: flush twice
encoder.flush(out);
// Illegal state: flush after encode with endOfInput is false
assertSame(encoder, encoder.reset());
encoder.encode(in, out, false);
try {
encoder.flush(out);
fail();
} catch (IllegalStateException expected) {
}
}
public void testFlushAfterConstructing() {
ByteBuffer out = ByteBuffer.allocate(5);
//Illegal state: flush after instance created
try {
encoder.flush(out);
fail("should throw IllegalStateException");
} catch (IllegalStateException e) {
// Expected
}
}
    // test illegal states for encode facade
    public void testEncodeFacadeIllegalState() throws CharacterCodingException {
        // the encode(CharBuffer) facade is callable from every state this test
        // exercises: fresh, after another facade call, after canEncode, after
        // encode(..., true/false), and after flush
        CharBuffer in = CharBuffer.wrap("aaa");
        // Normal case: just created
        encoder.encode(in);
        in.rewind();
        // Normal case: just after encode facade
        encoder.encode(in);
        in.rewind();
        // Normal case: just after canEncode
        assertSame(encoder, encoder.reset());
        encoder.canEncode("\ud902\udc00");
        encoder.encode(in);
        in.rewind();
        assertSame(encoder, encoder.reset());
        encoder.canEncode('\ud902');
        encoder.encode(in);
        in.rewind();
        // Normal case: just after encode with that endOfInput is true
        assertSame(encoder, encoder.reset());
        encoder.encode(CharBuffer.wrap("testCanEncodeIllegalState2"),
                ByteBuffer.allocate(30), true);
        encoder.encode(in);
        in.rewind();
        // Normal case: just after encode with that endOfInput is false
        assertSame(encoder, encoder.reset());
        encoder.encode(CharBuffer.wrap("testCanEncodeIllegalState3"),
                ByteBuffer.allocate(30), false);
        encoder.encode(in);
        in.rewind();
        // Normal case: just after flush
        assertSame(encoder, encoder.reset());
        encoder.encode(CharBuffer.wrap("testCanEncodeIllegalState4"),
                ByteBuffer.allocate(30), true);
        encoder.flush(ByteBuffer.allocate(10));
        encoder.encode(in);
        in.rewind();
    }
// test illegal states for two encode method with endOfInput is true
public void testEncodeTrueIllegalState() throws CharacterCodingException {
CharBuffer in = CharBuffer.wrap("aaa");
ByteBuffer out = ByteBuffer.allocate(5);
// Normal case: just created
encoder.encode(in, out, true);
in.rewind();
out.rewind();
in.rewind();
out.rewind();
// Normal case: just after encode with that endOfInput is true
assertSame(encoder, encoder.reset());
encoder.encode(CharBuffer.wrap("testCanEncodeIllegalState2"),
ByteBuffer.allocate(30), true);
encoder.encode(in, out, true);
in.rewind();
out.rewind();
// Normal case:just after encode with that endOfInput is false
assertSame(encoder, encoder.reset());
encoder.encode(CharBuffer.wrap("testCanEncodeIllegalState3"),
ByteBuffer.allocate(30), false);
encoder.encode(in, out, true);
in.rewind();
out.rewind();
// Illegal state: just after flush
assertSame(encoder, encoder.reset());
encoder.encode(CharBuffer.wrap("testCanEncodeIllegalState4"),
ByteBuffer.allocate(30), true);
encoder.flush(ByteBuffer.allocate(10));
try {
encoder.encode(in, out, true);
fail("should illegal state");
} catch (IllegalStateException e) {
}
// Normal case: after canEncode
assertSame(encoder, encoder.reset());
encoder.canEncode("\ud906\udc00");
encoder.encode(in, out, true);
in.rewind();
out.rewind();
assertSame(encoder, encoder.reset());
encoder.canEncode('\ud905');
encoder.encode(in, out, true);
}
    // test illegal states for encode(in, out, false): legal from a fresh encoder,
    // after another endOfInput==false step and after canEncode; illegal after the
    // encode facade, after encode(..., true), and after flush
    public void testEncodeFalseIllegalState() throws CharacterCodingException {
        CharBuffer in = CharBuffer.wrap("aaa");
        ByteBuffer out = ByteBuffer.allocate(5);
        // Normal case: just created
        encoder.encode(in, out, false);
        in.rewind();
        out.rewind();
        // Illegal state: just after encode facade
        assertSame(encoder, encoder.reset());
        encoder.encode(CharBuffer.wrap("testCanEncodeIllegalState1"));
        try {
            encoder.encode(in, out, false);
            fail("should illegal state");
        } catch (IllegalStateException e) {
            // expected
        }
        // Illegal state: just after encode with that endOfInput is true
        assertSame(encoder, encoder.reset());
        encoder.encode(CharBuffer.wrap("testCanEncodeIllegalState2"),
                ByteBuffer.allocate(30), true);
        try {
            encoder.encode(in, out, false);
            fail("should illegal state");
        } catch (IllegalStateException e) {
            // expected
        }
        // Normal case:just after encode with that endOfInput is false
        assertSame(encoder, encoder.reset());
        encoder.encode(CharBuffer.wrap("testCanEncodeIllegalState3"),
                ByteBuffer.allocate(30), false);
        encoder.encode(in, out, false);
        in.rewind();
        out.rewind();
        // Illegal state: just after flush
        assertSame(encoder, encoder.reset());
        encoder.encode(CharBuffer.wrap("testCanEncodeIllegalState4"),
                ByteBuffer.allocate(30), true);
        encoder.flush(ByteBuffer.allocate(10));
        try {
            encoder.encode(in, out, false);
            fail("should illegal state");
        } catch (IllegalStateException e) {
            // expected
        }
        // Normal case: after canEncode
        assertSame(encoder, encoder.reset());
        encoder.canEncode("\ud906\udc00");
        encoder.encode(in, out, false);
        in.rewind();
        out.rewind();
        assertSame(encoder, encoder.reset());
        encoder.canEncode('\ud905');
        encoder.encode(in, out, false);
    }
// test illegal states for two canEncode methods
/*
 * Verifies in which encoder states canEncode(CharSequence)/canEncode(char)
 * may be called: legal when newly created, after flush(), and after
 * reset(); illegal in the middle of an encoding operation (after
 * encode(..., true) or encode(..., false) without an intervening reset).
 */
public void testCanEncodeIllegalState() throws CharacterCodingException {
    // Normal case: just created
    encoder.canEncode("\ud900\udc00");
    encoder.canEncode('\ud900');
    // Illegal state: just after encode with that endOfInput is true
    assertSame(encoder, encoder.reset());
    encoder.encode(CharBuffer.wrap("testCanEncodeIllegalState2"),
            ByteBuffer.allocate(30), true);
    try {
        encoder.canEncode("\ud903\udc00");
        fail("should throw illegal state exception");
    } catch (IllegalStateException e) {
        // expected: canEncode is not allowed mid-operation
    }
    // Illegal state:just after encode with that endOfInput is false
    assertSame(encoder, encoder.reset());
    encoder.encode(CharBuffer.wrap("testCanEncodeIllegalState3"),
            ByteBuffer.allocate(30), false);
    try {
        encoder.canEncode("\ud904\udc00");
        fail("should throw illegal state exception");
    } catch (IllegalStateException e) {
        // expected: canEncode is not allowed mid-operation
    }
    // Normal case: just after flush
    // (note: no reset here — encode(false) -> encode(true) is a legal
    // transition, so the pending operation is completed first)
    encoder.encode(CharBuffer.wrap("testCanEncodeIllegalState4"),
            ByteBuffer.allocate(30), true);
    encoder.flush(ByteBuffer.allocate(10));
    encoder.canEncode("\ud905\udc00");
    encoder.canEncode('\ud906');
    // Normal case: after reset again
    assertSame(encoder, encoder.reset());
    encoder.canEncode("\ud906\udc00");
    encoder.canEncode('\ud905');
}
/*
* --------------------------------- illegal state test end
* ---------------------------------
*/
/*
 * Class under test for boolean canEncode(CharSequence)
 *
 * Checks canEncode against a BMP character, a valid surrogate pair and a
 * malformed "pair" (two high surrogates). All three are expected to be
 * reported encodable by the mock encoder under test.
 */
public void testCanEncodeCharSequence() {
    // for non-mapped char
    assertTrue(encoder.canEncode("\uc2c0"));
    // surrogate char for unicode
    // 1st byte: d800-dbff
    // 2nd byte: dc00-dfff
    // valid surrogate pair
    assertTrue(encoder.canEncode("\ud800\udc00"));
    // invalid surrogate pair
    assertTrue(encoder.canEncode("\ud800\udb00"));
}
// The empty string must always be encodable.
public void test_canEncode_empty() throws Exception {
    assertTrue(encoder.canEncode(""));
}
// canEncode(null) must reject the argument with a NullPointerException.
public void test_canEncode_null() throws Exception {
    try {
        encoder.canEncode(null);
        fail();
    } catch (NullPointerException e) {
        // expected
    }
}
/*
 * Class under test for Charset charset()
 *
 * Constructs a MockCharsetEncoder over GBK. If the runtime lacks a GBK
 * charset the test is deliberately skipped (logged to stderr) rather than
 * failed; the charset() identity assertion is intentionally disabled.
 */
public void testCharset() {
    try {
        encoder = new MockCharsetEncoder(Charset.forName("gbk"), 1,
                MAX_BYTES);
        // assertSame(encoder.charset(), Charset.forName("gbk"));
    } catch (UnsupportedCharsetException e) {
        // GBK is optional on some runtimes; skip instead of failing.
        System.err
                .println("Don't support GBK encoding, ignore current test");
    }
}
/*
 * Class under test for ByteBuffer encode(CharBuffer)
 *
 * Covers the convenience facade: null input (NPE), empty input (empty
 * result at position 0), a normal round-trip against the expected byte
 * sequence, and the HARMONY-3378 regression (REPLACE action on a lone
 * high surrogate must still yield a non-null result).
 */
public void testEncodeCharBuffer() throws CharacterCodingException {
    // Null pointer
    try {
        encoder.encode(null);
        fail("should throw null pointer exception");
    } catch (NullPointerException e) {
        // expected
    }
    // empty input buffer
    ByteBuffer out = encoder.encode(CharBuffer.wrap(""));
    assertEquals(out.position(), 0);
    assertByteArray(out, new byte[0]);
    // assertByteArray(out, surrogate);
    // normal case
    out = encoder.encode(CharBuffer.wrap(unistr));
    assertEquals(out.position(), 0);
    assertByteArray(out, addSurrogate(unibytes));
    // Regression test for harmony-3378
    // NOTE: this local deliberately shadows the field 'encoder' — the
    // regression is specific to the UTF-8 encoder.
    Charset cs = Charset.forName("UTF-8");
    CharsetEncoder encoder = cs.newEncoder();
    encoder.onMalformedInput(CodingErrorAction.REPLACE);
    encoder = encoder.replaceWith(new byte[] { (byte) 0xef, (byte) 0xbf,
            (byte) 0xbd, });
    CharBuffer in = CharBuffer.wrap("\ud800");
    out = encoder.encode(in);
    assertNotNull(out);
}
/**
 * Prefixes {@code expected} with the charset-specific {@code surrogate}
 * prologue bytes (e.g. a BOM). When no prologue is configured the input
 * array is returned unchanged.
 */
private byte[] addSurrogate(byte[] expected) {
    if (surrogate.length == 0) {
        return expected;
    }
    byte[] combined = Arrays.copyOf(surrogate, surrogate.length + expected.length);
    System.arraycopy(expected, 0, combined, surrogate.length, expected.length);
    return combined;
}
/**
 * Fixture hooks for subclasses testing concrete charsets.
 *
 * @return an empty byte array (default "nothing encoded" expectation)
 */
protected byte[] getEmptyByteArray() {
    return new byte[0];
}
// Input that MockCharsetEncoder.encodeLoop reports as malformed
// (recognized by the "malform" prefix).
CharBuffer getMalformedCharBuffer() {
    return CharBuffer.wrap("malform buffer");
}
// Input that MockCharsetEncoder.encodeLoop reports as unmappable
// (recognized by the "unmap" prefix).
CharBuffer getUnmapCharBuffer() {
    return CharBuffer.wrap("unmap buffer");
}
// Input that makes MockCharsetEncoder.encodeLoop throw a RuntimeException
// (recognized by the "runtime" prefix).
CharBuffer getExceptionCharBuffer() {
    return CharBuffer.wrap("runtime buffer");
}
/*
 * Exercises the encode(CharBuffer) facade against error inputs: malformed
 * input and unmappable characters under the REPORT / IGNORE / REPLACE
 * actions, plus propagation of a RuntimeException from encodeLoop.
 */
public void testEncodeCharBufferException() throws CharacterCodingException {
    ByteBuffer out;
    CharBuffer in;
    // MalformedException:
    in = getMalformedCharBuffer();
    encoder.onMalformedInput(CodingErrorAction.REPORT);
    encoder.onUnmappableCharacter(CodingErrorAction.REPORT);
    if (in != null) {
        try {
            // regression test for Harmony-1379
            encoder.encode(in);
            fail("should throw MalformedInputException");
        } catch (MalformedInputException e) {
            // expected under REPORT
        }
        encoder.reset();
        in.rewind();
        // IGNORE: bad section dropped, remainder encoded
        encoder.onMalformedInput(CodingErrorAction.IGNORE);
        out = encoder.encode(in);
        assertByteArray(out, addSurrogate(unibytes));
        encoder.reset();
        in.rewind();
        // REPLACE: bad section substituted with the replacement bytes
        encoder.onMalformedInput(CodingErrorAction.REPLACE);
        out = encoder.encode(in);
        assertByteArray(out, addSurrogate(unibytesWithRep));
    }
    // Unmapped Exception:
    in = getUnmapCharBuffer();
    encoder.onMalformedInput(CodingErrorAction.REPORT);
    encoder.onUnmappableCharacter(CodingErrorAction.REPORT);
    if (in != null) {
        encoder.reset();
        try {
            encoder.encode(in);
            fail("should throw UnmappableCharacterException");
        } catch (UnmappableCharacterException e) {
            // expected under REPORT
        }
        encoder.reset();
        in.rewind();
        encoder.onUnmappableCharacter(CodingErrorAction.IGNORE);
        out = encoder.encode(in);
        assertByteArray(out, unibytes);
        encoder.reset();
        in.rewind();
        encoder.onUnmappableCharacter(CodingErrorAction.REPLACE);
        out = encoder.encode(in);
        assertByteArray(out, unibytesWithRep);
    }
    // RuntimeException from encodeLoop must surface to the caller
    try {
        encoder.encode(getExceptionCharBuffer());
        fail("should throw runtime exception");
    } catch (RuntimeException e) {
        // expected
    }
}
/*
 * utility method, extract given bytebuffer to a string and compare with
 * give string
 *
 * Asserts that the readable contents of {@code out} equal {@code expected}.
 * Works on a duplicate so the caller's buffer position/limit are untouched;
 * a buffer still in write mode (position != 0) is flipped first.
 */
void assertByteArray(ByteBuffer out, byte[] expected) {
    out = out.duplicate();
    if (out.position() != 0) {
        out.flip();
    }
    byte[] ba = new byte[out.limit() - out.position()];
    out.get(ba);
    // byte[] ba = out.array();
    assertTrue(Arrays.equals(ba, expected));
}
/*
 * Class under test for CoderResult encode(CharBuffer, ByteBuffer, boolean)
 *
 * Covers: null arguments, a single complete operation, a split operation
 * (endOfInput false/false/true — note the input is re-encoded three times,
 * hence the triplicated expectation), and OVERFLOW handling for both
 * endOfInput values, where encoding resumes into a larger buffer.
 */
public void testEncodeCharBufferByteBufferboolean()
        throws CharacterCodingException {
    ByteBuffer out = ByteBuffer.allocate(200);
    CharBuffer in = CharBuffer.wrap(unistr);
    // Null pointer
    try {
        encoder.encode(null, out, true);
        fail("should throw null pointer exception");
    } catch (NullPointerException e) {
        // expected
    }
    try {
        encoder.encode(in, null, true);
        fail("should throw null pointer exception");
    } catch (NullPointerException e) {
        // expected
    }
    // normal case, one complete operation
    assertSame(encoder, encoder.reset());
    in.rewind();
    out.rewind();
    assertSame(CoderResult.UNDERFLOW, encoder.encode(in, out, true));
    assertEquals(out.limit(), 200);
    assertTrue(out.position() > 0);
    assertTrue(out.remaining() > 0);
    assertEquals(out.capacity(), 200);
    assertByteArray(out, addSurrogate(unibytes));
    in.rewind();
    encoder.flush(out);
    // normal case, one complete operation, but call twice, first time set
    // endOfInput to false
    assertSame(encoder, encoder.reset());
    in.rewind();
    out = ByteBuffer.allocate(200);
    assertSame(CoderResult.UNDERFLOW, encoder.encode(in, out, false));
    assertEquals(out.limit(), 200);
    assertTrue(out.position() > 0);
    assertTrue(out.remaining() > 0);
    assertEquals(out.capacity(), 200);
    assertByteArray(out, addSurrogate(unibytes));
    in.rewind();
    assertSame(CoderResult.UNDERFLOW, encoder.encode(in, out, false));
    in.rewind();
    assertSame(CoderResult.UNDERFLOW, encoder.encode(in, out, true));
    assertEquals(out.limit(), 200);
    assertTrue(out.position() > 0);
    assertTrue(out.remaining() > 0);
    assertEquals(out.capacity(), 200);
    // the same input was encoded three times into the same buffer
    assertByteArray(out, addSurrogate(duplicateByteArray(unibytes, 3)));
    // overflow
    out = ByteBuffer.allocate(4);
    assertSame(encoder, encoder.reset());
    in.rewind();
    out.rewind();
    assertSame(CoderResult.OVERFLOW, encoder.encode(in, out, true));
    assertEquals(out.limit(), 4);
    assertEquals(out.position(), 4);
    assertEquals(out.remaining(), 0);
    assertEquals(out.capacity(), 4);
    // carry the partial output into a bigger buffer and resume
    ByteBuffer temp = ByteBuffer.allocate(200);
    out.flip();
    temp.put(out);
    out = temp;
    assertSame(CoderResult.UNDERFLOW, encoder.encode(in, out, true));
    assertEquals(out.limit(), 200);
    assertTrue(out.position() > 0);
    assertTrue(out.remaining() > 0);
    assertEquals(out.capacity(), 200);
    assertByteArray(out, addSurrogate(unibytes));
    assertSame(encoder, encoder.reset());
    in.rewind();
    out = ByteBuffer.allocate(4);
    assertSame(CoderResult.OVERFLOW, encoder.encode(in, out, false));
    assertEquals(out.limit(), 4);
    assertEquals(out.position(), 4);
    assertEquals(out.remaining(), 0);
    assertEquals(out.capacity(), 4);
    temp = ByteBuffer.allocate(200);
    out.flip();
    temp.put(out);
    out = temp;
    assertSame(CoderResult.UNDERFLOW, encoder.encode(in, out, false));
    assertEquals(out.limit(), 200);
    assertTrue(out.position() > 0);
    assertTrue(out.remaining() > 0);
    assertEquals(out.capacity(), 200);
    assertByteArray(out, addSurrogate(unibytes));
}
// void printByteBuffer(ByteBuffer buffer) {
// System.out.println("print buffer");
// if (buffer.position() != 0) {
// buffer.flip();
// }
// byte[] ba = buffer.array();
// for (int i = 0; i < ba.length; i++) {
// System.out.println(Integer.toHexString(ba[i]));
// }
// }
// Runs the shared error-handling scenario with endOfInput == false.
public void testEncodeCharBufferByteBufferbooleanExceptionFalse()
        throws CharacterCodingException {
    implTestEncodeCharBufferByteBufferbooleanException(false);
}
// Runs the shared error-handling scenario with endOfInput == true.
public void testEncodeCharBufferByteBufferbooleanExceptionTrue()
        throws CharacterCodingException {
    implTestEncodeCharBufferByteBufferbooleanException(true);
}
/**
 * Returns a new array consisting of {@code ba} repeated {@code times}
 * times back to back.
 */
private byte[] duplicateByteArray(byte[] ba, int times) {
    byte[] repeated = new byte[ba.length * times];
    for (int offset = 0; offset < repeated.length; offset += ba.length) {
        System.arraycopy(ba, 0, repeated, offset, ba.length);
    }
    return repeated;
}
/*
 * Shared scenario for encode(CharBuffer, ByteBuffer, boolean) error
 * handling: malformed and unmappable inputs under REPORT (CoderResult
 * flags), IGNORE and REPLACE (UNDERFLOW plus expected bytes, checked via
 * assertCodingErrorAction), and RuntimeException propagation.
 *
 * @param endOfInput value forwarded to every encode call
 */
protected void implTestEncodeCharBufferByteBufferbooleanException(
        boolean endOfInput) throws CharacterCodingException {
    ByteBuffer out = ByteBuffer.allocate(100);
    // MalformedException:
    CharBuffer in = getMalformedCharBuffer();
    encoder.onMalformedInput(CodingErrorAction.REPORT);
    encoder.onUnmappableCharacter(CodingErrorAction.REPORT);
    if (in != null) {
        encoder.reset();
        CoderResult r = encoder.encode(in, out, endOfInput);
        assertTrue(r.isMalformed());
        encoder.reset();
        out.clear();
        in.rewind();
        encoder.onMalformedInput(CodingErrorAction.IGNORE);
        assertSame(CoderResult.UNDERFLOW, encoder.encode(in, out,
                endOfInput));
        assertCodingErrorAction(endOfInput, out, in, unibytes);
        encoder.reset();
        out.clear();
        in.rewind();
        encoder.onMalformedInput(CodingErrorAction.REPLACE);
        assertSame(CoderResult.UNDERFLOW, encoder.encode(in, out,
                endOfInput));
        assertCodingErrorAction(endOfInput, out, in, unibytesWithRep);
    // } else {
    // System.out.println("Cannot find malformed char buffer for "
    // + cs.name());
    }
    // Unmapped Exception:
    in = getUnmapCharBuffer();
    encoder.onMalformedInput(CodingErrorAction.REPORT);
    encoder.onUnmappableCharacter(CodingErrorAction.REPORT);
    if (in != null) {
        encoder.reset();
        out.clear();
        assertTrue(encoder.encode(in, out, endOfInput).isUnmappable());
        encoder.reset();
        out.clear();
        in.rewind();
        encoder.onUnmappableCharacter(CodingErrorAction.IGNORE);
        assertSame(CoderResult.UNDERFLOW, encoder.encode(in, out,
                endOfInput));
        assertCodingErrorAction(endOfInput, out, in, unibytes);
        encoder.reset();
        out.clear();
        in.rewind();
        encoder.onUnmappableCharacter(CodingErrorAction.REPLACE);
        assertSame(CoderResult.UNDERFLOW, encoder.encode(in, out,
                endOfInput));
        assertCodingErrorAction(endOfInput, out, in, unibytesWithRep);
    // } else {
    // System.out.println("Cannot find unmapped char buffer for "
    // + cs.name());
    }
    // RuntimeException
    try {
        encoder.encode(getExceptionCharBuffer());
        fail("should throw runtime exception");
    } catch (RuntimeException e) {
        // expected
    }
}
/*
 * Completes a (possibly still-open) encode operation and checks the output.
 * With endOfInput == true the operation is already complete, so the bytes
 * are compared directly; otherwise the same input is encoded twice more
 * (false, then true) to finish the operation, and the buffer must contain
 * the expected bytes three times over.
 */
private void assertCodingErrorAction(boolean endOfInput, ByteBuffer out,
        CharBuffer in, byte[] expect) {
    if (endOfInput) {
        assertByteArray(out, addSurrogate(expect));
    } else {
        in.rewind();
        assertSame(CoderResult.UNDERFLOW, encoder.encode(in, out,
                endOfInput));
        in.rewind();
        assertSame(CoderResult.UNDERFLOW, encoder.encode(in, out, true));
        assertByteArray(out, addSurrogate(duplicateByteArray(expect, 3)));
    }
}
/*
 * Class under test for CoderResult flush(ByteBuffer)
 *
 * After the encode(CharBuffer) facade (which already flushed internally),
 * flush into a sufficiently large buffer must report UNDERFLOW.
 */
public void testFlush() throws CharacterCodingException {
    ByteBuffer out = ByteBuffer.allocate(6);
    CharBuffer in = CharBuffer.wrap("aaa");
    assertEquals(in.remaining(), 3);
    // by encode facade, so that internal state will be wrong
    encoder.encode(CharBuffer.wrap("testFlush"), ByteBuffer.allocate(20),
            true);
    assertSame(CoderResult.UNDERFLOW, encoder
            .flush(ByteBuffer.allocate(50)));
}
/*
 * test isLegalReplacement(byte[])
 *
 * A null replacement candidate must be rejected with NullPointerException.
 */
public void test_isLegalReplacement_null() {
    try {
        encoder.isLegalReplacement(null);
        fail("should throw null pointer exception");
    } catch (NullPointerException e) {
        // expected
    }
}
// The fixture's designated replacement bytes must be reported legal.
public void test_isLegalReplacement_good() {
    assertTrue(encoder.isLegalReplacement(specifiedReplacement));
}
// An arbitrary large array is legal, while the fixture's designated
// illegal array (if any) must be rejected.
public void test_isLegalReplacement_bad() {
    assertTrue(encoder.isLegalReplacement(new byte[200]));
    byte[] ba = getIllegalByteArray();
    if (ba != null) {
        assertFalse(encoder.isLegalReplacement(ba));
    }
}
// An empty candidate array is accepted here; note the RI behaves
// differently for some charsets (see comment below).
public void test_isLegalReplacement_empty_array() {
    // ISO, ASC, GB, UTF8 encoder will throw exception in RI
    // others will pass
    assertTrue(encoder.isLegalReplacement(new byte[0]));
}
/*
 * onMalformedInput: the default action is REPORT, a null action is
 * rejected with IllegalArgumentException (per the CharsetEncoder contract,
 * not NullPointerException — the old fail message was wrong), and a set
 * action is reflected by malformedInputAction().
 */
public void testOnMalformedInput() {
    assertSame(CodingErrorAction.REPORT, encoder.malformedInputAction());
    try {
        encoder.onMalformedInput(null);
        // Fixed message: the spec mandates IllegalArgumentException here.
        fail("should throw illegal argument exception");
    } catch (IllegalArgumentException e) {
        // expected
    }
    encoder.onMalformedInput(CodingErrorAction.IGNORE);
    assertSame(CodingErrorAction.IGNORE, encoder.malformedInputAction());
}
/*
 * onUnmappableCharacter: the default action is REPORT, a null action is
 * rejected with IllegalArgumentException (per the CharsetEncoder contract,
 * not NullPointerException — the old fail message was wrong), and a set
 * action is reflected by unmappableCharacterAction().
 */
public void testOnUnmappableCharacter() {
    assertSame(CodingErrorAction.REPORT, encoder
            .unmappableCharacterAction());
    try {
        encoder.onUnmappableCharacter(null);
        // Fixed message: the spec mandates IllegalArgumentException here.
        fail("should throw illegal argument exception");
    } catch (IllegalArgumentException e) {
        // expected
    }
    encoder.onUnmappableCharacter(CodingErrorAction.IGNORE);
    assertSame(CodingErrorAction.IGNORE, encoder
            .unmappableCharacterAction());
}
/*
 * replaceWith: null, empty, and oversized candidates are rejected with
 * IllegalArgumentException (the old fail messages wrongly said "null
 * pointer exception"); a legal candidate is installed and returned by
 * replacement(); an illegal candidate is rejected.
 *
 * Bug fix: the final try block previously called
 * encoder.replaceWith(new byte[100]) instead of replaceWith(nr), so the
 * value returned by getIllegalByteArray() was assigned but never tested.
 */
public void testReplacement() {
    try {
        encoder.replaceWith(null);
        fail("should throw illegal argument exception");
    } catch (IllegalArgumentException e) {
        // expected
    }
    try {
        encoder.replaceWith(new byte[0]);
        fail("should throw illegal argument exception");
    } catch (IllegalArgumentException e) {
        // expected
    }
    try {
        encoder.replaceWith(new byte[100]);
        fail("should throw illegal argument exception");
    } catch (IllegalArgumentException e) {
        // expected: longer than maxBytesPerChar
    }
    byte[] nr = getLegalByteArray();
    assertSame(encoder, encoder.replaceWith(nr));
    assertTrue(Arrays.equals(nr, encoder.replacement()));
    nr = getIllegalByteArray();
    try {
        encoder.replaceWith(nr); // was: replaceWith(new byte[100])
        fail();
    } catch (IllegalArgumentException e) {
        // expected: illegal replacement must be rejected
    }
}
// A replacement candidate that every encoder under test accepts.
protected byte[] getLegalByteArray() {
    return new byte[] { 'a' };
}
// A replacement candidate rejected by the mock encoder: length 155 is the
// magic number MockCharsetEncoder.isLegalReplacement treats as illegal.
protected byte[] getIllegalByteArray() {
    return new byte[155];
}
/*
 * Mock subclass of CharsetEncoder For protected method test
 *
 * encodeLoop recognizes three magic input prefixes ("malform", "unmap",
 * "runtime") to simulate error results, and otherwise narrows each char
 * to one byte. implFlush records that a flush happened and simulates
 * OVERFLOW when fewer than 5 bytes remain in the output buffer.
 */
public static class MockCharsetEncoder extends CharsetEncoder {
    // set by implFlush; read-and-cleared via isFlushed()
    boolean flushed = false;
    // Returns whether a flush occurred since the last call, clearing the flag.
    public boolean isFlushed() {
        boolean result = flushed;
        flushed = false;
        return result;
    }
    public boolean isLegalReplacement(byte[] ba) {
        if (ba.length == 155) {// specified magic number, return false
            return false;
        }
        return super.isLegalReplacement(ba);
    }
    public MockCharsetEncoder(Charset cs, float aver, float max) {
        super(cs, aver, max);
    }
    public MockCharsetEncoder(Charset cs, float aver, float max,
            byte[] replacement) {
        super(cs, aver, max, replacement);
    }
    protected CoderResult encodeLoop(CharBuffer in, ByteBuffer out) {
        int inPosition = in.position();
        char[] input = new char[in.remaining()];
        in.get(input);
        String result = new String(input);
        if (result.startsWith("malform")) {
            // reset the cursor to the error position
            in.position(inPosition);
            // in.position(0);
            // set the error length
            return CoderResult.malformedForLength("malform".length());
        } else if (result.startsWith("unmap")) {
            // reset the cursor to the error position
            in.position(inPosition);
            // in.position(0);
            // set the error length
            return CoderResult.unmappableForLength("unmap".length());
        } else if (result.startsWith("runtime")) {
            // reset the cursor to the error position
            in.position(0);
            // set the error length
            throw new RuntimeException("runtime");
        }
        int inLeft = input.length;
        int outLeft = out.remaining();
        CoderResult r = CoderResult.UNDERFLOW;
        int length = inLeft;
        if (outLeft < inLeft) {
            // not enough room: encode what fits and report OVERFLOW,
            // leaving the input cursor at the first unconsumed char
            r = CoderResult.OVERFLOW;
            length = outLeft;
            in.position(inPosition + outLeft);
        }
        for (int i = 0; i < length; i++) {
            // naive 1-char-to-1-byte narrowing (test charset only)
            out.put((byte) input[i]);
        }
        return r;
    }
    protected CoderResult implFlush(ByteBuffer out) {
        CoderResult result = super.implFlush(out);
        int length = 0;
        if (out.remaining() >= 5) {
            length = 5;
            result = CoderResult.UNDERFLOW;
            flushed = true;
            // for (int i = 0; i < length; i++) {
            // out.put((byte)'f');
            // }
        } else {
            // simulated OVERFLOW when the buffer is nearly full
            length = out.remaining();
            result = CoderResult.OVERFLOW;
        }
        return result;
    }
    protected void implReplaceWith(byte[] ba) {
        assertTrue(Arrays.equals(ba, replacement()));
    }
}
/*
 * mock charset for test encoder initialization
 *
 * Contains nothing (contains() always false) and hands out the mock
 * encoder/decoder defined by these test classes.
 */
public static class MockCharset extends Charset {
    protected MockCharset(String arg0, String[] arg1) {
        super(arg0, arg1);
    }
    public boolean contains(Charset arg0) {
        return false;
    }
    public CharsetDecoder newDecoder() {
        return new CharsetDecoderTest.MockCharsetDecoder(this,
                (float) AVER_BYTES, MAX_BYTES);
    }
    public CharsetEncoder newEncoder() {
        return new MockCharsetEncoder(this, (float) AVER_BYTES, MAX_BYTES);
    }
}
}
|
apache/hbase | 36,061 | hbase-client/src/main/java/org/apache/hadoop/hbase/client/AdminOverAsyncAdmin.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.client;
import static org.apache.hadoop.hbase.client.ConnectionUtils.setCoprocessorError;
import static org.apache.hadoop.hbase.util.FutureUtils.get;
import java.io.IOException;
import java.util.Arrays;
import java.util.EnumSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Future;
import java.util.regex.Pattern;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CacheEvictionStats;
import org.apache.hadoop.hbase.ClusterMetrics;
import org.apache.hadoop.hbase.ClusterMetrics.Option;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.NamespaceNotFoundException;
import org.apache.hadoop.hbase.RegionMetrics;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableExistsException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.client.replication.TableCFs;
import org.apache.hadoop.hbase.client.security.SecurityCapability;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
import org.apache.hadoop.hbase.net.Address;
import org.apache.hadoop.hbase.quotas.QuotaFilter;
import org.apache.hadoop.hbase.quotas.QuotaSettings;
import org.apache.hadoop.hbase.quotas.SpaceQuotaSnapshotView;
import org.apache.hadoop.hbase.regionserver.wal.FailedLogCloseException;
import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
import org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
import org.apache.hadoop.hbase.replication.SyncReplicationState;
import org.apache.hadoop.hbase.rsgroup.RSGroupInfo;
import org.apache.hadoop.hbase.security.access.GetUserPermissionsRequest;
import org.apache.hadoop.hbase.security.access.Permission;
import org.apache.hadoop.hbase.security.access.UserPermission;
import org.apache.hadoop.hbase.snapshot.HBaseSnapshotException;
import org.apache.hadoop.hbase.snapshot.RestoreSnapshotException;
import org.apache.hadoop.hbase.snapshot.SnapshotCreationException;
import org.apache.hadoop.hbase.snapshot.UnknownSnapshotException;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.com.google.protobuf.Descriptors.MethodDescriptor;
import org.apache.hbase.thirdparty.com.google.protobuf.Message;
import org.apache.hbase.thirdparty.com.google.protobuf.RpcCallback;
import org.apache.hbase.thirdparty.com.google.protobuf.RpcChannel;
import org.apache.hbase.thirdparty.com.google.protobuf.RpcController;
import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;
/**
* The {@link Admin} implementation which is based on an {@link AsyncAdmin}.
*/
@InterfaceAudience.Private
class AdminOverAsyncAdmin implements Admin {
private static final Logger LOG = LoggerFactory.getLogger(AdminOverAsyncAdmin.class);
// set by abort(), reported by isAborted(); volatile for cross-thread visibility
private volatile boolean aborted = false;
// owning connection, exposed via getConnection()
private final Connection conn;
// the async admin every blocking call delegates to
private final RawAsyncHBaseAdmin admin;
private final int operationTimeout;
private final int syncWaitTimeout;
/**
 * Wraps a {@link RawAsyncHBaseAdmin} behind the blocking {@code Admin}
 * interface, reading both timeouts from the connection's configuration.
 */
public AdminOverAsyncAdmin(Connection conn, RawAsyncHBaseAdmin admin) {
  this.conn = conn;
  this.admin = admin;
  this.operationTimeout = conn.getConfiguration().getInt(
    HConstants.HBASE_CLIENT_OPERATION_TIMEOUT, HConstants.DEFAULT_HBASE_CLIENT_OPERATION_TIMEOUT);
  this.syncWaitTimeout =
    conn.getConfiguration().getInt("hbase.client.sync.wait.timeout.msec", 10 * 60000); // 10min
}
// Timeouts captured from the configuration at construction time.
@Override
public int getOperationTimeout() {
  return operationTimeout;
}
@Override
public int getSyncWaitTimeout() {
  return syncWaitTimeout;
}
/**
 * Records an abort request. This facade owns no resources of its own, so
 * aborting only logs the reason and flips the flag read by
 * {@link #isAborted()}.
 */
@Override
public void abort(String why, Throwable e) {
  // fixed typo in log message: "becasue" -> "because"
  LOG.warn("Aborting because of {}", why, e);
  this.aborted = true;
}
// Reports whether abort() has been called on this instance.
@Override
public boolean isAborted() {
  return aborted;
}
// Exposes the connection this admin was created from.
@Override
public Connection getConnection() {
  return conn;
}
// ---------------------------------------------------------------------
// Table existence / listing / descriptor operations. Each method blocks
// on the corresponding AsyncAdmin call via FutureUtils.get.
// ---------------------------------------------------------------------
@Override
public boolean tableExists(TableName tableName) throws IOException {
  return get(admin.tableExists(tableName));
}
@Override
public List<TableDescriptor> listTableDescriptors() throws IOException {
  return get(admin.listTableDescriptors());
}
@Override
public List<TableDescriptor> listTableDescriptors(boolean includeSysTables) throws IOException {
  return get(admin.listTableDescriptors(includeSysTables));
}
@Override
public List<TableDescriptor> listTableDescriptors(Pattern pattern, boolean includeSysTables)
  throws IOException {
  return get(admin.listTableDescriptors(pattern, includeSysTables));
}
@Override
public List<TableDescriptor> listTableDescriptorsByState(boolean isEnabled) throws IOException {
  return get(admin.listTableDescriptorsByState(isEnabled));
}
@Override
public TableName[] listTableNames() throws IOException {
  // Admin returns arrays where AsyncAdmin returns lists
  return get(admin.listTableNames()).toArray(new TableName[0]);
}
@Override
public TableName[] listTableNames(Pattern pattern, boolean includeSysTables) throws IOException {
  return get(admin.listTableNames(pattern, includeSysTables)).toArray(new TableName[0]);
}
@Override
public List<TableName> listTableNamesByState(boolean isEnabled) throws IOException {
  return get(admin.listTableNamesByState(isEnabled));
}
@Override
public TableDescriptor getDescriptor(TableName tableName)
  throws TableNotFoundException, IOException {
  return get(admin.getDescriptor(tableName));
}
@Override
public void createTable(TableDescriptor desc, byte[] startKey, byte[] endKey, int numRegions)
  throws IOException {
  get(admin.createTable(desc, startKey, endKey, numRegions));
}
// ---------------------------------------------------------------------
// Async table and column-family operations. These return the AsyncAdmin
// future directly instead of blocking on it.
// ---------------------------------------------------------------------
@Override
public Future<Void> createTableAsync(TableDescriptor desc) throws IOException {
  return admin.createTable(desc);
}
@Override
public Future<Void> createTableAsync(TableDescriptor desc, byte[][] splitKeys)
  throws IOException {
  return admin.createTable(desc, splitKeys);
}
@Override
public Future<Void> deleteTableAsync(TableName tableName) throws IOException {
  return admin.deleteTable(tableName);
}
@Override
public Future<Void> truncateTableAsync(TableName tableName, boolean preserveSplits)
  throws IOException {
  return admin.truncateTable(tableName, preserveSplits);
}
@Override
public Future<Void> enableTableAsync(TableName tableName) throws IOException {
  return admin.enableTable(tableName);
}
@Override
public Future<Void> disableTableAsync(TableName tableName) throws IOException {
  return admin.disableTable(tableName);
}
@Override
public boolean isTableEnabled(TableName tableName) throws IOException {
  return get(admin.isTableEnabled(tableName));
}
@Override
public boolean isTableDisabled(TableName tableName) throws IOException {
  return get(admin.isTableDisabled(tableName));
}
@Override
public boolean isTableAvailable(TableName tableName) throws IOException {
  return get(admin.isTableAvailable(tableName));
}
@Override
public Future<Void> addColumnFamilyAsync(TableName tableName, ColumnFamilyDescriptor columnFamily)
  throws IOException {
  return admin.addColumnFamily(tableName, columnFamily);
}
@Override
public Future<Void> deleteColumnFamilyAsync(TableName tableName, byte[] columnFamily)
  throws IOException {
  return admin.deleteColumnFamily(tableName, columnFamily);
}
@Override
public Future<Void> modifyColumnFamilyAsync(TableName tableName,
  ColumnFamilyDescriptor columnFamily) throws IOException {
  return admin.modifyColumnFamily(tableName, columnFamily);
}
@Override
public Future<Void> modifyColumnFamilyStoreFileTrackerAsync(TableName tableName, byte[] family,
  String dstSFT) throws IOException {
  return admin.modifyColumnFamilyStoreFileTracker(tableName, family, dstSFT);
}
// ---------------------------------------------------------------------
// Region listing, flush and compaction operations; all block on the
// corresponding AsyncAdmin call.
// ---------------------------------------------------------------------
@Override
public List<RegionInfo> getRegions(ServerName serverName) throws IOException {
  return get(admin.getRegions(serverName));
}
@Override
public void flush(TableName tableName) throws IOException {
  get(admin.flush(tableName));
}
@Override
public void flush(TableName tableName, byte[] columnFamily) throws IOException {
  get(admin.flush(tableName, columnFamily));
}
@Override
public void flush(TableName tableName, List<byte[]> columnFamilies) throws IOException {
  get(admin.flush(tableName, columnFamilies));
}
@Override
public void flushRegion(byte[] regionName) throws IOException {
  get(admin.flushRegion(regionName));
}
@Override
public void flushRegion(byte[] regionName, byte[] columnFamily) throws IOException {
  get(admin.flushRegion(regionName, columnFamily));
}
@Override
public void flushRegionServer(ServerName serverName) throws IOException {
  get(admin.flushRegionServer(serverName));
}
@Override
public void compact(TableName tableName) throws IOException {
  get(admin.compact(tableName));
}
@Override
public void compactRegion(byte[] regionName) throws IOException {
  get(admin.compactRegion(regionName));
}
@Override
public void compact(TableName tableName, byte[] columnFamily) throws IOException {
  get(admin.compact(tableName, columnFamily));
}
@Override
public void compactRegion(byte[] regionName, byte[] columnFamily) throws IOException {
  get(admin.compactRegion(regionName, columnFamily));
}
@Override
public void compact(TableName tableName, CompactType compactType)
  throws IOException, InterruptedException {
  get(admin.compact(tableName, compactType));
}
@Override
public void compact(TableName tableName, byte[] columnFamily, CompactType compactType)
  throws IOException, InterruptedException {
  get(admin.compact(tableName, columnFamily, compactType));
}
@Override
public void majorCompact(TableName tableName) throws IOException {
  get(admin.majorCompact(tableName));
}
@Override
public void majorCompactRegion(byte[] regionName) throws IOException {
  get(admin.majorCompactRegion(regionName));
}
@Override
public void majorCompact(TableName tableName, byte[] columnFamily) throws IOException {
  get(admin.majorCompact(tableName, columnFamily));
}
@Override
public void majorCompactRegion(byte[] regionName, byte[] columnFamily) throws IOException {
  get(admin.majorCompactRegion(regionName, columnFamily));
}
@Override
public void majorCompact(TableName tableName, CompactType compactType)
  throws IOException, InterruptedException {
  get(admin.majorCompact(tableName, compactType));
}
@Override
public void majorCompact(TableName tableName, byte[] columnFamily, CompactType compactType)
  throws IOException, InterruptedException {
  get(admin.majorCompact(tableName, columnFamily, compactType));
}
@Override
public Map<ServerName, Boolean> compactionSwitch(boolean switchState,
  List<String> serverNamesList) throws IOException {
  return get(admin.compactionSwitch(switchState, serverNamesList));
}
@Override
public void compactRegionServer(ServerName serverName) throws IOException {
  get(admin.compactRegionServer(serverName));
}
@Override
public void majorCompactRegionServer(ServerName serverName) throws IOException {
  get(admin.majorCompactRegionServer(serverName));
}
// ---------------------------------------------------------------------
// Region assignment, balancer, normalizer and chore switches; all block
// on the corresponding AsyncAdmin call.
// ---------------------------------------------------------------------
@Override
public void move(byte[] encodedRegionName) throws IOException {
  get(admin.move(encodedRegionName));
}
@Override
public void move(byte[] encodedRegionName, ServerName destServerName) throws IOException {
  get(admin.move(encodedRegionName, destServerName));
}
@Override
public void assign(byte[] regionName) throws IOException {
  get(admin.assign(regionName));
}
@Override
public void unassign(byte[] regionName) throws IOException {
  get(admin.unassign(regionName));
}
@Override
public void offline(byte[] regionName) throws IOException {
  get(admin.offline(regionName));
}
@Override
public boolean balancerSwitch(boolean onOrOff, boolean synchronous) throws IOException {
  return get(admin.balancerSwitch(onOrOff, synchronous));
}
@Override
public BalanceResponse balance(BalanceRequest request) throws IOException {
  return get(admin.balance(request));
}
@Override
public boolean balance() throws IOException {
  return get(admin.balance());
}
@Override
public boolean balance(boolean force) throws IOException {
  return get(admin.balance(force));
}
@Override
public boolean isBalancerEnabled() throws IOException {
  return get(admin.isBalancerEnabled());
}
@Override
public CacheEvictionStats clearBlockCache(TableName tableName) throws IOException {
  return get(admin.clearBlockCache(tableName));
}
@Override
public boolean normalize(NormalizeTableFilterParams ntfp) throws IOException {
  return get(admin.normalize(ntfp));
}
@Override
public boolean isNormalizerEnabled() throws IOException {
  return get(admin.isNormalizerEnabled());
}
@Override
public boolean normalizerSwitch(boolean on) throws IOException {
  return get(admin.normalizerSwitch(on));
}
@Override
public boolean catalogJanitorSwitch(boolean onOrOff) throws IOException {
  return get(admin.catalogJanitorSwitch(onOrOff));
}
@Override
public int runCatalogJanitor() throws IOException {
  return get(admin.runCatalogJanitor());
}
@Override
public boolean isCatalogJanitorEnabled() throws IOException {
  return get(admin.isCatalogJanitorEnabled());
}
@Override
public boolean cleanerChoreSwitch(boolean onOrOff) throws IOException {
  return get(admin.cleanerChoreSwitch(onOrOff));
}
@Override
public boolean runCleanerChore() throws IOException {
  return get(admin.runCleanerChore());
}
@Override
public boolean isCleanerChoreEnabled() throws IOException {
  return get(admin.isCleanerChoreEnabled());
}
@Override
public Future<Void> mergeRegionsAsync(byte[][] nameOfRegionsToMerge, boolean forcible)
throws IOException {
return admin.mergeRegions(Arrays.asList(nameOfRegionsToMerge), forcible);
}
@Override
public void split(TableName tableName) throws IOException {
get(admin.split(tableName));
}
@Override
public void split(TableName tableName, byte[] splitPoint) throws IOException {
get(admin.split(tableName, splitPoint));
}
@Override
public Future<Void> splitRegionAsync(byte[] regionName) throws IOException {
return admin.splitRegion(regionName);
}
@Override
public Future<Void> splitRegionAsync(byte[] regionName, byte[] splitPoint) throws IOException {
return admin.splitRegion(regionName, splitPoint);
}
@Override
public void truncateRegion(byte[] regionName) throws IOException {
get(admin.truncateRegion(regionName));
}
@Override
public Future<Void> truncateRegionAsync(byte[] regionName) {
return admin.truncateRegion(regionName);
}
@Override
public Future<Void> modifyTableAsync(TableDescriptor td) throws IOException {
return modifyTableAsync(td, true);
}
@Override
public Future<Void> modifyTableAsync(TableDescriptor td, boolean reopenRegions)
throws IOException {
return admin.modifyTable(td, reopenRegions);
}
@Override
public Future<Void> modifyTableStoreFileTrackerAsync(TableName tableName, String dstSFT)
throws IOException {
return admin.modifyTableStoreFileTracker(tableName, dstSFT);
}
@Override
public void shutdown() throws IOException {
get(admin.shutdown());
}
@Override
public void stopMaster() throws IOException {
get(admin.stopMaster());
}
@Override
public boolean isMasterInMaintenanceMode() throws IOException {
return get(admin.isMasterInMaintenanceMode());
}
@Override
public void stopRegionServer(String hostnamePort) throws IOException {
get(admin.stopRegionServer(ServerName.valueOf(hostnamePort, 0)));
}
@Override
public ClusterMetrics getClusterMetrics(EnumSet<Option> options) throws IOException {
return get(admin.getClusterMetrics(options));
}
@Override
public List<RegionMetrics> getRegionMetrics(ServerName serverName) throws IOException {
return get(admin.getRegionMetrics(serverName));
}
@Override
public List<RegionMetrics> getRegionMetrics(ServerName serverName, TableName tableName)
throws IOException {
return get(admin.getRegionMetrics(serverName, tableName));
}
@Override
public Configuration getConfiguration() {
return conn.getConfiguration();
}
// ---------------------------------------------------------------------
// Namespace, snapshot, procedure and quota delegates. Sync variants wait
// on the async admin's future via get(...); *Async variants hand the
// future straight back to the caller.
// ---------------------------------------------------------------------
@Override
public Future<Void> createNamespaceAsync(NamespaceDescriptor descriptor) throws IOException {
return admin.createNamespace(descriptor);
}
@Override
public Future<Void> modifyNamespaceAsync(NamespaceDescriptor descriptor) throws IOException {
return admin.modifyNamespace(descriptor);
}
@Override
public Future<Void> deleteNamespaceAsync(String name) throws IOException {
return admin.deleteNamespace(name);
}
@Override
public NamespaceDescriptor getNamespaceDescriptor(String name)
throws NamespaceNotFoundException, IOException {
return get(admin.getNamespaceDescriptor(name));
}
@Override
public String[] listNamespaces() throws IOException {
// Sync API exposes arrays; the async API returns lists, hence toArray.
return get(admin.listNamespaces()).toArray(new String[0]);
}
@Override
public NamespaceDescriptor[] listNamespaceDescriptors() throws IOException {
return get(admin.listNamespaceDescriptors()).toArray(new NamespaceDescriptor[0]);
}
@Override
public List<TableDescriptor> listTableDescriptorsByNamespace(byte[] name) throws IOException {
// Sync API takes the namespace as bytes; async API takes a String.
return get(admin.listTableDescriptorsByNamespace(Bytes.toString(name)));
}
@Override
public TableName[] listTableNamesByNamespace(String name) throws IOException {
return get(admin.listTableNamesByNamespace(name)).toArray(new TableName[0]);
}
@Override
public List<RegionInfo> getRegions(TableName tableName) throws IOException {
return get(admin.getRegions(tableName));
}
@Override
public void close() {
// do nothing, AsyncAdmin is not a Closeable.
}
@Override
public List<TableDescriptor> listTableDescriptors(List<TableName> tableNames) throws IOException {
return get(admin.listTableDescriptors(tableNames));
}
@Override
public Future<Boolean> abortProcedureAsync(long procId, boolean mayInterruptIfRunning)
throws IOException {
return admin.abortProcedure(procId, mayInterruptIfRunning);
}
@Override
public String getProcedures() throws IOException {
return get(admin.getProcedures());
}
@Override
public String getLocks() throws IOException {
return get(admin.getLocks());
}
@Override
public void rollWALWriter(ServerName serverName) throws IOException, FailedLogCloseException {
get(admin.rollWALWriter(serverName));
}
@Override
public Map<ServerName, Long> rollAllWALWriters() throws IOException {
return get(admin.rollAllWALWriters());
}
@Override
public CompactionState getCompactionState(TableName tableName) throws IOException {
return get(admin.getCompactionState(tableName));
}
@Override
public CompactionState getCompactionState(TableName tableName, CompactType compactType)
throws IOException {
return get(admin.getCompactionState(tableName, compactType));
}
@Override
public CompactionState getCompactionStateForRegion(byte[] regionName) throws IOException {
return get(admin.getCompactionStateForRegion(regionName));
}
@Override
public long getLastMajorCompactionTimestamp(TableName tableName) throws IOException {
// Absent timestamp (no major compaction recorded) is reported as 0.
return get(admin.getLastMajorCompactionTimestamp(tableName)).orElse(0L);
}
@Override
public long getLastMajorCompactionTimestampForRegion(byte[] regionName) throws IOException {
return get(admin.getLastMajorCompactionTimestampForRegion(regionName)).orElse(0L);
}
@Override
public void snapshot(SnapshotDescription snapshot)
throws IOException, SnapshotCreationException, IllegalArgumentException {
get(admin.snapshot(snapshot));
}
@Override
public Future<Void> snapshotAsync(SnapshotDescription snapshot)
throws IOException, SnapshotCreationException {
return admin.snapshot(snapshot);
}
@Override
public boolean isSnapshotFinished(SnapshotDescription snapshot)
throws IOException, HBaseSnapshotException, UnknownSnapshotException {
return get(admin.isSnapshotFinished(snapshot));
}
@Override
public void restoreSnapshot(String snapshotName) throws IOException, RestoreSnapshotException {
get(admin.restoreSnapshot(snapshotName));
}
@Override
public void restoreSnapshot(String snapshotName, boolean takeFailSafeSnapshot, boolean restoreAcl)
throws IOException, RestoreSnapshotException {
get(admin.restoreSnapshot(snapshotName, takeFailSafeSnapshot, restoreAcl));
}
@Override
public Future<Void> cloneSnapshotAsync(String snapshotName, TableName tableName,
boolean restoreAcl, String customSFT)
throws IOException, TableExistsException, RestoreSnapshotException {
return admin.cloneSnapshot(snapshotName, tableName, restoreAcl, customSFT);
}
@Override
public void execProcedure(String signature, String instance, Map<String, String> props)
throws IOException {
get(admin.execProcedure(signature, instance, props));
}
@Override
public byte[] execProcedureWithReturn(String signature, String instance,
Map<String, String> props) throws IOException {
return get(admin.execProcedureWithReturn(signature, instance, props));
}
@Override
public boolean isProcedureFinished(String signature, String instance, Map<String, String> props)
throws IOException {
return get(admin.isProcedureFinished(signature, instance, props));
}
@Override
public List<SnapshotDescription> listSnapshots() throws IOException {
return get(admin.listSnapshots());
}
@Override
public List<SnapshotDescription> listSnapshots(Pattern pattern) throws IOException {
return get(admin.listSnapshots(pattern));
}
@Override
public List<SnapshotDescription> listTableSnapshots(Pattern tableNamePattern,
Pattern snapshotNamePattern) throws IOException {
return get(admin.listTableSnapshots(tableNamePattern, snapshotNamePattern));
}
@Override
public void deleteSnapshot(String snapshotName) throws IOException {
get(admin.deleteSnapshot(snapshotName));
}
@Override
public void deleteSnapshots(Pattern pattern) throws IOException {
get(admin.deleteSnapshots(pattern));
}
@Override
public void deleteTableSnapshots(Pattern tableNamePattern, Pattern snapshotNamePattern)
throws IOException {
get(admin.deleteTableSnapshots(tableNamePattern, snapshotNamePattern));
}
@Override
public void setQuota(QuotaSettings quota) throws IOException {
get(admin.setQuota(quota));
}
@Override
public List<QuotaSettings> getQuota(QuotaFilter filter) throws IOException {
return get(admin.getQuota(filter));
}
@SuppressWarnings("deprecation")
// Adapts the async RpcChannel to the deprecated blocking
// CoprocessorRpcChannel contract expected by the sync Admin API.
private static final class SyncCoprocessorRpcChannelOverAsync implements CoprocessorRpcChannel {
// Underlying async channel that actually performs the call.
private final RpcChannel delegate;
public SyncCoprocessorRpcChannelOverAsync(RpcChannel delegate) {
this.delegate = delegate;
}
@Override
public void callMethod(MethodDescriptor method, RpcController controller, Message request,
Message responsePrototype, RpcCallback<Message> done) {
// Drive the async delegate with a blocking callback, then replay the
// outcome onto the caller-supplied controller/callback.
ClientCoprocessorRpcController c = new ClientCoprocessorRpcController();
CoprocessorBlockingRpcCallback<Message> callback = new CoprocessorBlockingRpcCallback<>();
delegate.callMethod(method, c, request, responsePrototype, callback);
Message ret;
try {
// Blocks until the async call completes.
ret = callback.get();
} catch (IOException e) {
setCoprocessorError(controller, e);
return;
}
if (c.failed()) {
setCoprocessorError(controller, c.getFailed());
}
// NOTE(review): done.run(ret) is invoked even when the controller was
// flagged failed above -- this mirrors the non-blocking protobuf RPC
// contract (callers check the controller); confirm intent if changing.
done.run(ret);
}
@Override
public Message callBlockingMethod(MethodDescriptor method, RpcController controller,
Message request, Message responsePrototype) throws ServiceException {
ClientCoprocessorRpcController c = new ClientCoprocessorRpcController();
CoprocessorBlockingRpcCallback<Message> done = new CoprocessorBlockingRpcCallback<>();
callMethod(method, c, request, responsePrototype, done);
Message ret;
try {
ret = done.get();
} catch (IOException e) {
throw new ServiceException(e);
}
if (c.failed()) {
// Propagate the failure both on the caller's controller and as the
// checked ServiceException the blocking contract requires.
setCoprocessorError(controller, c.getFailed());
throw new ServiceException(c.getFailed());
}
return ret;
}
}
@SuppressWarnings("deprecation")
@Override
public CoprocessorRpcChannel coprocessorService() {
// Master-scoped coprocessor endpoint channel.
return new SyncCoprocessorRpcChannelOverAsync(
new MasterCoprocessorRpcChannelImpl(admin.<Message> newMasterCaller()));
}
@SuppressWarnings("deprecation")
@Override
public CoprocessorRpcChannel coprocessorService(ServerName serverName) {
// Region-server-scoped coprocessor endpoint channel.
return new SyncCoprocessorRpcChannelOverAsync(new RegionServerCoprocessorRpcChannelImpl(
admin.<Message> newServerCaller().serverName(serverName)));
}
@Override
public void updateConfiguration(ServerName server) throws IOException {
get(admin.updateConfiguration(server));
}
@Override
public void updateConfiguration() throws IOException {
get(admin.updateConfiguration());
}
@Override
public void updateConfiguration(String groupName) throws IOException {
get(admin.updateConfiguration(groupName));
}
@Override
public List<SecurityCapability> getSecurityCapabilities() throws IOException {
return get(admin.getSecurityCapabilities());
}
@Override
public boolean splitSwitch(boolean enabled, boolean synchronous) throws IOException {
return get(admin.splitSwitch(enabled, synchronous));
}
@Override
public boolean mergeSwitch(boolean enabled, boolean synchronous) throws IOException {
return get(admin.mergeSwitch(enabled, synchronous));
}
@Override
public boolean isSplitEnabled() throws IOException {
return get(admin.isSplitEnabled());
}
@Override
public boolean isMergeEnabled() throws IOException {
return get(admin.isMergeEnabled());
}
// ---------------------------------------------------------------------
// Replication, decommissioning, space-quota, permission, RSGroup and
// diagnostics delegates. Same pattern throughout: *Async methods return
// the async admin's future directly; sync methods wait on it via get(...).
// ---------------------------------------------------------------------
@Override
public Future<Void> addReplicationPeerAsync(String peerId, ReplicationPeerConfig peerConfig,
boolean enabled) throws IOException {
return admin.addReplicationPeer(peerId, peerConfig, enabled);
}
@Override
public Future<Void> removeReplicationPeerAsync(String peerId) throws IOException {
return admin.removeReplicationPeer(peerId);
}
@Override
public Future<Void> enableReplicationPeerAsync(String peerId) throws IOException {
return admin.enableReplicationPeer(peerId);
}
@Override
public Future<Void> disableReplicationPeerAsync(String peerId) throws IOException {
return admin.disableReplicationPeer(peerId);
}
@Override
public ReplicationPeerConfig getReplicationPeerConfig(String peerId) throws IOException {
return get(admin.getReplicationPeerConfig(peerId));
}
@Override
public Future<Void> updateReplicationPeerConfigAsync(String peerId,
ReplicationPeerConfig peerConfig) throws IOException {
return admin.updateReplicationPeerConfig(peerId, peerConfig);
}
@Override
public List<ReplicationPeerDescription> listReplicationPeers() throws IOException {
return get(admin.listReplicationPeers());
}
@Override
public List<ReplicationPeerDescription> listReplicationPeers(Pattern pattern) throws IOException {
return get(admin.listReplicationPeers(pattern));
}
@Override
public Future<Void> transitReplicationPeerSyncReplicationStateAsync(String peerId,
SyncReplicationState state) throws IOException {
return admin.transitReplicationPeerSyncReplicationState(peerId, state);
}
@Override
public boolean isReplicationPeerEnabled(String peerId) throws IOException {
return get(admin.isReplicationPeerEnabled(peerId));
}
@Override
public boolean replicationPeerModificationSwitch(boolean on, boolean drainProcedures)
throws IOException {
return get(admin.replicationPeerModificationSwitch(on, drainProcedures));
}
@Override
public boolean isReplicationPeerModificationEnabled() throws IOException {
return get(admin.isReplicationPeerModificationEnabled());
}
@Override
public void decommissionRegionServers(List<ServerName> servers, boolean offload)
throws IOException {
get(admin.decommissionRegionServers(servers, offload));
}
@Override
public List<ServerName> listDecommissionedRegionServers() throws IOException {
return get(admin.listDecommissionedRegionServers());
}
@Override
public void recommissionRegionServer(ServerName server, List<byte[]> encodedRegionNames)
throws IOException {
get(admin.recommissionRegionServer(server, encodedRegionNames));
}
@Override
public List<TableCFs> listReplicatedTableCFs() throws IOException {
return get(admin.listReplicatedTableCFs());
}
@Override
public void enableTableReplication(TableName tableName) throws IOException {
get(admin.enableTableReplication(tableName));
}
@Override
public void disableTableReplication(TableName tableName) throws IOException {
get(admin.disableTableReplication(tableName));
}
@Override
public void clearCompactionQueues(ServerName serverName, Set<String> queues)
throws IOException, InterruptedException {
get(admin.clearCompactionQueues(serverName, queues));
}
@Override
public List<ServerName> clearDeadServers(List<ServerName> servers) throws IOException {
return get(admin.clearDeadServers(servers));
}
@Override
public void cloneTableSchema(TableName tableName, TableName newTableName, boolean preserveSplits)
throws IOException {
get(admin.cloneTableSchema(tableName, newTableName, preserveSplits));
}
@Override
public boolean switchRpcThrottle(boolean enable) throws IOException {
return get(admin.switchRpcThrottle(enable));
}
@Override
public boolean isRpcThrottleEnabled() throws IOException {
return get(admin.isRpcThrottleEnabled());
}
@Override
public boolean exceedThrottleQuotaSwitch(boolean enable) throws IOException {
return get(admin.exceedThrottleQuotaSwitch(enable));
}
@Override
public Map<TableName, Long> getSpaceQuotaTableSizes() throws IOException {
return get(admin.getSpaceQuotaTableSizes());
}
@Override
public Map<TableName, ? extends SpaceQuotaSnapshotView>
getRegionServerSpaceQuotaSnapshots(ServerName serverName) throws IOException {
return get(admin.getRegionServerSpaceQuotaSnapshots(serverName));
}
@Override
public SpaceQuotaSnapshotView getCurrentSpaceQuotaSnapshot(String namespace) throws IOException {
return get(admin.getCurrentSpaceQuotaSnapshot(namespace));
}
@Override
public SpaceQuotaSnapshotView getCurrentSpaceQuotaSnapshot(TableName tableName)
throws IOException {
return get(admin.getCurrentSpaceQuotaSnapshot(tableName));
}
@Override
public void grant(UserPermission userPermission, boolean mergeExistingPermissions)
throws IOException {
get(admin.grant(userPermission, mergeExistingPermissions));
}
@Override
public void revoke(UserPermission userPermission) throws IOException {
get(admin.revoke(userPermission));
}
@Override
public List<UserPermission>
getUserPermissions(GetUserPermissionsRequest getUserPermissionsRequest) throws IOException {
return get(admin.getUserPermissions(getUserPermissionsRequest));
}
@Override
public List<Boolean> hasUserPermissions(String userName, List<Permission> permissions)
throws IOException {
return get(admin.hasUserPermissions(userName, permissions));
}
@Override
public boolean snapshotCleanupSwitch(final boolean on, final boolean synchronous)
throws IOException {
return get(admin.snapshotCleanupSwitch(on, synchronous));
}
@Override
public boolean isSnapshotCleanupEnabled() throws IOException {
return get(admin.isSnapshotCleanupEnabled());
}
@Override
public List<Boolean> clearSlowLogResponses(final Set<ServerName> serverNames) throws IOException {
return get(admin.clearSlowLogResponses(serverNames));
}
@Override
public RSGroupInfo getRSGroup(String groupName) throws IOException {
return get(admin.getRSGroup(groupName));
}
@Override
public void moveServersToRSGroup(Set<Address> servers, String groupName) throws IOException {
get(admin.moveServersToRSGroup(servers, groupName));
}
@Override
public void addRSGroup(String groupName) throws IOException {
get(admin.addRSGroup(groupName));
}
@Override
public void removeRSGroup(String groupName) throws IOException {
get(admin.removeRSGroup(groupName));
}
@Override
public BalanceResponse balanceRSGroup(String groupName, BalanceRequest request)
throws IOException {
return get(admin.balanceRSGroup(groupName, request));
}
@Override
public List<RSGroupInfo> listRSGroups() throws IOException {
return get(admin.listRSGroups());
}
@Override
public List<TableName> listTablesInRSGroup(String groupName) throws IOException {
return get(admin.listTablesInRSGroup(groupName));
}
@Override
public Pair<List<String>, List<TableName>>
getConfiguredNamespacesAndTablesInRSGroup(String groupName) throws IOException {
return get(admin.getConfiguredNamespacesAndTablesInRSGroup(groupName));
}
@Override
public RSGroupInfo getRSGroup(Address hostPort) throws IOException {
return get(admin.getRSGroup(hostPort));
}
@Override
public void removeServersFromRSGroup(Set<Address> servers) throws IOException {
get(admin.removeServersFromRSGroup(servers));
}
@Override
public RSGroupInfo getRSGroup(TableName tableName) throws IOException {
return get(admin.getRSGroup(tableName));
}
@Override
public void setRSGroup(Set<TableName> tables, String groupName) throws IOException {
get(admin.setRSGroup(tables, groupName));
}
@Override
public void renameRSGroup(String oldName, String newName) throws IOException {
get(admin.renameRSGroup(oldName, newName));
}
@Override
public void updateRSGroupConfig(String groupName, Map<String, String> configuration)
throws IOException {
get(admin.updateRSGroupConfig(groupName, configuration));
}
@Override
public List<LogEntry> getLogEntries(Set<ServerName> serverNames, String logType,
ServerType serverType, int limit, Map<String, Object> filterParams) throws IOException {
return get(admin.getLogEntries(serverNames, logType, serverType, limit, filterParams));
}
@Override
public void flushMasterStore() throws IOException {
get(admin.flushMasterStore());
}
@Override
public List<String> getCachedFilesList(ServerName serverName) throws IOException {
return get(admin.getCachedFilesList(serverName));
}
@Override
public void restoreBackupSystemTable(String snapshotName) throws IOException {
get(admin.restoreBackupSystemTable(snapshotName));
}
}
|
apache/ozone | 36,277 | hadoop-hdds/client/src/test/java/org/apache/hadoop/ozone/client/io/TestECBlockReconstructedStripeInputStream.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.ozone.client.io;
import static java.util.Collections.emptySet;
import static java.util.Collections.singleton;
import static java.util.stream.Collectors.toSet;
import static org.apache.hadoop.ozone.client.io.ECStreamTestUtil.generateParity;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import com.google.common.collect.ImmutableSet;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SplittableRandom;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadLocalRandom;
import org.apache.commons.lang3.RandomUtils;
import org.apache.hadoop.hdds.client.ECReplicationConfig;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.hdds.protocol.DatanodeDetails;
import org.apache.hadoop.hdds.protocol.MockDatanodeDetails;
import org.apache.hadoop.hdds.scm.OzoneClientConfig;
import org.apache.hadoop.hdds.scm.storage.BlockLocationInfo;
import org.apache.hadoop.io.ByteBufferPool;
import org.apache.hadoop.io.ElasticByteBufferPool;
import org.apache.hadoop.ozone.client.io.ECStreamTestUtil.TestBlockInputStream;
import org.apache.hadoop.ozone.client.io.ECStreamTestUtil.TestBlockInputStreamFactory;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
/**
* Test for the ECBlockReconstructedStripeInputStream.
*/
public class TestECBlockReconstructedStripeInputStream {
// EC chunk size used throughout these tests (1 MiB).
private static final int ONEMB = 1024 * 1024;
// RS(3,2) config, rebuilt per test in setup().
private ECReplicationConfig repConfig;
private ECStreamTestUtil.TestBlockInputStreamFactory streamFactory;
// Seed is kept so dataGen can be re-seeded later to verify read-back data.
private long randomSeed;
private ThreadLocalRandom random = ThreadLocalRandom.current();
private SplittableRandom dataGen;
// Shared pool; deliberately "polluted" with oversized buffers in setup().
private ByteBufferPool bufferPool = new ElasticByteBufferPool();
private ExecutorService ecReconstructExecutor =
Executors.newFixedThreadPool(3);
private OzoneConfiguration conf = new OzoneConfiguration();
/**
 * Parameter source for recovery tests: each entry is the set of replica
 * indexes (0-based) to reconstruct. Covers the non-recovery case (empty
 * set), every single index, and representative pairs mixing data (0-2)
 * and parity (3-4) indexes.
 */
static List<Set<Integer>> recoveryCases() { // TODO better name
  List<Set<Integer>> cases = new ArrayList<>();
  cases.add(emptySet()); // non-recovery
  for (int idx = 0; idx < 5; idx++) {
    cases.add(singleton(idx)); // recover one replica at a time
  }
  cases.add(ImmutableSet.of(0, 1)); // only data
  cases.add(ImmutableSet.of(1, 4)); // data and parity
  cases.add(ImmutableSet.of(3, 4)); // only parity
  cases.add(ImmutableSet.of(2, 3)); // data and parity
  cases.add(ImmutableSet.of(2, 4)); // data and parity
  return cases;
}
@BeforeEach
public void setup() {
// Seed the shared pool with oversized buffers first (HDDS-7304 regression
// guard); see polluteByteBufferPool() for details.
polluteByteBufferPool();
repConfig = new ECReplicationConfig(3, 2,
ECReplicationConfig.EcCodec.RS, ONEMB);
streamFactory = new ECStreamTestUtil.TestBlockInputStreamFactory();
// Record the seed so the same sequence can be regenerated for verification.
randomSeed = random.nextLong();
dataGen = new SplittableRandom(randomSeed);
}
/**
 * All the tests here use a chunk size of 1MB, but in a mixed workload
 * cluster, it is possible to have multiple EC chunk sizes. This will result
 * in the byte buffer pool having buffers of varying size and when a buffer is
 * requested it can receive a buffer larger than the request size. That caused
 * problems in HDDS-7304, so this method ensures the buffer pool has some
 * larger buffers to return.
 */
private void polluteByteBufferPool() {
// Borrow several 3 MiB buffers, then return them all so the pool retains
// oversized buffers for later 1 MiB requests.
List<ByteBuffer> bufs = new ArrayList<>();
for (int i = 0; i < 5; i++) {
ByteBuffer b = bufferPool.getBuffer(false, ONEMB * 3);
bufs.add(b);
}
for (ByteBuffer b : bufs) {
bufferPool.putBuffer(b);
}
}
@AfterEach
public void teardown() {
// Stop the reconstruction executor so worker threads do not leak between
// tests.
ecReconstructExecutor.shutdownNow();
}
@Test
// Verifies hasSufficientLocations() across combinations of available,
// failed and to-be-recovered replica indexes for an RS(3,2) block.
public void testSufficientLocations() throws IOException {
// One chunk, only 1 location.
BlockLocationInfo keyInfo = ECStreamTestUtil
.createKeyInfo(repConfig, 1, ONEMB);
try (ECBlockInputStream ecb = createInputStream(keyInfo)) {
assertTrue(ecb.hasSufficientLocations());
}
// Two Chunks, but missing data block 2.
Map<DatanodeDetails, Integer> dnMap
= ECStreamTestUtil.createIndexMap(1, 4, 5);
keyInfo = ECStreamTestUtil.createKeyInfo(repConfig, ONEMB * 2, dnMap);
try (ECBlockReconstructedStripeInputStream ecb =
createInputStream(keyInfo)) {
assertTrue(ecb.hasSufficientLocations());
Collection<Integer> idxs = dnMap.values();
for (int i : idxs) {
// Recovering any single present replica is possible (indexes in the
// map are 1-based; setRecoveryIndexes takes 0-based).
ecb.setRecoveryIndexes(singleton(i - 1));
assertTrue(ecb.hasSufficientLocations());
}
// trying to recover all
ecb.setRecoveryIndexes(toBufferIndexes(idxs));
assertFalse(ecb.hasSufficientLocations());
}
// Three Chunks, but missing data block 2 and 3.
dnMap = ECStreamTestUtil.createIndexMap(1, 4, 5);
keyInfo = ECStreamTestUtil.createKeyInfo(repConfig, ONEMB * 3, dnMap);
try (ECBlockReconstructedStripeInputStream ecb =
createInputStream(keyInfo)) {
assertTrue(ecb.hasSufficientLocations());
// Set a failed location
List<DatanodeDetails> failed = new ArrayList<>();
failed.add(keyInfo.getPipeline().getFirstNode());
ecb.addFailedDatanodes(failed);
// Only 2 of the required 3 replicas remain usable.
assertFalse(ecb.hasSufficientLocations());
}
// Three Chunks, but missing data block 2 and 3 and parity 1.
dnMap = ECStreamTestUtil.createIndexMap(1, 4);
keyInfo = ECStreamTestUtil.createKeyInfo(repConfig, ONEMB * 3, dnMap);
try (ECBlockInputStream ecb = createInputStream(keyInfo)) {
assertFalse(ecb.hasSufficientLocations());
}
// Three Chunks, all available but fail 3
dnMap = ECStreamTestUtil.createIndexMap(1, 2, 3, 4, 5);
keyInfo = ECStreamTestUtil.createKeyInfo(repConfig, ONEMB * 3, dnMap);
try (ECBlockReconstructedStripeInputStream ecb =
createInputStream(keyInfo)) {
assertTrue(ecb.hasSufficientLocations());
// Set a failed location
List<DatanodeDetails> failed = new ArrayList<>();
for (Map.Entry<DatanodeDetails, Integer> entry : dnMap.entrySet()) {
failed.add(entry.getKey());
// Locations remain sufficient until 3 of the 5 replicas have failed.
boolean expected = failed.size() < 3;
ecb.addFailedDatanodes(singleton(entry.getKey()));
assertEquals(expected, ecb.hasSufficientLocations());
}
}
try (ECBlockReconstructedStripeInputStream ecb =
createInputStream(keyInfo)) {
List<Integer> recover = new ArrayList<>();
for (int i : Arrays.asList(4, 3, 2, 1, 0)) {
recover.add(i);
ecb.setRecoveryIndexes(recover);
// Same threshold from the recovery side: at most 2 indexes can be
// reconstructed with the remaining replicas.
boolean expected = recover.size() < 3;
assertEquals(expected, ecb.hasSufficientLocations());
}
}
// One chunk, indexes 2 and 3 are padding, but still reported in the
// container list. The other locations are missing so we should have
// insufficient locations.
dnMap = ECStreamTestUtil.createIndexMap(2, 3);
keyInfo = ECStreamTestUtil.createKeyInfo(repConfig, ONEMB, dnMap);
try (ECBlockInputStream ecb = createInputStream(keyInfo)) {
assertFalse(ecb.hasSufficientLocations());
}
}
@ParameterizedTest
@MethodSource("recoveryCases")
// Reads 3 full stripes plus a trailing partial stripe, parameterized by
// which replica indexes (possibly none) are being reconstructed, and
// verifies the reconstructed output matches the generated data.
void testReadFullStripesWithPartial(Set<Integer> recoveryIndexes)
throws IOException {
// Generate the input data for 3 full stripes and generate the parity.
int chunkSize = repConfig.getEcChunkSize();
int partialStripeSize = chunkSize * 2 - 1;
int blockLength = chunkSize * repConfig.getData() * 3 + partialStripeSize;
ByteBuffer[] dataBufs = allocateBuffers(repConfig.getData(), 4 * chunkSize);
ECStreamTestUtil.randomFill(dataBufs, chunkSize, dataGen, blockLength);
ByteBuffer[] parity = generateParity(dataBufs, repConfig);
Map<DatanodeDetails, Integer> dnMap =
ECStreamTestUtil.createIndexMap(1, 2, 3, 4, 5);
streamFactory = new TestBlockInputStreamFactory();
addDataStreamsToFactory(dataBufs, parity);
BlockLocationInfo keyInfo = ECStreamTestUtil.createKeyInfo(repConfig,
stripeSize() * 3 + partialStripeSize, dnMap);
streamFactory.setCurrentPipeline(keyInfo.getPipeline());
// In recovery mode only the recovery indexes are emitted; otherwise all
// data indexes are.
List<Integer> outputIndexes = getOutputIndexes(recoveryIndexes);
ByteBuffer[] bufs = allocateByteBuffers(
outputIndexes.size(), repConfig.getEcChunkSize());
// Re-seed so the expected byte sequence can be replayed for comparison.
dataGen = new SplittableRandom(randomSeed);
try (ECBlockReconstructedStripeInputStream ecb =
createInputStream(keyInfo)) {
ecb.setRecoveryIndexes(recoveryIndexes);
// Read 3 full stripes
for (int i = 0; i < 3; i++) {
int read = ecb.read(bufs);
assertEquals(stripeSize(), read);
int output = 0;
for (int j = 0; j < repConfig.getRequiredNodes(); j++) {
if (outputIndexes.contains(j)) {
ECStreamTestUtil.assertBufferMatches(bufs[output++], dataGen);
}
}
// Check the underlying streams have read 1 chunk per read:
for (TestBlockInputStream bis : streamFactory.getBlockStreams()) {
assertEquals(chunkSize * (i + 1),
bis.getPos());
}
assertEquals(stripeSize() * (i + 1), ecb.getPos());
clearBuffers(bufs);
}
// The next read is a partial stripe
int read = ecb.read(bufs);
assertEquals(partialStripeSize, read);
int output = 0;
for (int j = 0; j < 2; j++) {
if (outputIndexes.contains(j)) {
ECStreamTestUtil.assertBufferMatches(bufs[output++], dataGen);
}
}
// Index 2 holds no data in the partial stripe, so its buffer stays empty.
if (outputIndexes.contains(2)) {
assertEquals(0, bufs[output].remaining());
assertEquals(0, bufs[output].position());
}
// A further read should give EOF
clearBuffers(bufs);
read = ecb.read(bufs);
assertEquals(-1, read);
}
}
/**
 * Reads a block shorter than one chunk where data replica 1 is lost and
 * must be reconstructed from the two parity replicas (replicas 2 and 3 are
 * implicit padding), then verifies the data, the stream positions and EOF.
 */
@Test
public void testReadPartialStripe() throws IOException {
  int blockLength = repConfig.getEcChunkSize() - 1;
  ByteBuffer[] dataBufs = allocateBuffers(repConfig.getData(), 3 * ONEMB);
  ECStreamTestUtil
      .randomFill(dataBufs, repConfig.getEcChunkSize(), dataGen, blockLength);
  ByteBuffer[] parity = generateParity(dataBufs, repConfig);
  addDataStreamsToFactory(dataBufs, parity);
  ByteBuffer[] bufs = allocateByteBuffers(repConfig);
  // We have a length that is less than a single chunk, so blocks 2 and 3
  // are padding and will not be present. Block 1 is lost and needs recovered
  // from the parity and padded blocks 2 and 3.
  Map<DatanodeDetails, Integer> dnMap =
      ECStreamTestUtil.createIndexMap(4, 5);
  BlockLocationInfo keyInfo =
      ECStreamTestUtil.createKeyInfo(repConfig, blockLength, dnMap);
  streamFactory.setCurrentPipeline(keyInfo.getPipeline());
  // Re-seed so the expected byte sequence can be replayed for comparison.
  dataGen = new SplittableRandom(randomSeed);
  try (ECBlockReconstructedStripeInputStream ecb =
      createInputStream(keyInfo)) {
    int read = ecb.read(bufs);
    assertEquals(blockLength, read);
    ECStreamTestUtil.assertBufferMatches(bufs[0], dataGen);
    assertEquals(0, bufs[1].remaining());
    assertEquals(0, bufs[1].position());
    assertEquals(0, bufs[2].remaining());
    assertEquals(0, bufs[2].position());
    // Check the underlying streams have been advanced by 1 blockLength:
    for (TestBlockInputStream bis : streamFactory.getBlockStreams()) {
      assertEquals(blockLength, bis.getPos());
    }
    // Fixed argument order: JUnit expects (expected, actual), otherwise a
    // failure reports the values swapped.
    assertEquals(blockLength, ecb.getPos());
    clearBuffers(bufs);
    // A further read should give EOF
    read = ecb.read(bufs);
    assertEquals(-1, read);
  }
}
/**
 * Recovery-mode read of a block shorter than one chunk: both parity
 * replicas (0-based indexes 3 and 4) are reconstructed from data replica 1
 * and the implicit padding replicas 2 and 3, then verified against the
 * generated data.
 */
@Test
void recoverPartialStripe() throws IOException {
  int ecChunkSize = repConfig.getEcChunkSize();
  int blockLength = ecChunkSize - 1;
  ByteBuffer[] dataBufs = allocateBuffers(repConfig.getData(), 3 * ONEMB);
  ECStreamTestUtil
      .randomFill(dataBufs, ecChunkSize, dataGen, blockLength);
  ByteBuffer[] parity = generateParity(dataBufs, repConfig);
  addDataStreamsToFactory(dataBufs, parity);
  // We have a length that is less than a single chunk, so blocks 2 and 3 are
  // padding and will not be present. Parity blocks are lost and need to be
  // recovered from block 1 and padded blocks 2 and 3.
  Map<DatanodeDetails, Integer> dnMap =
      ECStreamTestUtil.createIndexMap(1, 4, 5);
  ByteBuffer[] bufs = allocateByteBuffers(2, ecChunkSize);
  BlockLocationInfo keyInfo =
      ECStreamTestUtil.createKeyInfo(repConfig, blockLength, dnMap);
  streamFactory.setCurrentPipeline(keyInfo.getPipeline());
  // Re-seed so the expected byte sequence can be replayed for comparison.
  dataGen = new SplittableRandom(randomSeed);
  try (ECBlockReconstructedStripeInputStream ecb =
      createInputStream(keyInfo)) {
    ecb.setRecoveryIndexes(Arrays.asList(3, 4));
    int read = ecb.read(bufs);
    assertEquals(blockLength, read);
    ECStreamTestUtil.assertBufferMatches(bufs[0], dataGen);
    ECStreamTestUtil.assertBufferMatches(bufs[1], dataGen);
    // Check the underlying streams have been advanced by 1 blockLength:
    for (TestBlockInputStream bis : streamFactory.getBlockStreams()) {
      assertEquals(blockLength, bis.getPos());
    }
    // Fixed argument order: JUnit expects (expected, actual), otherwise a
    // failure reports the values swapped.
    assertEquals(blockLength, ecb.getPos());
    clearBuffers(bufs);
    // A further read should give EOF
    read = ecb.read(bufs);
    assertEquals(-1, read);
  }
}
@Test
public void testReadPartialStripeTwoChunks() throws IOException {
  int chunkSize = repConfig.getEcChunkSize();
  // One byte short of two chunks: chunk 1 is full, chunk 2 is partial.
  int blockLength = chunkSize * 2 - 1;
  ByteBuffer[] dataBufs = allocateBuffers(repConfig.getData(), 3 * ONEMB);
  ECStreamTestUtil
      .randomFill(dataBufs, repConfig.getEcChunkSize(), dataGen, blockLength);
  ByteBuffer[] parity = generateParity(dataBufs, repConfig);
  addDataStreamsToFactory(dataBufs, parity);
  ByteBuffer[] bufs = allocateByteBuffers(repConfig);
  // The length spans two chunks, so only block 3 is padding. Data blocks 1
  // and 2 are lost (only parity replicas 4 and 5 are available) and must be
  // reconstructed from the parity and the padded block 3.
  Map<DatanodeDetails, Integer> dnMap =
      ECStreamTestUtil.createIndexMap(4, 5);
  BlockLocationInfo keyInfo =
      ECStreamTestUtil.createKeyInfo(repConfig, blockLength, dnMap);
  streamFactory.setCurrentPipeline(keyInfo.getPipeline());
  // Re-seed so the expected byte sequence can be regenerated for assertions.
  dataGen = new SplittableRandom(randomSeed);
  try (ECBlockReconstructedStripeInputStream ecb =
      createInputStream(keyInfo)) {
    int read = ecb.read(bufs);
    assertEquals(blockLength, read);
    ECStreamTestUtil.assertBufferMatches(bufs[0], dataGen);
    ECStreamTestUtil.assertBufferMatches(bufs[1], dataGen);
    assertEquals(0, bufs[2].remaining());
    assertEquals(0, bufs[2].position());
    // Check the underlying streams have been advanced by 1 chunk:
    for (TestBlockInputStream bis : streamFactory.getBlockStreams()) {
      assertEquals(chunkSize, bis.getPos());
    }
    // Fixed: expected value first (arguments were reversed).
    assertEquals(blockLength, ecb.getPos());
    clearBuffers(bufs);
    // A further read should give EOF
    read = ecb.read(bufs);
    assertEquals(-1, read);
  }
}
@Test
public void testReadPartialStripeThreeChunks() throws IOException {
  int chunkSize = repConfig.getEcChunkSize();
  // One byte short of a full stripe: chunks 1 and 2 are full, chunk 3 is
  // partial.
  int blockLength = chunkSize * 3 - 1;
  ByteBuffer[] dataBufs = allocateBuffers(repConfig.getData(), 3 * ONEMB);
  ECStreamTestUtil
      .randomFill(dataBufs, repConfig.getEcChunkSize(), dataGen, blockLength);
  ByteBuffer[] parity = generateParity(dataBufs, repConfig);
  // Exercise several combinations of available replicas. Each map below
  // leaves at most two replicas missing, which must be recoverable from the
  // remaining data and parity replicas.
  List<Map<DatanodeDetails, Integer>> locations = new ArrayList<>();
  // Two data missing
  locations.add(ECStreamTestUtil.createIndexMap(3, 4, 5));
  // Two data missing
  locations.add(ECStreamTestUtil.createIndexMap(1, 4, 5));
  // One data missing - the last one
  locations.add(ECStreamTestUtil.createIndexMap(1, 2, 5));
  // One data and one parity missing
  locations.add(ECStreamTestUtil.createIndexMap(2, 3, 4));
  // One data and one parity missing
  locations.add(ECStreamTestUtil.createIndexMap(1, 2, 4));
  // No indexes missing
  locations.add(ECStreamTestUtil.createIndexMap(1, 2, 3, 4, 5));
  for (Map<DatanodeDetails, Integer> dnMap : locations) {
    streamFactory = new TestBlockInputStreamFactory();
    addDataStreamsToFactory(dataBufs, parity);
    ByteBuffer[] bufs = allocateByteBuffers(repConfig);
    BlockLocationInfo keyInfo =
        ECStreamTestUtil.createKeyInfo(repConfig, blockLength, dnMap);
    streamFactory.setCurrentPipeline(keyInfo.getPipeline());
    // Re-seed per iteration so expected bytes can be regenerated.
    dataGen = new SplittableRandom(randomSeed);
    try (ECBlockReconstructedStripeInputStream ecb =
        createInputStream(keyInfo)) {
      int read = ecb.read(bufs);
      assertEquals(blockLength, read);
      ECStreamTestUtil.assertBufferMatches(bufs[0], dataGen);
      ECStreamTestUtil.assertBufferMatches(bufs[1], dataGen);
      ECStreamTestUtil.assertBufferMatches(bufs[2], dataGen);
      // Check the underlying streams have been fully consumed:
      for (TestBlockInputStream bis : streamFactory.getBlockStreams()) {
        assertEquals(0, bis.getRemaining());
      }
      // Fixed: expected value first (arguments were reversed).
      assertEquals(blockLength, ecb.getPos());
      clearBuffers(bufs);
      // A further read should give EOF
      read = ecb.read(bufs);
      assertEquals(-1, read);
    }
  }
}
@Test
public void testErrorThrownIfBlockNotLongEnough() throws IOException {
  // One byte short of a full chunk: only data block 1 holds real data,
  // blocks 2 and 3 are entirely padding.
  int blockLength = repConfig.getEcChunkSize() - 1;
  ByteBuffer[] dataBufs = allocateBuffers(repConfig.getData(), 3 * ONEMB);
  ECStreamTestUtil
      .randomFill(dataBufs, repConfig.getEcChunkSize(), dataGen, blockLength);
  ByteBuffer[] parity = generateParity(dataBufs, repConfig);
  addDataStreamsToFactory(dataBufs, parity);
  // Truncate both parity buffers so they are shorter than the block length;
  // the lost data block then cannot be rebuilt from them.
  parity[0].limit(blockLength - 1);
  parity[1].limit(blockLength - 1);
  ByteBuffer[] readBufs = allocateByteBuffers(repConfig);
  // Only the parity replicas (4 and 5) are present. Block 1 is lost and
  // would normally be recovered from parity plus the padded blocks 2 and 3.
  Map<DatanodeDetails, Integer> locationMap =
      ECStreamTestUtil.createIndexMap(4, 5);
  BlockLocationInfo keyInfo =
      ECStreamTestUtil.createKeyInfo(repConfig, blockLength, locationMap);
  streamFactory.setCurrentPipeline(keyInfo.getPipeline());
  try (ECBlockReconstructedStripeInputStream ecb = createInputStream(keyInfo)) {
    // The short parity leaves too few usable locations to reconstruct.
    assertThrows(InsufficientLocationsException.class,
        () -> ecb.read(readBufs));
  }
}
@Test
void testNoErrorIfSpareLocationToRead() throws IOException {
  int chunkSize = repConfig.getEcChunkSize();
  // One byte short of a full stripe: chunks 1 and 2 full, chunk 3 partial.
  int blockLength = chunkSize * 3 - 1;
  ByteBuffer[] dataBufs = allocateBuffers(repConfig.getData(), 3 * ONEMB);
  ECStreamTestUtil
      .randomFill(dataBufs, repConfig.getEcChunkSize(), dataGen, blockLength);
  ByteBuffer[] parity = generateParity(dataBufs, repConfig);
  // Each map below leaves at most two replicas missing. One of the present
  // replicas will be made to fail on read, but a spare datanode carrying the
  // same replica index is added, so the read must still succeed.
  List<Map<DatanodeDetails, Integer>> locations = new ArrayList<>();
  // Two data missing
  locations.add(ECStreamTestUtil.createIndexMap(3, 4, 5));
  // Two data missing
  locations.add(ECStreamTestUtil.createIndexMap(1, 4, 5));
  // One data missing - the last one
  locations.add(ECStreamTestUtil.createIndexMap(1, 2, 5));
  // One data and one parity missing
  locations.add(ECStreamTestUtil.createIndexMap(2, 3, 4));
  // One data and one parity missing
  locations.add(ECStreamTestUtil.createIndexMap(1, 2, 4));
  // No indexes missing
  locations.add(ECStreamTestUtil.createIndexMap(1, 2, 3, 4, 5));
  DatanodeDetails spare = MockDatanodeDetails.randomDatanodeDetails();
  for (Map<DatanodeDetails, Integer> dnMap : locations) {
    streamFactory = new TestBlockInputStreamFactory();
    addDataStreamsToFactory(dataBufs, parity);
    ByteBuffer[] bufs = allocateByteBuffers(repConfig);
    // this index fails, but has spare replica
    int failing = dnMap.values().iterator().next();
    streamFactory.setFailIndexes(failing);
    dnMap.put(spare, failing);
    BlockLocationInfo keyInfo =
        ECStreamTestUtil.createKeyInfo(repConfig, blockLength, dnMap);
    streamFactory.setCurrentPipeline(keyInfo.getPipeline());
    // Re-seed per iteration so expected bytes can be regenerated.
    dataGen = new SplittableRandom(randomSeed);
    try (ECBlockReconstructedStripeInputStream ecb =
        createInputStream(keyInfo)) {
      int read = ecb.read(bufs);
      assertEquals(blockLength, read);
      ECStreamTestUtil.assertBufferMatches(bufs[0], dataGen);
      ECStreamTestUtil.assertBufferMatches(bufs[1], dataGen);
      ECStreamTestUtil.assertBufferMatches(bufs[2], dataGen);
      // Check the underlying streams have been fully consumed:
      for (TestBlockInputStream bis : streamFactory.getBlockStreams()) {
        assertEquals(0, bis.getRemaining());
      }
      // Fixed: expected value first (arguments were reversed).
      assertEquals(blockLength, ecb.getPos());
      clearBuffers(bufs);
      // A further read should give EOF
      read = ecb.read(bufs);
      assertEquals(-1, read);
    }
  }
}
@Test
public void testSeek() throws IOException {
  // Generate the input data for 3 full stripes plus a partial stripe, and
  // generate the parity.
  int chunkSize = repConfig.getEcChunkSize();
  int partialStripeSize = chunkSize * 2 - 1;
  int dataLength = stripeSize() * 3 + partialStripeSize;
  ByteBuffer[] dataBufs = allocateBuffers(repConfig.getData(), 4 * chunkSize);
  ECStreamTestUtil
      .randomFill(dataBufs, repConfig.getEcChunkSize(), dataGen, dataLength);
  ByteBuffer[] parity = generateParity(dataBufs, repConfig);
  List<Map<DatanodeDetails, Integer>> locations = new ArrayList<>();
  // Two data missing
  locations.add(ECStreamTestUtil.createIndexMap(1, 4, 5));
  // One data missing
  locations.add(ECStreamTestUtil.createIndexMap(1, 2, 4, 5));
  // Two data missing including first
  locations.add(ECStreamTestUtil.createIndexMap(2, 4, 5));
  // One data and one parity missing
  locations.add(ECStreamTestUtil.createIndexMap(2, 3, 4));
  // No locations missing
  locations.add(ECStreamTestUtil.createIndexMap(1, 2, 3, 4, 5));
  for (Map<DatanodeDetails, Integer> dnMap : locations) {
    streamFactory = new TestBlockInputStreamFactory();
    addDataStreamsToFactory(dataBufs, parity);
    BlockLocationInfo keyInfo = ECStreamTestUtil.createKeyInfo(repConfig,
        dataLength, dnMap);
    streamFactory.setCurrentPipeline(keyInfo.getPipeline());
    ByteBuffer[] bufs = allocateByteBuffers(repConfig);
    try (ECBlockReconstructedStripeInputStream ecb =
        createInputStream(keyInfo)) {
      // Read Stripe 1
      int read = ecb.read(bufs);
      for (int j = 0; j < bufs.length; j++) {
        validateContents(dataBufs[j], bufs[j], 0, chunkSize);
      }
      assertEquals(stripeSize(), read);
      assertEquals(dataLength - stripeSize(), ecb.getRemaining());
      // Seek to 0 and read again
      clearBuffers(bufs);
      ecb.seek(0);
      // Fixed: capture the result so the assertion below verifies THIS read
      // rather than the stale value left over from the first read.
      read = ecb.read(bufs);
      for (int j = 0; j < bufs.length; j++) {
        validateContents(dataBufs[j], bufs[j], 0, chunkSize);
      }
      assertEquals(stripeSize(), read);
      assertEquals(dataLength - stripeSize(), ecb.getRemaining());
      // Seek to the last (partial) stripe
      clearBuffers(bufs);
      ecb.seek(stripeSize() * 3);
      read = ecb.read(bufs);
      validateContents(dataBufs[0], bufs[0], 3 * chunkSize, chunkSize);
      validateContents(dataBufs[1], bufs[1], 3 * chunkSize, chunkSize - 1);
      assertEquals(0, bufs[2].remaining());
      assertEquals(partialStripeSize, read);
      assertEquals(0, ecb.getRemaining());
      // seek to the start of stripe 3
      clearBuffers(bufs);
      ecb.seek(stripeSize() * (long) 2);
      read = ecb.read(bufs);
      for (int j = 0; j < bufs.length; j++) {
        validateContents(dataBufs[j], bufs[j], 2 * chunkSize, chunkSize);
      }
      assertEquals(stripeSize(), read);
      assertEquals(partialStripeSize, ecb.getRemaining());
    }
  }
}
@Test
public void testSeekToPartialOffsetFails() throws IOException {
  // Seeking is only supported to stripe-aligned offsets; any other offset
  // must be rejected with a descriptive IOException.
  // Fixed: declare throws IOException — closing the stream via
  // try-with-resources may throw, consistent with the sibling tests.
  Map<DatanodeDetails, Integer> dnMap =
      ECStreamTestUtil.createIndexMap(1, 4, 5);
  BlockLocationInfo keyInfo = ECStreamTestUtil.createKeyInfo(repConfig,
      stripeSize() * 3, dnMap);
  streamFactory.setCurrentPipeline(keyInfo.getPipeline());
  try (ECBlockReconstructedStripeInputStream ecb = createInputStream(keyInfo)) {
    IOException e = assertThrows(IOException.class, () -> ecb.seek(10));
    assertEquals("Requested position 10 does not align " +
        "with a stripe offset", e.getMessage());
  }
}
/**
 * Picks a pseudo-random element from the given set of stream indexes.
 * @param set candidate stream indexes; assumed non-empty — TODO confirm
 *     all callers guarantee this
 * @return one randomly selected element of the set
 */
private Integer getRandomStreamIndex(Set<Integer> set) {
  int toSkip = RandomUtils.secure().randomInt(0, set.size());
  return set.stream().skip(toSkip).findFirst().orElse(null);
}
@Test
public void testErrorReadingBlockContinuesReading() throws IOException {
  // Scenario: replica index 3 is absent so reads start in reconstruction
  // mode. After the first stripe, one open stream is made to fail; the
  // reader must drop that replica and keep reading with the remainder.
  // After a second stream fails there are too few usable locations, so the
  // next read must raise InsufficientLocationsException.

  // Generate the input data for 3 full stripes and generate the parity.
  int chunkSize = repConfig.getEcChunkSize();
  int partialStripeSize = chunkSize * 2 - 1;
  int blockLength = repConfig.getEcChunkSize() * repConfig.getData() * 3
      + partialStripeSize;
  ByteBuffer[] dataBufs = allocateBuffers(repConfig.getData(),
      4 * chunkSize);
  ECStreamTestUtil
      .randomFill(dataBufs, repConfig.getEcChunkSize(), dataGen, blockLength);
  ByteBuffer[] parity = generateParity(dataBufs, repConfig);
  // Repeat the scenario several times, since the failed streams are chosen
  // at random on each iteration.
  for (int k = 0; k < 5; k++) {
    Set<Integer> failed = new HashSet<>();
    streamFactory = new TestBlockInputStreamFactory();
    addDataStreamsToFactory(dataBufs, parity);
    // Data block index 3 is missing and needs recovered initially.
    Map<DatanodeDetails, Integer> dnMap =
        ECStreamTestUtil.createIndexMap(1, 2, 4, 5);
    BlockLocationInfo keyInfo = ECStreamTestUtil.createKeyInfo(repConfig,
        stripeSize() * 3 + partialStripeSize, dnMap);
    streamFactory.setCurrentPipeline(keyInfo.getPipeline());
    ByteBuffer[] bufs = allocateByteBuffers(repConfig);
    try (ECBlockReconstructedStripeInputStream ecb =
        createInputStream(keyInfo)) {
      // After reading the first stripe, make one of the streams error
      for (int i = 0; i < 3; i++) {
        int read = ecb.read(bufs);
        for (int j = 0; j < bufs.length; j++) {
          validateContents(dataBufs[j], bufs[j], i * chunkSize, chunkSize);
        }
        assertEquals(stripeSize() * (i + 1), ecb.getPos());
        assertEquals(stripeSize(), read);
        clearBuffers(bufs);
        if (i == 0) {
          // Fail one randomly-chosen open stream; the stripe reads in the
          // remaining loop iterations must still succeed.
          Integer failStream =
              getRandomStreamIndex(streamFactory.getStreamIndexes());
          streamFactory.getBlockStream(failStream)
              .setShouldError(true);
          failed.add(failStream);
        }
      }
      // The next read is a partial stripe
      int read = ecb.read(bufs);
      assertEquals(partialStripeSize, read);
      validateContents(dataBufs[0], bufs[0], 3 * chunkSize, chunkSize);
      validateContents(dataBufs[1], bufs[1], 3 * chunkSize, chunkSize - 1);
      assertEquals(0, bufs[2].remaining());
      assertEquals(0, bufs[2].position());
      // seek back to zero and read a stripe to re-open the streams
      ecb.seek(0);
      clearBuffers(bufs);
      ecb.read(bufs);
      // Now fail another random stream and the read should fail with
      // insufficient locations
      Set<Integer> currentStreams =
          new HashSet<>(streamFactory.getStreamIndexes());
      currentStreams.removeAll(failed);
      Integer failStream = getRandomStreamIndex(currentStreams);
      streamFactory.getBlockStream(failStream)
          .setShouldError(true);
      clearBuffers(bufs);
      assertThrows(InsufficientLocationsException.class, () -> ecb.read(bufs));
    }
  }
}
@Test
public void testAllLocationsFailOnFirstRead() throws IOException {
  // This test simulates stale nodes. When the nodes are stale, but not yet
  // dead, the locations will still be given to the client and it will try to
  // read them, but the read will always fail.
  // Additionally, if the key is small (less than 2 EC chunks), the locations
  // for the indexes which are all padding will be returned to the client and
  // this can confuse the "sufficient locations" check, resulting in a strange
  // error when selecting parity indexes (HDDS-6258)
  int chunkSize = repConfig.getEcChunkSize();
  // Exactly one chunk of data, so replica indexes 2 and 3 are pure padding.
  // (Removed the redundant partialStripeSize intermediate.)
  int blockLength = chunkSize;
  ByteBuffer[] dataBufs = allocateBuffers(repConfig.getData(), chunkSize);
  ECStreamTestUtil
      .randomFill(dataBufs, repConfig.getEcChunkSize(), dataGen, blockLength);
  ByteBuffer[] parity = generateParity(dataBufs, repConfig);
  streamFactory = new TestBlockInputStreamFactory();
  addDataStreamsToFactory(dataBufs, parity);
  // Fail all the indexes containing data on their first read.
  streamFactory.setFailIndexes(1, 4, 5);
  // The locations contain the padded indexes, as will often be the case
  // when containers are reported by SCM.
  Map<DatanodeDetails, Integer> dnMap =
      ECStreamTestUtil.createIndexMap(1, 2, 3, 4, 5);
  BlockLocationInfo keyInfo = ECStreamTestUtil.createKeyInfo(repConfig,
      blockLength, dnMap);
  streamFactory.setCurrentPipeline(keyInfo.getPipeline());
  ByteBuffer[] bufs = allocateByteBuffers(repConfig);
  try (ECBlockReconstructedStripeInputStream ecb =
      createInputStream(keyInfo)) {
    assertThrows(InsufficientLocationsException.class,
        () -> ecb.read(bufs));
  }
}
@Test
public void testFailedLocationsAreNotRead() throws IOException {
  // Build 3 full stripes plus a partial stripe of data, and its parity.
  int chunkSize = repConfig.getEcChunkSize();
  int partialStripeSize = chunkSize * 2 - 1;
  int blockLength = chunkSize * repConfig.getData() * 3 + partialStripeSize;
  ByteBuffer[] dataBufs = allocateBuffers(repConfig.getData(), 4 * chunkSize);
  ECStreamTestUtil.randomFill(dataBufs, chunkSize, dataGen, blockLength);
  ByteBuffer[] parity = generateParity(dataBufs, repConfig);
  streamFactory = new TestBlockInputStreamFactory();
  addDataStreamsToFactory(dataBufs, parity);
  // All five replicas are reported as available.
  Map<DatanodeDetails, Integer> dnMap =
      ECStreamTestUtil.createIndexMap(1, 2, 3, 4, 5);
  BlockLocationInfo keyInfo = ECStreamTestUtil.createKeyInfo(repConfig,
      stripeSize() * 3 + partialStripeSize, dnMap);
  streamFactory.setCurrentPipeline(keyInfo.getPipeline());
  ByteBuffer[] bufs = allocateByteBuffers(repConfig);
  // Re-seed so the expected byte sequence can be regenerated below.
  dataGen = new SplittableRandom(randomSeed);
  try (ECBlockReconstructedStripeInputStream ecb =
      createInputStream(keyInfo)) {
    // Mark the datanodes holding replica indexes 1 and 2 as failed up front.
    List<DatanodeDetails> failedNodes = new ArrayList<>();
    for (Map.Entry<DatanodeDetails, Integer> entry : dnMap.entrySet()) {
      if (entry.getValue() <= 2) {
        failedNodes.add(entry.getKey());
      }
    }
    ecb.addFailedDatanodes(failedNodes);
    // Read full stripe
    int read = ecb.read(bufs);
    for (ByteBuffer buffer : bufs) {
      ECStreamTestUtil.assertBufferMatches(buffer, dataGen);
    }
    assertEquals(stripeSize(), read);
    // Verify that no stream was opened for replica indexes 1 or 2, i.e. the
    // failed locations were never read.
    for (TestBlockInputStream stream : streamFactory.getBlockStreams()) {
      assertThat(stream.getEcReplicaIndex()).isGreaterThan(2);
    }
  }
}
/**
 * Builds a reconstructed-stripe input stream over the given block, backed
 * by the test stream factory, with checksum verification enabled.
 * @param keyInfo location and length details of the block to read
 * @return a new stream ready for reading
 */
private ECBlockReconstructedStripeInputStream createInputStream(
    BlockLocationInfo keyInfo) {
  OzoneClientConfig config = conf.getObject(OzoneClientConfig.class);
  config.setChecksumVerify(true);
  return new ECBlockReconstructedStripeInputStream(repConfig, keyInfo,
      null, null, streamFactory, bufferPool, ecReconstructExecutor,
      config);
}
/**
 * Registers the data buffers followed by the parity buffers, in order, as
 * the backing content for the factory's block streams.
 * @param data the data-block buffers (replica indexes 1..data)
 * @param parity the parity-block buffers (replica indexes data+1..total)
 */
private void addDataStreamsToFactory(ByteBuffer[] data, ByteBuffer[] parity) {
  // Idiom: build the combined list with Arrays.asList/addAll rather than
  // two manual element-copy loops.
  List<ByteBuffer> dataStreams = new ArrayList<>(Arrays.asList(data));
  dataStreams.addAll(Arrays.asList(parity));
  streamFactory.setBlockStreamData(dataStreams);
}
/**
 * Asserts that {@code data} holds exactly {@code count} readable bytes that
 * match {@code src} starting at {@code offset}. The data buffer is consumed
 * with relative gets and then flipped, leaving it readable again.
 * @param src The source of the data
 * @param data The data which should be checked against the source
 * @param offset The starting point in the src buffer
 * @param count How many bytes to check.
 */
private void validateContents(ByteBuffer src, ByteBuffer data, int offset,
    int count) {
  byte[] expected = src.array();
  assertEquals(count, data.remaining());
  for (int j = 0; j < count; j++) {
    int srcIndex = offset + j;
    assertEquals(expected[srcIndex], data.get(), "Element " + srcIndex);
  }
  // Make the consumed buffer readable again for the caller.
  data.flip();
}
/**
 * Creates {@code num} heap ByteBuffers, each with capacity {@code size}.
 * @param num Number of buffers to create
 * @param size The size of each buffer
 * @return the newly allocated buffers
 */
private ByteBuffer[] allocateBuffers(int num, int size) {
  ByteBuffer[] bufs = new ByteBuffer[num];
  Arrays.setAll(bufs, i -> ByteBuffer.allocate(size));
  return bufs;
}
/** @return the full stripe size (data chunks only) for the test repConfig. */
private int stripeSize() {
  return stripeSize(repConfig);
}

/** @return the data count multiplied by the chunk size of the given config. */
private int stripeSize(ECReplicationConfig rconfig) {
  return rconfig.getData() * rconfig.getEcChunkSize();
}
/** Resets position and limit on every buffer so each can be filled again. */
private void clearBuffers(ByteBuffer[] bufs) {
  Arrays.stream(bufs).forEach(ByteBuffer::clear);
}
/** Allocates one chunk-sized buffer per data block of the given config. */
private ByteBuffer[] allocateByteBuffers(ECReplicationConfig rConfig) {
  return allocateByteBuffers(rConfig.getData(), rConfig.getEcChunkSize());
}

/** Allocates {@code count} heap buffers of {@code capacity} bytes each. */
private ByteBuffer[] allocateByteBuffers(int count, int capacity) {
  ByteBuffer[] buffers = new ByteBuffer[count];
  Arrays.setAll(buffers, i -> ByteBuffer.allocate(capacity));
  return buffers;
}
/**
 * Maps the requested recovery indexes to the expected output positions.
 * @param recoveryIndexes the indexes being recovered; may be empty
 * @return the recovery indexes as a list, or [0, 1, 2] when none were set
 */
private List<Integer> getOutputIndexes(Set<Integer> recoveryIndexes) {
  if (recoveryIndexes.isEmpty()) {
    return Arrays.asList(0, 1, 2);
  }
  return new ArrayList<>(recoveryIndexes);
}
/**
 * Converts 1-based EC replica indexes to 0-based buffer indexes.
 * @param dnIdxs the 1-based replica indexes
 * @return the corresponding 0-based indexes as a set
 */
private static Set<Integer> toBufferIndexes(Collection<Integer> dnIdxs) {
  return dnIdxs.stream()
      .map(idx -> idx - 1)
      .collect(toSet());
}
}
|
openjdk/jdk8 | 36,360 | jdk/src/share/classes/java/nio/file/Path.java | /*
* Copyright (c) 2007, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package java.nio.file;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.util.Iterator;
/**
* An object that may be used to locate a file in a file system. It will
* typically represent a system dependent file path.
*
* <p> A {@code Path} represents a path that is hierarchical and composed of a
* sequence of directory and file name elements separated by a special separator
* or delimiter. A <em>root component</em>, that identifies a file system
* hierarchy, may also be present. The name element that is <em>farthest</em>
* from the root of the directory hierarchy is the name of a file or directory.
* The other name elements are directory names. A {@code Path} can represent a
* root, a root and a sequence of names, or simply one or more name elements.
* A {@code Path} is considered to be an <i>empty path</i> if it consists
* solely of one name element that is empty. Accessing a file using an
* <i>empty path</i> is equivalent to accessing the default directory of the
* file system. {@code Path} defines the {@link #getFileName() getFileName},
* {@link #getParent getParent}, {@link #getRoot getRoot}, and {@link #subpath
* subpath} methods to access the path components or a subsequence of its name
* elements.
*
* <p> In addition to accessing the components of a path, a {@code Path} also
* defines the {@link #resolve(Path) resolve} and {@link #resolveSibling(Path)
* resolveSibling} methods to combine paths. The {@link #relativize relativize}
* method that can be used to construct a relative path between two paths.
* Paths can be {@link #compareTo compared}, and tested against each other using
* the {@link #startsWith startsWith} and {@link #endsWith endsWith} methods.
*
* <p> This interface extends {@link Watchable} interface so that a directory
* located by a path can be {@link #register registered} with a {@link
* WatchService} and entries in the directory watched. </p>
*
* <p> <b>WARNING:</b> This interface is only intended to be implemented by
* those developing custom file system implementations. Methods may be added to
* this interface in future releases. </p>
*
* <h2>Accessing Files</h2>
* <p> Paths may be used with the {@link Files} class to operate on files,
* directories, and other types of files. For example, suppose we want a {@link
* java.io.BufferedReader} to read text from a file "{@code access.log}". The
* file is located in a directory "{@code logs}" relative to the current working
* directory and is UTF-8 encoded.
* <pre>
* Path path = FileSystems.getDefault().getPath("logs", "access.log");
* BufferedReader reader = Files.newBufferedReader(path, StandardCharsets.UTF_8);
* </pre>
*
* <a name="interop"></a><h2>Interoperability</h2>
* <p> Paths associated with the default {@link
* java.nio.file.spi.FileSystemProvider provider} are generally interoperable
* with the {@link java.io.File java.io.File} class. Paths created by other
* providers are unlikely to be interoperable with the abstract path names
* represented by {@code java.io.File}. The {@link java.io.File#toPath toPath}
* method may be used to obtain a {@code Path} from the abstract path name
* represented by a {@code java.io.File} object. The resulting {@code Path} can
* be used to operate on the same file as the {@code java.io.File} object. In
* addition, the {@link #toFile toFile} method is useful to construct a {@code
* File} from the {@code String} representation of a {@code Path}.
*
* <h2>Concurrency</h2>
* <p> Implementations of this interface are immutable and safe for use by
* multiple concurrent threads.
*
* @since 1.7
* @see Paths
*/
public interface Path
extends Comparable<Path>, Iterable<Path>, Watchable
{
/**
* Returns the file system that created this object.
*
* @return the file system that created this object
*/
FileSystem getFileSystem();
/**
* Tells whether or not this path is absolute.
*
* <p> An absolute path is complete in that it doesn't need to be combined
* with other path information in order to locate a file.
*
* @return {@code true} if, and only if, this path is absolute
*/
boolean isAbsolute();
/**
* Returns the root component of this path as a {@code Path} object,
* or {@code null} if this path does not have a root component.
*
* @return a path representing the root component of this path,
* or {@code null}
*/
Path getRoot();
/**
* Returns the name of the file or directory denoted by this path as a
* {@code Path} object. The file name is the <em>farthest</em> element from
* the root in the directory hierarchy.
*
* @return a path representing the name of the file or directory, or
* {@code null} if this path has zero elements
*/
Path getFileName();
/**
* Returns the <em>parent path</em>, or {@code null} if this path does not
* have a parent.
*
* <p> The parent of this path object consists of this path's root
* component, if any, and each element in the path except for the
* <em>farthest</em> from the root in the directory hierarchy. This method
* does not access the file system; the path or its parent may not exist.
* Furthermore, this method does not eliminate special names such as "."
* and ".." that may be used in some implementations. On UNIX for example,
* the parent of "{@code /a/b/c}" is "{@code /a/b}", and the parent of
* {@code "x/y/.}" is "{@code x/y}". This method may be used with the {@link
* #normalize normalize} method, to eliminate redundant names, for cases where
* <em>shell-like</em> navigation is required.
*
* <p> If this path has one or more elements, and no root component, then
* this method is equivalent to evaluating the expression:
* <blockquote><pre>
* subpath(0, getNameCount()-1);
* </pre></blockquote>
*
* @return a path representing the path's parent
*/
Path getParent();
/**
* Returns the number of name elements in the path.
*
* @return the number of elements in the path, or {@code 0} if this path
* only represents a root component
*/
int getNameCount();
/**
* Returns a name element of this path as a {@code Path} object.
*
* <p> The {@code index} parameter is the index of the name element to return.
* The element that is <em>closest</em> to the root in the directory hierarchy
* has index {@code 0}. The element that is <em>farthest</em> from the root
* has index {@link #getNameCount count}{@code -1}.
*
* @param index
* the index of the element
*
* @return the name element
*
* @throws IllegalArgumentException
* if {@code index} is negative, {@code index} is greater than or
* equal to the number of elements, or this path has zero name
* elements
*/
Path getName(int index);
/**
* Returns a relative {@code Path} that is a subsequence of the name
* elements of this path.
*
* <p> The {@code beginIndex} and {@code endIndex} parameters specify the
* subsequence of name elements. The name that is <em>closest</em> to the root
* in the directory hierarchy has index {@code 0}. The name that is
* <em>farthest</em> from the root has index {@link #getNameCount
* count}{@code -1}. The returned {@code Path} object has the name elements
* that begin at {@code beginIndex} and extend to the element at index {@code
* endIndex-1}.
*
* @param beginIndex
* the index of the first element, inclusive
* @param endIndex
* the index of the last element, exclusive
*
* @return a new {@code Path} object that is a subsequence of the name
* elements in this {@code Path}
*
* @throws IllegalArgumentException
* if {@code beginIndex} is negative, or greater than or equal to
* the number of elements. If {@code endIndex} is less than or
* equal to {@code beginIndex}, or larger than the number of elements.
*/
Path subpath(int beginIndex, int endIndex);
/**
* Tests if this path starts with the given path.
*
* <p> This path <em>starts</em> with the given path if this path's root
* component <em>starts</em> with the root component of the given path,
* and this path starts with the same name elements as the given path.
* If the given path has more name elements than this path then {@code false}
* is returned.
*
* <p> Whether or not the root component of this path starts with the root
* component of the given path is file system specific. If this path does
* not have a root component and the given path has a root component then
* this path does not start with the given path.
*
* <p> If the given path is associated with a different {@code FileSystem}
* to this path then {@code false} is returned.
*
* @param other
* the given path
*
* @return {@code true} if this path starts with the given path; otherwise
* {@code false}
*/
boolean startsWith(Path other);
/**
* Tests if this path starts with a {@code Path}, constructed by converting
* the given path string, in exactly the manner specified by the {@link
* #startsWith(Path) startsWith(Path)} method. On UNIX for example, the path
* "{@code foo/bar}" starts with "{@code foo}" and "{@code foo/bar}". It
* does not start with "{@code f}" or "{@code fo}".
*
* @param other
* the given path string
*
* @return {@code true} if this path starts with the given path; otherwise
* {@code false}
*
* @throws InvalidPathException
* If the path string cannot be converted to a Path.
*/
boolean startsWith(String other);
/**
* Tests if this path ends with the given path.
*
* <p> If the given path has <em>N</em> elements, and no root component,
* and this path has <em>N</em> or more elements, then this path ends with
* the given path if the last <em>N</em> elements of each path, starting at
* the element farthest from the root, are equal.
*
* <p> If the given path has a root component then this path ends with the
* given path if the root component of this path <em>ends with</em> the root
* component of the given path, and the corresponding elements of both paths
* are equal. Whether or not the root component of this path ends with the
* root component of the given path is file system specific. If this path
* does not have a root component and the given path has a root component
* then this path does not end with the given path.
*
* <p> If the given path is associated with a different {@code FileSystem}
* to this path then {@code false} is returned.
*
* @param other
* the given path
*
* @return {@code true} if this path ends with the given path; otherwise
* {@code false}
*/
boolean endsWith(Path other);
/**
* Tests if this path ends with a {@code Path}, constructed by converting
* the given path string, in exactly the manner specified by the {@link
* #endsWith(Path) endsWith(Path)} method. On UNIX for example, the path
* "{@code foo/bar}" ends with "{@code foo/bar}" and "{@code bar}". It does
* not end with "{@code r}" or "{@code /bar}". Note that trailing separators
* are not taken into account, and so invoking this method on the {@code
* Path}"{@code foo/bar}" with the {@code String} "{@code bar/}" returns
* {@code true}.
*
* @param other
* the given path string
*
* @return {@code true} if this path ends with the given path; otherwise
* {@code false}
*
* @throws InvalidPathException
* If the path string cannot be converted to a Path.
*/
boolean endsWith(String other);
/**
 * Returns a path that is this path with redundant name elements eliminated.
 *
 * <p> The precise definition of this method is implementation dependent but
 * in general it derives from this path, a path that does not contain
 * <em>redundant</em> name elements. In many file systems, the "{@code .}"
 * and "{@code ..}" are special names used to indicate the current directory
 * and parent directory. In such file systems all occurrences of "{@code .}"
 * are considered redundant. If a "{@code ..}" is preceded by a
 * non-"{@code ..}" name then both names are considered redundant (the
 * process to identify such names is repeated until it is no longer
 * applicable).
 *
 * <p> This method does not access the file system; the path may not locate
 * a file that exists. Eliminating "{@code ..}" and a preceding name from a
 * path may result in the path that locates a different file than the original
 * path. This can arise when the preceding name is a symbolic link.
 *
 * @return the resulting path or this path if it does not contain
 * redundant name elements; an empty path is returned if this path
 * does not have a root component and all name elements are redundant
 *
 * @see #getParent
 * @see #toRealPath
 */
Path normalize();
// -- resolution and relativization --
/**
* Resolve the given path against this path.
*
* <p> If the {@code other} parameter is an {@link #isAbsolute() absolute}
* path then this method trivially returns {@code other}. If {@code other}
* is an <i>empty path</i> then this method trivially returns this path.
* Otherwise this method considers this path to be a directory and resolves
* the given path against this path. In the simplest case, the given path
* does not have a {@link #getRoot root} component, in which case this method
* <em>joins</em> the given path to this path and returns a resulting path
* that {@link #endsWith ends} with the given path. Where the given path has
* a root component then resolution is highly implementation dependent and
* therefore unspecified.
*
* @param other
* the path to resolve against this path
*
* @return the resulting path
*
* @see #relativize
*/
Path resolve(Path other);
/**
* Converts a given path string to a {@code Path} and resolves it against
* this {@code Path} in exactly the manner specified by the {@link
* #resolve(Path) resolve} method. For example, suppose that the name
* separator is "{@code /}" and a path represents "{@code foo/bar}", then
* invoking this method with the path string "{@code gus}" will result in
* the {@code Path} "{@code foo/bar/gus}".
*
* @param other
* the path string to resolve against this path
*
* @return the resulting path
*
* @throws InvalidPathException
* if the path string cannot be converted to a Path.
*
* @see FileSystem#getPath
*/
Path resolve(String other);
/**
 * Resolves the given path against this path's {@link #getParent parent}
 * path. This is useful where a file name needs to be <i>replaced</i> with
 * another file name. For example, suppose that the name separator is
 * "{@code /}" and a path represents "{@code dir1/dir2/foo}", then invoking
 * this method with the {@code Path} "{@code bar}" will result in the {@code
 * Path} "{@code dir1/dir2/bar}". If this path does not have a parent path,
 * or {@code other} is {@link #isAbsolute() absolute}, then this method
 * returns {@code other}. If {@code other} is an empty path then this method
 * returns this path's parent, or where this path doesn't have a parent, the
 * empty path.
 *
 * @param other
 *          the path to resolve against this path's parent
 *
 * @return the resulting path
 *
 * @see #resolve(Path)
 */
Path resolveSibling(Path other);

/**
 * Converts a given path string to a {@code Path} and resolves it against
 * this path's {@link #getParent parent} path in exactly the manner
 * specified by the {@link #resolveSibling(Path) resolveSibling} method.
 *
 * @param other
 *          the path string to resolve against this path's parent
 *
 * @return the resulting path
 *
 * @throws InvalidPathException
 *          if the path string cannot be converted to a Path.
 *
 * @see FileSystem#getPath
 */
Path resolveSibling(String other);

/**
 * Constructs a relative path between this path and a given path.
 *
 * <p> Relativization is the inverse of {@link #resolve(Path) resolution}.
 * This method attempts to construct a {@link #isAbsolute relative} path
 * that when {@link #resolve(Path) resolved} against this path, yields a
 * path that locates the same file as the given path. For example, on UNIX,
 * if this path is {@code "/a/b"} and the given path is {@code "/a/b/c/d"}
 * then the resulting relative path would be {@code "c/d"}. Where this
 * path and the given path do not have a {@link #getRoot root} component,
 * then a relative path can be constructed. A relative path cannot be
 * constructed if only one of the paths have a root component. Where both
 * paths have a root component then it is implementation dependent if a
 * relative path can be constructed. If this path and the given path are
 * {@link #equals equal} then an <i>empty path</i> is returned.
 *
 * <p> For any two {@link #normalize normalized} paths <i>p</i> and
 * <i>q</i>, where <i>q</i> does not have a root component,
 * <blockquote>
 * {@code p.relativize(p.resolve(q)).equals(q)}
 * </blockquote>
 *
 * <p> When symbolic links are supported, then whether the resulting path,
 * when resolved against this path, yields a path that can be used to locate
 * the {@link Files#isSameFile same} file as {@code other} is implementation
 * dependent. For example, if this path is {@code "/a/b"} and the given
 * path is {@code "/a/x"} then the resulting relative path may be {@code
 * "../x"}. If {@code "b"} is a symbolic link then it is implementation
 * dependent if {@code "a/b/../x"} would locate the same file as {@code "/a/x"}.
 *
 * @param other
 *          the path to relativize against this path
 *
 * @return the resulting relative path, or an empty path if both paths are
 *         equal
 *
 * @throws IllegalArgumentException
 *         if {@code other} is not a {@code Path} that can be relativized
 *         against this path
 */
Path relativize(Path other);
/**
 * Returns a URI to represent this path.
 *
 * <p> This method constructs an absolute {@link URI} with a {@link
 * URI#getScheme() scheme} equal to the URI scheme that identifies the
 * provider. The exact form of the scheme specific part is highly provider
 * dependent.
 *
 * <p> In the case of the default provider, the URI is hierarchical with
 * a {@link URI#getPath() path} component that is absolute. The query and
 * fragment components are undefined. Whether the authority component is
 * defined or not is implementation dependent. There is no guarantee that
 * the {@code URI} may be used to construct a {@link java.io.File java.io.File}.
 * In particular, if this path represents a Universal Naming Convention (UNC)
 * path, then the UNC server name may be encoded in the authority component
 * of the resulting URI. In the case of the default provider, and the file
 * exists, and it can be determined that the file is a directory, then the
 * resulting {@code URI} will end with a slash.
 *
 * <p> The default provider provides a similar <em>round-trip</em> guarantee
 * to the {@link java.io.File} class. For a given {@code Path} <i>p</i> it
 * is guaranteed that
 * <blockquote>
 * {@code Paths.get(p.toUri()).equals(p.toAbsolutePath())}
 * </blockquote>
 * so long as the original {@code Path}, the {@code URI}, and the new {@code
 * Path} are all created in (possibly different invocations of) the same
 * Java virtual machine. Whether other providers make any guarantees is
 * provider specific and therefore unspecified.
 *
 * <p> When a file system is constructed to access the contents of a file
 * as a file system then it is highly implementation specific if the returned
 * URI represents the given path in the file system or it represents a
 * <em>compound</em> URI that encodes the URI of the enclosing file system.
 * A format for compound URIs is not defined in this release; such a scheme
 * may be added in a future release.
 *
 * @return the URI representing this path
 *
 * @throws java.io.IOError
 *         if an I/O error occurs obtaining the absolute path, or where a
 *         file system is constructed to access the contents of a file as
 *         a file system, and the URI of the enclosing file system cannot be
 *         obtained
 *
 * @throws SecurityException
 *         In the case of the default provider, and a security manager
 *         is installed, the {@link #toAbsolutePath toAbsolutePath} method
 *         throws a security exception.
 */
URI toUri();

/**
 * Returns a {@code Path} object representing the absolute path of this
 * path.
 *
 * <p> If this path is already {@link Path#isAbsolute absolute} then this
 * method simply returns this path. Otherwise, this method resolves the path
 * in an implementation dependent manner, typically by resolving the path
 * against a file system default directory. Depending on the implementation,
 * this method may throw an I/O error if the file system is not accessible.
 *
 * @return a {@code Path} object representing the absolute path
 *
 * @throws java.io.IOError
 *         if an I/O error occurs
 * @throws SecurityException
 *         In the case of the default provider, a security manager
 *         is installed, and this path is not absolute, then the security
 *         manager's {@link SecurityManager#checkPropertyAccess(String)
 *         checkPropertyAccess} method is invoked to check access to the
 *         system property {@code user.dir}
 */
Path toAbsolutePath();
/**
 * Returns the <em>real</em> path of an existing file.
 *
 * <p> The precise definition of this method is implementation dependent but
 * in general it derives from this path, an {@link #isAbsolute absolute}
 * path that locates the {@link Files#isSameFile same} file as this path, but
 * with name elements that represent the actual name of the directories
 * and the file. For example, where filename comparisons on a file system
 * are case insensitive then the name elements represent the names in their
 * actual case. Additionally, the resulting path has redundant name
 * elements removed.
 *
 * <p> If this path is relative then its absolute path is first obtained,
 * as if by invoking the {@link #toAbsolutePath toAbsolutePath} method.
 *
 * <p> The {@code options} array may be used to indicate how symbolic links
 * are handled. By default, symbolic links are resolved to their final
 * target. If the option {@link LinkOption#NOFOLLOW_LINKS NOFOLLOW_LINKS} is
 * present then this method does not resolve symbolic links.
 *
 * Some implementations allow special names such as "{@code ..}" to refer to
 * the parent directory. When deriving the <em>real path</em>, and a
 * "{@code ..}" (or equivalent) is preceded by a non-"{@code ..}" name then
 * an implementation will typically cause both names to be removed. When
 * not resolving symbolic links and the preceding name is a symbolic link
 * then the names are only removed if it is guaranteed that the resulting path
 * will locate the same file as this path.
 *
 * @param options
 *          options indicating how symbolic links are handled
 *
 * @return an absolute path representing the <em>real</em> path of the file
 *         located by this object
 *
 * @throws IOException
 *         if the file does not exist or an I/O error occurs
 * @throws SecurityException
 *         In the case of the default provider, and a security manager
 *         is installed, its {@link SecurityManager#checkRead(String) checkRead}
 *         method is invoked to check read access to the file, and where
 *         this path is not absolute, its {@link SecurityManager#checkPropertyAccess(String)
 *         checkPropertyAccess} method is invoked to check access to the
 *         system property {@code user.dir}
 */
Path toRealPath(LinkOption... options) throws IOException;

/**
 * Returns a {@link File} object representing this path. Where this {@code
 * Path} is associated with the default provider, then this method is
 * equivalent to returning a {@code File} object constructed with the
 * {@code String} representation of this path.
 *
 * <p> If this path was created by invoking the {@code File} {@link
 * File#toPath toPath} method then there is no guarantee that the {@code
 * File} object returned by this method is {@link #equals equal} to the
 * original {@code File}.
 *
 * @return a {@code File} object representing this path
 *
 * @throws UnsupportedOperationException
 *         if this {@code Path} is not associated with the default provider
 */
File toFile();
// -- watchable --

/**
 * Registers the file located by this path with a watch service.
 *
 * <p> In this release, this path locates a directory that exists. The
 * directory is registered with the watch service so that entries in the
 * directory can be watched. The {@code events} parameter is the events to
 * register and may contain the following events:
 * <ul>
 *   <li>{@link StandardWatchEventKinds#ENTRY_CREATE ENTRY_CREATE} -
 *       entry created or moved into the directory</li>
 *   <li>{@link StandardWatchEventKinds#ENTRY_DELETE ENTRY_DELETE} -
 *       entry deleted or moved out of the directory</li>
 *   <li>{@link StandardWatchEventKinds#ENTRY_MODIFY ENTRY_MODIFY} -
 *       entry in directory was modified</li>
 * </ul>
 *
 * <p> The {@link WatchEvent#context context} for these events is the
 * relative path between the directory located by this path, and the path
 * that locates the directory entry that is created, deleted, or modified.
 *
 * <p> The set of events may include additional implementation specific
 * events that are not defined by the enum {@link StandardWatchEventKinds}.
 *
 * <p> The {@code modifiers} parameter specifies <em>modifiers</em> that
 * qualify how the directory is registered. This release does not define any
 * <em>standard</em> modifiers. It may contain implementation specific
 * modifiers.
 *
 * <p> Where a file is registered with a watch service by means of a symbolic
 * link then it is implementation specific if the watch continues to depend
 * on the existence of the symbolic link after it is registered.
 *
 * @param watcher
 *          the watch service to which this object is to be registered
 * @param events
 *          the events for which this object should be registered
 * @param modifiers
 *          the modifiers, if any, that modify how the object is registered
 *
 * @return a key representing the registration of this object with the
 *         given watch service
 *
 * @throws UnsupportedOperationException
 *         if unsupported events or modifiers are specified
 * @throws IllegalArgumentException
 *         if an invalid combination of events or modifiers is specified
 * @throws ClosedWatchServiceException
 *         if the watch service is closed
 * @throws NotDirectoryException
 *         if the file is registered to watch the entries in a directory
 *         and the file is not a directory <i>(optional specific exception)</i>
 * @throws IOException
 *         if an I/O error occurs
 * @throws SecurityException
 *         In the case of the default provider, and a security manager is
 *         installed, the {@link SecurityManager#checkRead(String) checkRead}
 *         method is invoked to check read access to the file.
 */
@Override
WatchKey register(WatchService watcher,
                  WatchEvent.Kind<?>[] events,
                  WatchEvent.Modifier... modifiers)
    throws IOException;

/**
 * Registers the file located by this path with a watch service.
 *
 * <p> An invocation of this method behaves in exactly the same way as the
 * invocation
 * <pre>
 *     watchable.{@link #register(WatchService,WatchEvent.Kind[],WatchEvent.Modifier[]) register}(watcher, events, new WatchEvent.Modifier[0]);
 * </pre>
 *
 * <p> <b>Usage Example:</b>
 * Suppose we wish to register a directory for entry create, delete, and modify
 * events:
 * <pre>
 *     Path dir = ...
 *     WatchService watcher = ...
 *
 *     WatchKey key = dir.register(watcher, ENTRY_CREATE, ENTRY_DELETE, ENTRY_MODIFY);
 * </pre>
 * @param watcher
 *          The watch service to which this object is to be registered
 * @param events
 *          The events for which this object should be registered
 *
 * @return A key representing the registration of this object with the
 *         given watch service
 *
 * @throws UnsupportedOperationException
 *         If unsupported events are specified
 * @throws IllegalArgumentException
 *         If an invalid combination of events is specified
 * @throws ClosedWatchServiceException
 *         If the watch service is closed
 * @throws NotDirectoryException
 *         If the file is registered to watch the entries in a directory
 *         and the file is not a directory <i>(optional specific exception)</i>
 * @throws IOException
 *         If an I/O error occurs
 * @throws SecurityException
 *         In the case of the default provider, and a security manager is
 *         installed, the {@link SecurityManager#checkRead(String) checkRead}
 *         method is invoked to check read access to the file.
 */
@Override
WatchKey register(WatchService watcher,
                  WatchEvent.Kind<?>... events)
    throws IOException;
// -- Iterable --

/**
 * Returns an iterator over the name elements of this path.
 *
 * <p> The first element returned by the iterator represents the name
 * element that is closest to the root in the directory hierarchy, the
 * second element is the next closest, and so on. The last element returned
 * is the name of the file or directory denoted by this path. The {@link
 * #getRoot root} component, if present, is not returned by the iterator.
 *
 * @return an iterator over the name elements of this path.
 */
@Override
Iterator<Path> iterator();

// -- compareTo/equals/hashCode --

/**
 * Compares two abstract paths lexicographically. The ordering defined by
 * this method is provider specific, and in the case of the default
 * provider, platform specific. This method does not access the file system
 * and neither file is required to exist.
 *
 * <p> This method may not be used to compare paths that are associated
 * with different file system providers.
 *
 * @param other the path compared to this path.
 *
 * @return zero if the argument is {@link #equals equal} to this path, a
 *         value less than zero if this path is lexicographically less than
 *         the argument, or a value greater than zero if this path is
 *         lexicographically greater than the argument
 *
 * @throws ClassCastException
 *         if the paths are associated with different providers
 */
@Override
int compareTo(Path other);

/**
 * Tests this path for equality with the given object.
 *
 * <p> If the given object is not a Path, or is a Path associated with a
 * different {@code FileSystem}, then this method returns {@code false}.
 *
 * <p> Whether or not two paths are equal depends on the file system
 * implementation. In some cases the paths are compared without regard
 * to case, and others are case sensitive. This method does not access the
 * file system and the file is not required to exist. Where required, the
 * {@link Files#isSameFile isSameFile} method may be used to check if two
 * paths locate the same file.
 *
 * <p> This method satisfies the general contract of the {@link
 * java.lang.Object#equals(Object) Object.equals} method. </p>
 *
 * @param other
 *          the object to which this object is to be compared
 *
 * @return {@code true} if, and only if, the given object is a {@code Path}
 *         that is identical to this {@code Path}
 */
boolean equals(Object other);

/**
 * Computes a hash code for this path.
 *
 * <p> The hash code is based upon the components of the path, and
 * satisfies the general contract of the {@link Object#hashCode
 * Object.hashCode} method.
 *
 * @return the hash-code value for this path
 */
int hashCode();

/**
 * Returns the string representation of this path.
 *
 * <p> If this path was created by converting a path string using the
 * {@link FileSystem#getPath getPath} method then the path string returned
 * by this method may differ from the original String used to create the path.
 *
 * <p> The returned path string uses the default name {@link
 * FileSystem#getSeparator separator} to separate names in the path.
 *
 * @return the string representation of this path
 */
String toString();
}
/****************************************************************
* Licensed to the Apache Software Foundation (ASF) under one *
* or more contributor license agreements. See the NOTICE file *
* distributed with this work for additional information *
* regarding copyright ownership. The ASF licenses this file *
* to you under the Apache License, Version 2.0 (the *
* "License"); you may not use this file except in compliance *
* with the License. You may obtain a copy of the License at *
* *
* http://www.apache.org/licenses/LICENSE-2.0 *
* *
* Unless required by applicable law or agreed to in writing, *
* software distributed under the License is distributed on an *
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY *
* KIND, either express or implied. See the License for the *
* specific language governing permissions and limitations *
* under the License. *
****************************************************************/
package org.apache.james.mime4j.dom;
import java.util.Date;
import org.apache.james.mime4j.dom.address.AddressList;
import org.apache.james.mime4j.dom.address.Mailbox;
import org.apache.james.mime4j.dom.address.MailboxList;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.TimeZone;
import org.apache.james.mime4j.MimeException;
import org.apache.james.mime4j.MimeIOException;
import org.apache.james.mime4j.codec.DecodeMonitor;
import org.apache.james.mime4j.dom.address.Address;
import org.apache.james.mime4j.dom.field.AddressListField;
import org.apache.james.mime4j.dom.field.DateTimeField;
import org.apache.james.mime4j.dom.field.FieldName;
import org.apache.james.mime4j.dom.field.MailboxField;
import org.apache.james.mime4j.dom.field.MailboxListField;
import org.apache.james.mime4j.dom.field.ParseException;
import org.apache.james.mime4j.dom.field.UnstructuredField;
import org.apache.james.mime4j.field.DefaultFieldParser;
import org.apache.james.mime4j.field.Fields;
import org.apache.james.mime4j.field.LenientFieldParser;
import org.apache.james.mime4j.field.address.DefaultAddressParser;
import org.apache.james.mime4j.io.InputStreams;
import org.apache.james.mime4j.internal.AbstractEntityBuilder;
import org.apache.james.mime4j.message.BasicBodyFactory;
import org.apache.james.mime4j.message.BodyFactory;
import org.apache.james.mime4j.message.DefaultBodyDescriptorBuilder;
import org.apache.james.mime4j.message.HeaderImpl;
import org.apache.james.mime4j.message.MessageImpl;
import org.apache.james.mime4j.message.MultipartBuilder;
import org.apache.james.mime4j.internal.ParserStreamContentHandler;
import org.apache.james.mime4j.parser.MimeStreamParser;
import org.apache.james.mime4j.stream.BodyDescriptorBuilder;
import org.apache.james.mime4j.stream.Field;
import org.apache.james.mime4j.stream.MimeConfig;
import org.apache.james.mime4j.stream.NameValuePair;
/**
 * A MIME message (as defined in RFC 2045).
 */
public interface Message extends Entity, Body {
/**
 * Returns the value of the <i>Message-ID</i> header field of this message
 * or <code>null</code> if it is not present.
 *
 * @return the identifier of this message.
 */
String getMessageId();

/**
 * Returns the (decoded) value of the <i>Subject</i> header field of this
 * message or <code>null</code> if it is not present.
 *
 * @return the subject of this message.
 */
String getSubject();

/**
 * Returns the value of the <i>Date</i> header field of this message as
 * <code>Date</code> object or <code>null</code> if it is not present.
 *
 * @return the date of this message.
 */
Date getDate();

/**
 * Returns the value of the <i>Sender</i> header field of this message as
 * <code>Mailbox</code> object or <code>null</code> if it is not
 * present.
 *
 * @return the sender of this message.
 */
Mailbox getSender();

/**
 * Returns the value of the <i>From</i> header field of this message as
 * <code>MailboxList</code> object or <code>null</code> if it is not
 * present.
 *
 * @return value of the <i>From</i> field of this message.
 */
MailboxList getFrom();

/**
 * Returns the value of the <i>To</i> header field of this message as
 * <code>AddressList</code> object or <code>null</code> if it is not
 * present.
 *
 * @return value of the <i>To</i> field of this message.
 */
AddressList getTo();

/**
 * Returns the value of the <i>Cc</i> header field of this message as
 * <code>AddressList</code> object or <code>null</code> if it is not
 * present.
 *
 * @return value of the <i>Cc</i> field of this message.
 */
AddressList getCc();

/**
 * Returns the value of the <i>Bcc</i> header field of this message as
 * <code>AddressList</code> object or <code>null</code> if it is not
 * present.
 *
 * @return value of the <i>Bcc</i> field of this message.
 */
AddressList getBcc();

/**
 * Returns the value of the <i>Reply-To</i> header field of this message as
 * <code>AddressList</code> object or <code>null</code> if it is not
 * present.
 *
 * @return value of the <i>Reply-To</i> field of this message.
 */
AddressList getReplyTo();
class Builder extends AbstractEntityBuilder {
// Parsing/building collaborators. Any of these may remain null, in which
// case the parse/build machinery falls back to its defaults.
private MimeConfig config;                     // MIME parsing configuration (see use(MimeConfig))
private DecodeMonitor monitor;                 // handles malformed data during parse (see use(DecodeMonitor))
private BodyDescriptorBuilder bodyDescBuilder; // generates body descriptors during parse
private FieldParser<?> fieldParser;            // parses header fields during parse
private BodyFactory bodyFactory;               // creates message bodies
private boolean flatMode;                      // flat parsing mode toggle (see enableFlatMode)
private boolean rawContent;                    // when true, automatic content decoding is disabled

// Instances are obtained via the static of(...) factory methods below.
private Builder() {
    super();
}

/**
 * Creates a new, empty message builder.
 */
public static Builder of() {
    return new Builder();
}

/**
 * Creates a builder initialized with a copy of the given message.
 */
public static Builder of(Message other) {
    return new Builder().copy(other);
}

/**
 * Creates a builder populated by parsing the given MIME stream.
 */
public static Builder of(final InputStream is) throws IOException {
    return new Builder().parse(is);
}
/**
 * Sets MIME configuration.
 *
 * @param config the configuration.
 * @return this builder instance.
 */
public Builder use(MimeConfig config) {
    this.config = config;
    return this;
}

/**
 * Sets {@link org.apache.james.mime4j.codec.DecodeMonitor} that will be
 * used to handle malformed data when executing {@link #parse(java.io.InputStream)}.
 *
 * @param monitor the decoder monitor.
 * @return this builder instance.
 */
public Builder use(DecodeMonitor monitor) {
    this.monitor = monitor;
    return this;
}

/**
 * Sets {@link org.apache.james.mime4j.stream.BodyDescriptorBuilder} that will be
 * used to generate body descriptors when executing {@link #parse(java.io.InputStream)}.
 *
 * @param bodyDescBuilder the body descriptor builder.
 * @return this builder instance.
 */
public Builder use(BodyDescriptorBuilder bodyDescBuilder) {
    this.bodyDescBuilder = bodyDescBuilder;
    return this;
}

/**
 * Sets {@link org.apache.james.mime4j.dom.FieldParser} that will be
 * used to parse message fields when executing {@link #parse(java.io.InputStream)}.
 *
 * @param fieldParser the field parser.
 * @return this builder instance.
 */
public Builder use(FieldParser<?> fieldParser) {
    this.fieldParser = fieldParser;
    return this;
}

/**
 * Sets {@link org.apache.james.mime4j.message.BodyFactory} that will be
 * used to generate message body.
 *
 * @param bodyFactory the body factory.
 * @return this builder instance.
 */
public Builder use(BodyFactory bodyFactory) {
    this.bodyFactory = bodyFactory;
    return this;
}

/**
 * Enables flat parsing mode for {@link #parse(java.io.InputStream)} operation.
 *
 * @return this builder instance.
 */
public Builder enableFlatMode() {
    this.flatMode = true;
    return this;
}

/**
 * Disables flat parsing mode for {@link #parse(java.io.InputStream)} operation.
 *
 * @return this builder instance.
 */
public Builder disableFlatMode() {
    this.flatMode = false;
    return this;
}

/**
 * Enables automatic content decoding for {@link #parse(java.io.InputStream)} operation.
 *
 * @return this builder instance.
 */
public Builder enableContentDecoding() {
    this.rawContent = false;
    return this;
}

/**
 * Disables automatic content decoding for {@link #parse(java.io.InputStream)} operation.
 *
 * @return this builder instance.
 */
public Builder disableContentDecoding() {
    this.rawContent = true;
    return this;
}
/**
 * Re-initializes this builder with a deep copy of the given message:
 * every header field is copied verbatim and the body, if any, is
 * duplicated recursively. A {@code null} argument leaves the builder
 * unchanged.
 *
 * @param other the message to copy; may be {@code null}.
 * @return this builder instance.
 */
public Builder copy(Message other) {
    if (other == null) {
        return this;
    }
    clearFields();
    final Header sourceHeader = other.getHeader();
    if (sourceHeader != null) {
        for (final Field headerField : sourceHeader.getFields()) {
            addField(headerField);
        }
    }
    setBody(deepCopyBody(other.getBody()));
    return this;
}

/**
 * Produces a deep copy of the given body, or {@code null} when there is
 * nothing to copy.
 */
private static Body deepCopyBody(final Body source) {
    if (source instanceof Message) {
        return Builder.of((Message) source).build();
    }
    if (source instanceof Multipart) {
        return MultipartBuilder.createCopy((Multipart) source).build();
    }
    if (source instanceof SingleBody) {
        return ((SingleBody) source).copy();
    }
    return null;
}
// Covariant overrides: each method below delegates to AbstractEntityBuilder
// and narrows the return type to Builder so that calls can be chained
// fluently on this class.

@Override
public Builder setField(Field field) {
    super.setField(field);
    return this;
}

@Override
public Builder addField(Field field) {
    super.addField(field);
    return this;
}

@Override
public Builder removeFields(String name) {
    super.removeFields(name);
    return this;
}

@Override
public Builder clearFields() {
    super.clearFields();
    return this;
}

@Override
public Builder setContentTransferEncoding(String contentTransferEncoding) {
    super.setContentTransferEncoding(contentTransferEncoding);
    return this;
}

@Override
public Builder setContentType(String mimeType, NameValuePair... parameters) {
    super.setContentType(mimeType, parameters);
    return this;
}

@Override
public Builder setContentDisposition(String dispositionType) {
    super.setContentDisposition(dispositionType);
    return this;
}

@Override
public Builder setContentDisposition(String dispositionType, String filename) {
    super.setContentDisposition(dispositionType, filename);
    return this;
}

@Override
public Builder setContentDisposition(String dispositionType, String filename, long size) {
    super.setContentDisposition(dispositionType, filename, size);
    return this;
}

@Override
public Builder setContentDisposition(String dispositionType, String filename, long size,
        Date creationDate, Date modificationDate, Date readDate) {
    super.setContentDisposition(dispositionType, filename, size, creationDate, modificationDate, readDate);
    return this;
}

@Override
public Builder setBody(Body body) {
    super.setBody(body);
    return this;
}

@Override
public Builder setBody(TextBody textBody) {
    super.setBody(textBody);
    return this;
}

@Override
public Builder setBody(BinaryBody binaryBody) {
    super.setBody(binaryBody);
    return this;
}

@Override
public Builder setBody(Multipart multipart) {
    super.setBody(multipart);
    return this;
}

@Override
public Builder setBody(Message message) {
    super.setBody(message);
    return this;
}
/**
 * Sets text of this message with the charset. The subtype defaults to
 * "plain".
 *
 * @param text
 *            the text.
 * @param charset
 *            the charset of the text.
 */
public Builder setBody(String text, Charset charset) throws IOException {
    return setBody(text, null, charset);
}

/**
 * Sets text of this message with the given MIME subtype and charset.
 *
 * @param text
 *            the text.
 * @param charset
 *            the charset of the text.
 * @param subtype
 *            the text subtype (e.g. "plain", "html" or
 *            "xml").
 */
public Builder setBody(String text, String subtype, Charset charset) throws IOException {
    final String mimeType = "text/" + (subtype == null ? "plain" : subtype);
    // Only advertise a charset parameter when one was actually supplied.
    if (charset == null) {
        setField(Fields.contentType(mimeType));
    } else {
        setField(Fields.contentType(mimeType, new NameValuePair("charset", charset.name())));
    }
    final Body textBody = bodyFactory == null
            ? BasicBodyFactory.INSTANCE.textBody(text, charset)
            : bodyFactory.textBody(
                    InputStreams.create(text, charset),
                    charset == null ? null : charset.name());
    return setBody(textBody);
}
/**
 * Sets binary content of this message with the given MIME type. When no
 * MIME type is given, "application/octet-stream" is used.
 *
 * @param bin
 *            the body.
 * @param mimeType
 *            the MIME media type of the specified body
 *            ("type/subtype").
 */
public Builder setBody(byte[] bin, String mimeType) throws IOException {
    final String effectiveType = mimeType == null ? "application/octet-stream" : mimeType;
    setField(Fields.contentType(effectiveType));
    final Body binBody = bodyFactory == null
            ? BasicBodyFactory.INSTANCE.binaryBody(bin)
            : bodyFactory.binaryBody(InputStreams.create(bin));
    return setBody(binBody);
}
/**
 * Returns the value of the <i>Message-ID</i> header field of this message
 * or <code>null</code> if it is not present.
 *
 * @return the identifier of this message.
 */
public String getMessageId() {
    final Field field = obtainField(FieldName.MESSAGE_ID_LOWERCASE);
    if (field == null) {
        return null;
    }
    return field.getBody();
}

/**
 * Generates and sets message ID for this message.
 *
 * @param hostname
 *            host name to be included in the identifier or
 *            <code>null</code> if no host name should be included.
 */
public Builder generateMessageId(String hostname) {
    if (hostname != null) {
        setField(Fields.generateMessageId(hostname));
    } else {
        removeFields(FieldName.MESSAGE_ID);
    }
    return this;
}

/**
 * Sets message ID for this message.
 *
 * @param messageId
 *            the message ID, or <code>null</code> to remove the header
 *            field.
 */
public Builder setMessageId(String messageId) {
    if (messageId != null) {
        setField(Fields.messageId(messageId));
    } else {
        removeFields(FieldName.MESSAGE_ID);
    }
    return this;
}
/**
 * Returns the (decoded) value of the <i>Subject</i> header field of this
 * message or <code>null</code> if it is not present.
 *
 * @return the subject of this message.
 */
public String getSubject() {
    final UnstructuredField field = obtainField(FieldName.SUBJECT_LOWERCASE);
    if (field == null) {
        return null;
    }
    return field.getValue();
}

/**
 * Sets <i>Subject</i> header field for this message. The specified
 * string may contain non-ASCII characters, in which case it gets encoded as
 * an 'encoded-word' automatically.
 *
 * @param subject
 *            subject to set or <code>null</code> to remove the subject
 *            header field.
 */
public Builder setSubject(String subject) {
    if (subject != null) {
        setField(Fields.subject(subject));
    } else {
        removeFields(FieldName.SUBJECT);
    }
    return this;
}
/**
* Returns the value of the <i>Date</i> header field of this message as
* <code>Date</code> object or <code>null</code> if it is not present.
*
* @return the date of this message.
*/
public Date getDate() {
DateTimeField field = obtainField(FieldName.DATE_LOWERCASE);
return field != null ? field.getDate() : null;
}
        /**
         * Sets the <i>Date</i> header field for this message, encoding the
         * given <code>Date</code> with the default <code>TimeZone</code> of
         * this host (delegates to {@link #setDate(Date, TimeZone)} with a
         * <code>null</code> zone).
         *
         * @param date
         *            date to set or <code>null</code> to remove the date header
         *            field.
         * @return this builder (for method chaining).
         */
        public Builder setDate(Date date) {
            return setDate(date, null);
        }
/**
* Sets <i>Date</i> header field for this message. The specified
* <code>TimeZone</code> is used to encode the specified <code>Date</code>
* object into a string.
*
* @param date
* date to set or <code>null</code> to remove the date header
* field.
* @param zone
* a time zone.
*/
public Builder setDate(Date date, TimeZone zone) {
if (date == null) {
removeFields(FieldName.DATE);
} else {
setField(Fields.date(FieldName.DATE, date, zone));
}
return this;
}
        /**
         * Returns the value of the <i>Sender</i> header field of this message
         * as a <code>Mailbox</code> object, or <code>null</code> if the field
         * is not present.
         *
         * @return the sender of this message, or <code>null</code>.
         */
        public Mailbox getSender() {
            return getMailbox(FieldName.SENDER_LOWERCASE);
        }
        /**
         * Sets the <i>Sender</i> header field of this message to the specified
         * mailbox address.
         *
         * @param sender
         *            address to set or <code>null</code> to remove the header
         *            field.
         * @return this builder (for method chaining).
         */
        public Builder setSender(Mailbox sender) {
            return setMailbox(FieldName.SENDER, sender);
        }
        /**
         * Sets the <i>Sender</i> header field of this message to the mailbox
         * address parsed from the given string.
         *
         * @param sender
         *            address to set or <code>null</code> to remove the header
         *            field.
         * @return this builder (for method chaining).
         * @throws ParseException if the string is not a valid mailbox address.
         */
        public Builder setSender(String sender) throws ParseException {
            return setMailbox(FieldName.SENDER, sender);
        }
        /**
         * Returns the value of the <i>From</i> header field of this message as
         * a <code>MailboxList</code> object, or <code>null</code> if the field
         * is not present.
         *
         * @return value of the from field of this message, or <code>null</code>.
         */
        public MailboxList getFrom() {
            return getMailboxList(FieldName.FROM_LOWERCASE);
        }
        /**
         * Sets the <i>From</i> header field of this message to the specified
         * single mailbox address.
         *
         * @param from
         *            address to set or <code>null</code> to remove the header
         *            field.
         * @return this builder (for method chaining).
         */
        public Builder setFrom(Mailbox from) {
            return setMailboxList(FieldName.FROM, from);
        }
        /**
         * Sets the <i>From</i> header field of this message to the mailbox
         * address parsed from the given string.
         *
         * @param from
         *            address to set or <code>null</code> to remove the header
         *            field.
         * @return this builder (for method chaining).
         * @throws ParseException if the string is not a valid mailbox address.
         */
        public Builder setFrom(String from) throws ParseException {
            return setMailboxList(FieldName.FROM, from);
        }
        /**
         * Sets the <i>From</i> header field of this message to the specified
         * mailbox addresses.
         *
         * @param from
         *            addresses to set, or <code>null</code> or no arguments to
         *            remove the header field.
         * @return this builder (for method chaining).
         */
        public Builder setFrom(Mailbox... from) {
            return setMailboxList(FieldName.FROM, from);
        }
        /**
         * Sets the <i>From</i> header field of this message to the mailbox
         * addresses parsed from the given strings.
         *
         * @param from
         *            addresses to set, or <code>null</code> or no arguments to
         *            remove the header field.
         * @return this builder (for method chaining).
         * @throws ParseException if any string is not a valid mailbox address.
         */
        public Builder setFrom(String... from) throws ParseException {
            return setMailboxList(FieldName.FROM, from);
        }
        /**
         * Sets the <i>From</i> header field of this message to the specified
         * mailbox addresses.
         *
         * @param from
         *            addresses to set, or <code>null</code> or an empty
         *            collection to remove the header field.
         * @return this builder (for method chaining).
         */
        public Builder setFrom(Collection<Mailbox> from) {
            return setMailboxList(FieldName.FROM, from);
        }
        /**
         * Returns the value of the <i>To</i> header field of this message as an
         * <code>AddressList</code> object, or <code>null</code> if the field is
         * not present.
         *
         * @return value of the to field of this message, or <code>null</code>.
         */
        public AddressList getTo() {
            return getAddressList(FieldName.TO_LOWERCASE);
        }
        /**
         * Sets the <i>To</i> header field of this message to the specified
         * single address.
         *
         * @param to
         *            address to set or <code>null</code> to remove the header
         *            field.
         * @return this builder (for method chaining).
         */
        public Builder setTo(Address to) {
            return setAddressList(FieldName.TO, to);
        }
        /**
         * Sets the <i>To</i> header field of this message to the address parsed
         * from the given string.
         *
         * @param to
         *            address to set or <code>null</code> to remove the header
         *            field.
         * @return this builder (for method chaining).
         * @throws ParseException if the string is not a valid address.
         */
        public Builder setTo(String to) throws ParseException {
            return setAddressList(FieldName.TO, to);
        }
        /**
         * Sets the <i>To</i> header field of this message to the specified
         * addresses.
         *
         * @param to
         *            addresses to set, or <code>null</code> or no arguments to
         *            remove the header field.
         * @return this builder (for method chaining).
         */
        public Builder setTo(Address... to) {
            return setAddressList(FieldName.TO, to);
        }
        /**
         * Sets the <i>To</i> header field of this message to the addresses
         * parsed from the given strings.
         *
         * @param to
         *            addresses to set, or <code>null</code> or no arguments to
         *            remove the header field.
         * @return this builder (for method chaining).
         * @throws ParseException if any string is not a valid address.
         */
        public Builder setTo(String... to) throws ParseException {
            return setAddressList(FieldName.TO, to);
        }
        /**
         * Sets the <i>To</i> header field of this message to the specified
         * addresses.
         *
         * @param to
         *            addresses to set, or <code>null</code> or an empty
         *            collection to remove the header field.
         * @return this builder (for method chaining).
         */
        public Builder setTo(Collection<? extends Address> to) {
            return setAddressList(FieldName.TO, to);
        }
        /**
         * Returns the value of the <i>Cc</i> header field of this message as an
         * <code>AddressList</code> object, or <code>null</code> if the field is
         * not present.
         *
         * @return value of the cc field of this message, or <code>null</code>.
         */
        public AddressList getCc() {
            return getAddressList(FieldName.CC_LOWERCASE);
        }
        /**
         * Sets the <i>Cc</i> header field of this message to the specified
         * single address.
         *
         * @param cc
         *            address to set or <code>null</code> to remove the header
         *            field.
         * @return this builder (for method chaining).
         */
        public Builder setCc(Address cc) {
            return setAddressList(FieldName.CC, cc);
        }
        /**
         * Sets the <i>Cc</i> header field of this message to the specified
         * addresses.
         *
         * @param cc
         *            addresses to set, or <code>null</code> or no arguments to
         *            remove the header field.
         * @return this builder (for method chaining).
         */
        public Builder setCc(Address... cc) {
            return setAddressList(FieldName.CC, cc);
        }
        /**
         * Sets the <i>Cc</i> header field of this message to the specified
         * addresses.
         *
         * @param cc
         *            addresses to set, or <code>null</code> or an empty
         *            collection to remove the header field.
         * @return this builder (for method chaining).
         */
        public Builder setCc(Collection<? extends Address> cc) {
            return setAddressList(FieldName.CC, cc);
        }
        /**
         * Returns the value of the <i>Bcc</i> header field of this message as
         * an <code>AddressList</code> object, or <code>null</code> if the field
         * is not present.
         *
         * @return value of the bcc field of this message, or <code>null</code>.
         */
        public AddressList getBcc() {
            return getAddressList(FieldName.BCC_LOWERCASE);
        }
        /**
         * Sets the <i>Bcc</i> header field of this message to the specified
         * single address.
         *
         * @param bcc
         *            address to set or <code>null</code> to remove the header
         *            field.
         * @return this builder (for method chaining).
         */
        public Builder setBcc(Address bcc) {
            return setAddressList(FieldName.BCC, bcc);
        }
        /**
         * Sets the <i>Bcc</i> header field of this message to the specified
         * addresses.
         *
         * @param bcc
         *            addresses to set, or <code>null</code> or no arguments to
         *            remove the header field.
         * @return this builder (for method chaining).
         */
        public Builder setBcc(Address... bcc) {
            return setAddressList(FieldName.BCC, bcc);
        }
        /**
         * Sets the <i>Bcc</i> header field of this message to the specified
         * addresses.
         *
         * @param bcc
         *            addresses to set, or <code>null</code> or an empty
         *            collection to remove the header field.
         * @return this builder (for method chaining).
         */
        public Builder setBcc(Collection<? extends Address> bcc) {
            return setAddressList(FieldName.BCC, bcc);
        }
        /**
         * Returns the value of the <i>Reply-To</i> header field of this message
         * as an <code>AddressList</code> object, or <code>null</code> if the
         * field is not present.
         *
         * @return value of the reply-to field of this message, or
         *         <code>null</code>.
         */
        public AddressList getReplyTo() {
            return getAddressList(FieldName.REPLY_TO_LOWERCASE);
        }
        /**
         * Sets the <i>Reply-To</i> header field of this message to the
         * specified single address.
         *
         * @param replyTo
         *            address to set or <code>null</code> to remove the header
         *            field.
         * @return this builder (for method chaining).
         */
        public Builder setReplyTo(Address replyTo) {
            return setAddressList(FieldName.REPLY_TO, replyTo);
        }
        /**
         * Sets the <i>Reply-To</i> header field of this message to the
         * specified addresses.
         *
         * @param replyTo
         *            addresses to set, or <code>null</code> or no arguments to
         *            remove the header field.
         * @return this builder (for method chaining).
         */
        public Builder setReplyTo(Address... replyTo) {
            return setAddressList(FieldName.REPLY_TO, replyTo);
        }
        /**
         * Sets the <i>Reply-To</i> header field of this message to the
         * specified addresses.
         *
         * @param replyTo
         *            addresses to set, or <code>null</code> or an empty
         *            collection to remove the header field.
         * @return this builder (for method chaining).
         */
        public Builder setReplyTo(Collection<? extends Address> replyTo) {
            return setAddressList(FieldName.REPLY_TO, replyTo);
        }
        /**
         * Populates this builder by parsing a MIME message from the given
         * stream. Any fields and body previously set on this builder are
         * discarded and replaced by the parsed header fields and body.
         *
         * @param is the stream to read the message from; not closed by this method.
         * @return this builder (for method chaining).
         * @throws IOException on I/O failure, or {@link MimeIOException}
         *         wrapping a {@link MimeException} on malformed input.
         */
        public Builder parse(final InputStream is) throws IOException {
            // Resolve each parsing collaborator: use the explicitly configured
            // instance if present, otherwise derive a default; the strictness of
            // the defaults follows the effective MimeConfig.
            MimeConfig currentConfig = config != null ? config : MimeConfig.DEFAULT;
            boolean strict = currentConfig.isStrictParsing();
            DecodeMonitor currentMonitor = monitor != null ? monitor : strict ? DecodeMonitor.STRICT : DecodeMonitor.SILENT;
            BodyDescriptorBuilder currentBodyDescBuilder = bodyDescBuilder != null ? bodyDescBuilder :
                new DefaultBodyDescriptorBuilder(null, fieldParser != null ? fieldParser :
                    strict ? DefaultFieldParser.getParser() : LenientFieldParser.getParser(), currentMonitor);
            BodyFactory currentBodyFactory = bodyFactory != null ? bodyFactory : new BasicBodyFactory(!strict);
            MimeStreamParser parser = new MimeStreamParser(currentConfig, currentMonitor, currentBodyDescBuilder);
            // Parse into a temporary message, then copy its state into this builder.
            Message message = new MessageImpl();
            parser.setContentHandler(new ParserStreamContentHandler(message, currentBodyFactory));
            parser.setContentDecoding(!rawContent);
            if (flatMode) {
                parser.setFlat();
            }
            try {
                parser.parse(is);
            } catch (MimeException e) {
                // Surface parsing problems through the IOException hierarchy.
                throw new MimeIOException(e);
            }
            // Replace this builder's fields and body with the parsed result.
            clearFields();
            final List<Field> fields = message.getHeader().getFields();
            for (Field field: fields) {
                addField(field);
            }
            setBody(message.getBody());
            return this;
        }
public Message build() {
MessageImpl message = new MessageImpl();
HeaderImpl header = new HeaderImpl();
message.setHeader(header);
if (!containsField(FieldName.MIME_VERSION_LOWERCASE)) {
header.setField(Fields.version("1.0"));
}
for (Field field : getFields()) {
header.addField(field);
}
message.setBody(getBody());
return message;
}
private Mailbox getMailbox(String fieldName) {
MailboxField field = obtainField(fieldName);
return field != null ? field.getMailbox() : null;
}
private Builder setMailbox(String fieldName, Mailbox mailbox) {
if (mailbox == null) {
removeFields(fieldName);
} else {
setField(Fields.mailbox(fieldName, mailbox));
}
return this;
}
private Builder setMailbox(String fieldName, String mailbox) throws ParseException {
if (mailbox == null) {
removeFields(fieldName);
} else {
setField(Fields.mailbox(fieldName, DefaultAddressParser.DEFAULT.parseMailbox(mailbox)));
}
return this;
}
private MailboxList getMailboxList(String fieldName) {
MailboxListField field = obtainField(fieldName);
return field != null ? field.getMailboxList() : null;
}
        /** Sets the named field to a one-element mailbox list; {@code null} removes the field. */
        private Builder setMailboxList(String fieldName, Mailbox mailbox) {
            return setMailboxList(fieldName, mailbox == null ? null : Collections.singleton(mailbox));
        }
        /** Sets the named field to the parsed single mailbox; {@code null} removes the field. */
        private Builder setMailboxList(String fieldName, String mailbox) throws ParseException {
            return setMailboxList(fieldName, mailbox == null ? null : DefaultAddressParser.DEFAULT.parseMailbox(mailbox));
        }
        /** Sets the named field to the given mailboxes; {@code null} removes the field. */
        private Builder setMailboxList(String fieldName, Mailbox... mailboxes) {
            return setMailboxList(fieldName, mailboxes == null ? null : Arrays.asList(mailboxes));
        }
private List<Mailbox> parseMailboxes(String... mailboxes) throws ParseException {
if (mailboxes == null || mailboxes.length == 0) {
return null;
} else {
List<Mailbox> list = new ArrayList<Mailbox>();
for (String mailbox: mailboxes) {
list.add(DefaultAddressParser.DEFAULT.parseMailbox(mailbox));
}
return list;
}
}
        /** Sets the named field to the parsed mailboxes; {@code null}/empty removes the field. */
        private Builder setMailboxList(String fieldName, String... mailboxes) throws ParseException {
            return setMailboxList(fieldName, parseMailboxes(mailboxes));
        }
private Builder setMailboxList(String fieldName, Collection<Mailbox> mailboxes) {
if (mailboxes == null || mailboxes.isEmpty()) {
removeFields(fieldName);
} else {
setField(Fields.mailboxList(fieldName, mailboxes));
}
return this;
}
private AddressList getAddressList(String fieldName) {
AddressListField field = obtainField(fieldName);
return field != null? field.getAddressList() : null;
}
        /** Sets the named field to a one-element address list; {@code null} removes the field. */
        private Builder setAddressList(String fieldName, Address address) {
            return setAddressList(fieldName, address == null ? null : Collections.singleton(address));
        }
private Builder setAddressList(String fieldName, String address) throws ParseException {
return setAddressList(fieldName, address == null ? null : DefaultAddressParser.DEFAULT.parseMailbox(address));
}
        /** Sets the named field to the given addresses; {@code null} removes the field. */
        private Builder setAddressList(String fieldName, Address... addresses) {
            return setAddressList(fieldName, addresses == null ? null : Arrays.asList(addresses));
        }
private List<Address> parseAddresses(String... addresses) throws ParseException {
if (addresses == null || addresses.length == 0) {
return null;
} else {
List<Address> list = new ArrayList<Address>();
for (String address: addresses) {
list.add(DefaultAddressParser.DEFAULT.parseAddress(address));
}
return list;
}
}
        /** Sets the named field to the parsed addresses; {@code null}/empty removes the field. */
        private Builder setAddressList(String fieldName, String... addresses) throws ParseException {
            return setAddressList(fieldName, parseAddresses(addresses));
        }
private Builder setAddressList(String fieldName, Collection<? extends Address> addresses) {
if (addresses == null || addresses.isEmpty()) {
removeFields(fieldName);
} else {
setField(Fields.addressList(fieldName, addresses));
}
return this;
}
}
}
|
google/j2cl | 35,829 | jre/javatests/com/google/j2cl/jre/java/util/TestMap.java | // CHECKSTYLE_OFF: Copyrighted to ASF
/*
* Copyright 1999-2004 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// CHECKSTYLE_ON
package com.google.j2cl.jre.java.util;
import java.util.Collection;
import java.util.ConcurrentModificationException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import org.jspecify.annotations.NullMarked;
import org.jspecify.annotations.Nullable;
/**
* Tests base {@link java.util.Map} methods and contracts.
* <p>
* The forces at work here are similar to those in {@link TestCollection}.
* If your class implements the full Map interface, including optional
* operations, simply extend this class, and implement the {@link
* #makeEmptyMap()} method.
* <p>
 * On the other hand, if your map implementation is unusual, you may have to
* override one or more of the other protected methods. They're described
* below.<P>
*
* <B>Entry Population Methods</B><P>
*
* Override these methods if your map requires special entries:
*
* <UL>
* <LI>{@link #getSampleKeys()}
* <LI>{@link #getSampleValues()}
* <LI>{@link #getNewSampleValues()}
* <LI>{@link #getOtherKeys()}
* <LI>{@link #getOtherValues()}
* </UL>
*
* <B>Supported Operation Methods</B><P>
*
* Override these methods if your map doesn't support certain operations:
*
* <UL>
* <LI> {@link #useDuplicateValues()}
* <LI> {@link #useNullKey()}
* <LI> {@link #useNullValue()}
* <LI> {@link #isAddRemoveModifiable()}
* <LI> {@link #isChangeable()}
* </UL>
*
* <B>Fixture Methods</B><P>
*
* For tests on modification operations (puts and removes), fixtures are used
 * to verify that the operation results in correct state for the map and its
* collection views. Basically, the modification is performed against your
* map implementation, and an identical modification is performed against
* a <I>confirmed</I> map implementation. A confirmed map implementation is
* something like <Code>java.util.HashMap</Code>, which is known to conform
* exactly to the {@link Map} contract. After the modification takes place
* on both your map implementation and the confirmed map implementation, the
* two maps are compared to see if their state is identical. The comparison
* also compares the collection views to make sure they're still the same.<P>
*
* The upshot of all that is that <I>any</I> test that modifies the map in
* <I>any</I> way will verify that <I>all</I> of the map's state is still
* correct, including the state of its collection views. So for instance
* if a key is removed by the map's key set's iterator, then the entry set
* is checked to make sure the key/value pair no longer appears.<P>
*
* The {@link #map} field holds an instance of your collection implementation.
* The {@link #entrySet}, {@link #keySet} and {@link #collectionValues} fields hold
* that map's collection views. And the {@link #confirmed} field holds
* an instance of the confirmed collection implementation. The
* {@link #resetEmpty()} and {@link #resetFull()} methods set these fields to
* empty or full maps, so that tests can proceed from a known state.<P>
*
* After a modification operation to both {@link #map} and {@link #confirmed},
 * the {@link #verify()} method is invoked to compare the results. The
 * {@code verify()} method calls separate methods to verify the map and its three
 * collection views ({@code verifyMap()}, {@code verifyEntrySet()},
 * {@code verifyKeySet()}, and {@code verifyValues()}). You may want to override
 * one of the verification methods to perform additional verifications. For
* instance, {@link TestDoubleOrderedMap} would want override its {@link
* #verifyValues()} method to verify that the values are unique and in
* ascending order.<P>
*
* <B>Other Notes</B><P>
*
* If your {@link Map} fails one of these tests by design, you may still use
* this base set of cases. Simply override the test case (method) your {@link
* Map} fails and/or the methods that define the assumptions used by the test
* cases. For example, if your map does not allow duplicate values, override
* {@link #useDuplicateValues()} and have it return <code>false</code>
*/
@SuppressWarnings({"unchecked", "rawtypes"})
@NullMarked
abstract class TestMap extends TestObject {
// These instance variables are initialized with the reset method.
// Tests for map methods that alter the map (put, putAll, remove)
// first call reset() to create the map and its views; then perform
// the modification on the map; perform the same modification on the
// confirmed; and then call verify() to ensure that the map is equal
// to the confirmed, that the already-constructed collection views
// are still equal to the confirmed's collection views.
/** Map created by reset(). */
protected Map<@Nullable Object, @Nullable Object> map;
/** Entry set of map created by reset(). */
protected Set<Map.Entry<@Nullable Object, @Nullable Object>> entrySet;
/** Key set of map created by reset(). */
protected Set<@Nullable Object> keySet;
/** Values collection of map created by reset(). */
protected Collection<@Nullable Object> collectionValues;
/** HashMap created by reset(). */
protected Map<@Nullable Object, @Nullable Object> confirmed;
  /**
   * Override to return {@code false} if your map does not allow a {@code null} key. The default
   * implementation returns {@code true}.
   *
   * @return whether the map under test accepts a {@code null} key
   */
  protected boolean useNullKey() {
    return true;
  }
  /**
   * Override to return {@code false} if your map does not allow {@code null} values. The default
   * implementation returns {@code true}.
   *
   * @return whether the map under test accepts {@code null} values
   */
  protected boolean useNullValue() {
    return true;
  }
  /**
   * Override to return {@code false} if your map does not allow duplicate values. The default
   * implementation returns {@code true}.
   *
   * @return whether the map under test accepts duplicate values
   */
  protected boolean useDuplicateValues() {
    return true;
  }
  /**
   * Override to return {@code false} if your map does not allow existing mappings to be replaced
   * with new values. The default implementation returns {@code true}.
   *
   * @return whether mappings in the map under test may be changed to new values
   */
  protected boolean isChangeable() {
    return true;
  }
  /**
   * Override to return {@code false} if your map does not support add/remove modifications
   * (put, putAll, remove, clear). The default implementation returns {@code true}.
   *
   * @return whether the map under test supports add/remove modifications
   */
  protected boolean isAddRemoveModifiable() {
    return true;
  }
  /**
   * Override to return {@code false} if your map's iterators are not expected to throw
   * {@link ConcurrentModificationException} on concurrent modification. The default
   * implementation returns {@code true}.
   *
   * @return whether fail-fast iterator behavior is expected of the map under test
   */
  protected boolean isFailFastExpected() {
    return true;
  }
/**
* Returns the set of keys in the mappings used to test the map. This method must return an array
* with the same length as {@link #getSampleValues()} and all array elements must be different.
* The default implementation constructs a set of String keys, and includes a single null key if
* {@link #useNullKey()} returns <code>true</code>.
*/
protected @Nullable Object[] getSampleKeys() {
@Nullable Object[] result =
new @Nullable Object[] {
"blah",
"foo",
"bar",
"baz",
"tmp",
"gosh",
"golly",
"gee",
"hello",
"goodbye",
"we'll",
"see",
"you",
"all",
"again",
"key",
"key2",
useNullKey() ? null : "nonnullkey"
};
return result;
}
  /** Returns keys guaranteed not to be in the sample mappings; used for negative lookups. */
  protected Object[] getOtherKeys() {
    return TestCollection.getOtherNonNullStringElements();
  }
  /** Returns values guaranteed not to be in the sample mappings; used for negative lookups. */
  protected Object[] getOtherValues() {
    return TestCollection.getOtherNonNullStringElements();
  }
/**
* Returns the set of values in the mappings used to test the map. This method must return an
* array with the same length as {@link #getSampleKeys()}. The default implementation contructs a
* set of String values and includes a single null value if {@link #useNullValue()} returns <code>
* true</code>, and includes two values that are the same if {@link #useDuplicateValues()} returns
* <code>true</code>.
*/
protected @Nullable Object[] getSampleValues() {
@Nullable Object[] result =
new @Nullable Object[] {
"blahv",
"foov",
"barv",
"bazv",
"tmpv",
"goshv",
"gollyv",
"geev",
"hellov",
"goodbyev",
"we'llv",
"seev",
"youv",
"allv",
"againv",
useNullValue() ? null : "nonnullvalue",
"value",
useDuplicateValues() ? "value" : "value2",
};
return result;
}
/**
* Returns a the set of values that can be used to replace the values returned from {@link
* #getSampleValues()}. This method must return an array with the same length as {@link
* #getSampleValues()}. The values returned from this method should not be the same as those
* returned from {@link #getSampleValues()}. The default implementation constructs a set of String
* values and includes a single null value if {@link #useNullValue()} returns <code>true</code>,
* and includes two values that are the same if {@link #useDuplicateValues()} returns <code>true
* </code>.
*/
protected @Nullable Object[] getNewSampleValues() {
@Nullable Object[] result =
new @Nullable Object[] {
useNullValue() ? null : "newnonnullvalue",
"newvalue",
useDuplicateValues() ? "newvalue" : "newvalue2",
"newblahv",
"newfoov",
"newbarv",
"newbazv",
"newtmpv",
"newgoshv",
"newgollyv",
"newgeev",
"newhellov",
"newgoodbyev",
"newwe'llv",
"newseev",
"newyouv",
"newallv",
"newagainv",
};
return result;
}
  /**
   * Adds to {@code m} all the mappings described by {@link #getSampleKeys()} and
   * {@link #getSampleValues()}, then asserts the map's size reflects every addition. A
   * {@link NullPointerException} thrown by {@code put} is tolerated only when the offending key
   * or value is {@code null} AND the corresponding use-null flag says nulls are unsupported;
   * otherwise it indicates a test-setup or implementation error.
   *
   * @param m the map under test to populate
   */
  protected void addSampleMappings(Map m) {
    @Nullable Object[] keys = getSampleKeys();
    @Nullable Object[] values = getSampleValues();
    for (int i = 0; i < keys.length; i++) {
      try {
        m.put(keys[i], values[i]);
      } catch (NullPointerException exception) {
        // An NPE is only acceptable when it was triggered by a null key/value.
        assertTrue(
            "NullPointerException only allowed to be thrown "
                + "if either the key or value is null.",
            keys[i] == null || values[i] == null);
        if (keys[i] == null) {
          if (useNullKey()) {
            // The fixture claimed null keys work, yet put rejected one.
            throw new Error(
                "NullPointerException on null key, but "
                    + "useNullKey is not overridden to return false.",
                exception);
          }
        } else if (values[i] == null) {
          if (useNullValue()) {
            // The fixture claimed null values work, yet put rejected one.
            throw new Error(
                "NullPointerException on null value, but "
                    + "useNullValue is not overridden to return false.",
                exception);
          }
        } else {
          // Unknown reason for NullPointer.
          throw exception;
        }
      }
    }
    assertEquals("size must reflect number of mappings added.", keys.length, m.size());
  }
  /**
   * Returns a new, empty {@link Map} of the implementation under test. Each invocation must
   * return a fresh instance.
   */
  protected abstract Map<@Nullable Object, @Nullable Object> makeEmptyMap();
protected Map<@Nullable Object, @Nullable Object> makeConfirmedMap() {
return new HashMap();
}
/**
* Return a new, populated map. The mappings in the map should match the keys and values returned
* from {@link #getSampleKeys()} and {@link #getSampleValues()}. The default implementation uses
* makeEmptyMap() and calls {@link #addSampleMappings()} to add all the mappings to the map.
*/
protected Map makeFullMap() {
Map m = makeEmptyMap();
addSampleMappings(m);
return m;
}
  /** Returns an empty map; satisfies the {@link TestObject} fixture contract. */
  @Override
  public Object makeObject() {
    return makeEmptyMap();
  }
public void testSpecialKeysValues() {
String[] keys = {"toString", "constructor", "__proto__", "", "null"};
@Nullable Object[] values =
new @Nullable Object[] {new Object(), new Object(), new Object(), new Object(), null};
Map map = makeEmptyMap();
assertMap(map, keys, values);
@Nullable Object[] undefineds = new @Nullable Object[values.length];
assertMap(map, keys, undefineds);
}
private void assertMap(Map map, @Nullable String[] keys, @Nullable Object[] values) {
assertEmptyMap(map, keys, values);
// Fill the map with special keys/values.
for (int i = 0; i < keys.length; i++) {
map.put(keys[i], values[i]);
}
// Assert the map with filled in keys/values
for (int i = 0; i < keys.length; i++) {
assertTrue(keys[i], map.containsKey(keys[i]));
assertTrue(keys[i], map.containsValue(values[i]));
assertSame(keys[i], values[i], map.get(keys[i]));
}
assertEquals(map.toString(), keys.length, map.size());
// Remove the keys and assert the results
for (int i = 0; i < keys.length; i++) {
assertSame(keys[i], values[i], map.remove(keys[i]));
}
assertEmptyMap(map, keys, values);
}
private static void assertEmptyMap(
Map map, final @Nullable String[] keys, final @Nullable Object[] values) {
for (int i = 0; i < keys.length; i++) {
assertFalse(keys[i], map.containsKey(keys[i]));
assertFalse(keys[i], map.containsValue(values[i]));
assertNull(keys[i], map.get(keys[i]));
}
}
  /**
   * Sanity-checks the test fixture itself. Verifies that {@link #getSampleKeys()},
   * {@link #getSampleValues()} and {@link #getNewSampleValues()} return non-null arrays of equal
   * length, that keys contain no duplicates, that nulls appear only when the corresponding
   * use-null flag allows them, and that each new value differs from the sample value at the same
   * index.
   */
  public void testSampleMappings() {
    @Nullable Object[] keys = getSampleKeys();
    @Nullable Object[] values = getSampleValues();
    Object[] newValues = getNewSampleValues();
    assertNotNull("failure in test: Must have keys returned from " + "getSampleKeys.", keys);
    assertTrue(
        "failure in test: Must have values returned from " + "getSampleValues.", values != null);
    // verify keys and values have equivalent lengths (in case getSampleX are
    // overridden)
    assertEquals(
        "failure in test: not the same number of sample " + "keys and values.",
        keys.length,
        values.length);
    assertEquals(
        "failure in test: not the same number of values and new values.",
        values.length,
        newValues.length);
    // verify there aren't duplicate keys, and check values
    for (int i = 0; i < keys.length - 1; i++) {
      for (int j = i + 1; j < keys.length; j++) {
        assertTrue("failure in test: duplicate null keys.", (keys[i] != null || keys[j] != null));
        assertTrue(
            "failure in test: duplicate non-null key.",
            (keys[i] == null
                || keys[j] == null
                || (!keys[i].equals(keys[j]) && !keys[j].equals(keys[i]))));
      }
      assertTrue(
          "failure in test: found null key, but useNullKey " + "is false.",
          keys[i] != null || useNullKey());
      assertTrue(
          "failure in test: found null value, but useNullValue " + "is false.",
          values[i] != null || useNullValue());
      assertTrue(
          "failure in test: found null new value, but useNullValue " + "is false.",
          newValues[i] != null || useNullValue());
      assertTrue(
          "failure in test: values should not be the same as new value",
          values[i] != newValues[i] && (values[i] == null || !values[i].equals(newValues[i])));
    }
  }
// tests begin here. Each test adds a little bit of tested functionality.
// Many methods assume previous methods passed. That is, they do not
// exhaustively recheck things that have already been checked in a previous
// test methods.
/**
* Test to ensure that makeEmptyMap and makeFull returns a new non-null map with each invocation.
*/
public void testMakeMap() {
Map em = makeEmptyMap();
assertTrue("failure in test: makeEmptyMap must return a non-null map.", em != null);
Map em2 = makeEmptyMap();
assertTrue("failure in test: makeEmptyMap must return a non-null map.", em != null);
assertTrue(
"failure in test: makeEmptyMap must return a new map " + "with each invocation.",
em != em2);
Map fm = makeFullMap();
assertTrue("failure in test: makeFullMap must return a non-null map.", fm != null);
Map fm2 = makeFullMap();
assertTrue("failure in test: makeFullMap must return a non-null map.", fm != null);
assertTrue(
"failure in test: makeFullMap must return a new map " + "with each invocation.", fm != fm2);
}
  /** Tests {@link Map#isEmpty()}: true for an empty map, false for a populated one. */
  public void testMapIsEmpty() {
    resetEmpty();
    assertEquals("Map.isEmpty() should return true with an empty map", true, map.isEmpty());
    verify();
    resetFull();
    assertEquals("Map.isEmpty() should return false with a non-empty map", false, map.isEmpty());
    verify();
  }
  /** Tests {@link Map#size()}: zero for an empty map, sample-key count for a full one. */
  public void testMapSize() {
    resetEmpty();
    assertEquals("Map.size() should be 0 with an empty map", 0, map.size());
    verify();
    resetFull();
    assertEquals(
        "Map.size() should equal the number of entries " + "in the map",
        getSampleKeys().length,
        map.size());
    verify();
  }
  /**
   * Tests {@link Map#clear()}. When the map {@link #isAddRemoveModifiable() supports add/remove
   * modification}, clears both an empty and a full map and verifies the map and its collection
   * views against the confirmed map. Maps that do not support add/remove modification are simply
   * skipped.
   *
   * <p>NOTE(review): an earlier version of this javadoc claimed that clear() is checked for
   * UnsupportedOperationException on unmodifiable maps; no such check is performed here.
   */
  public void testMapClear() {
    if (!isAddRemoveModifiable()) {
      return;
    }
    resetEmpty();
    map.clear();
    confirmed.clear();
    verify();
    resetFull();
    map.clear();
    confirmed.clear();
    verify();
  }
/**
* Tests Map.containsKey(Object) by verifying it returns false for all sample keys on a map
* created using an empty map and returns true for all sample keys returned on a full map.
*/
public void testMapContainsKey() {
@Nullable Object[] keys = getSampleKeys();
resetEmpty();
for (int i = 0; i < keys.length; i++) {
assertTrue("Map must not contain key when map is empty", !map.containsKey(keys[i]));
}
verify();
resetFull();
for (int i = 0; i < keys.length; i++) {
assertTrue(
"Map must contain key for a mapping in the map. " + "Missing: " + keys[i],
map.containsKey(keys[i]));
}
verify();
}
/**
* Tests Map.containsValue(Object) by verifying it returns false for all sample values on an empty
* map and returns true for all sample values on a full map.
*/
public void testMapContainsValue() {
Object[] values = getSampleValues();
resetEmpty();
for (int i = 0; i < values.length; i++) {
assertTrue("Empty map must not contain value", !map.containsValue(values[i]));
}
verify();
resetFull();
for (int i = 0; i < values.length; i++) {
assertTrue("Map must contain value for a mapping in the map.", map.containsValue(values[i]));
}
verify();
}
  /**
   * Tests {@link Map#equals(Object)}: equal to the confirmed map when both are empty or both are
   * full; unequal after the confirmed map diverges by one entry; unequal to {@code null} and to a
   * non-map object.
   */
  public void testMapEquals() {
    resetEmpty();
    assertTrue("Empty maps unequal.", map.equals(confirmed));
    verify();
    resetFull();
    assertTrue("Full maps unequal.", map.equals(confirmed));
    verify();
    resetFull();
    // modify the HashMap created from the full map and make sure this
    // change results in map.equals() to return false.
    Iterator iter = confirmed.keySet().iterator();
    iter.next();
    iter.remove();
    assertTrue("Different maps equal.", !map.equals(confirmed));
    resetFull();
    assertTrue("equals(null) returned true.", !map.equals(null));
    assertTrue("equals(new Object()) returned true.", !map.equals(new Object()));
    verify();
  }
/** Tests Map.get(Object) */
public void testMapGet() {
resetEmpty();
Object[] keys = getSampleKeys();
Object[] values = getSampleValues();
for (int i = 0; i < keys.length; i++) {
assertTrue("Empty map.get() should return null.", map.get(keys[i]) == null);
}
verify();
resetFull();
for (int i = 0; i < keys.length; i++) {
assertEquals("Full map.get() should return value from mapping.", values[i], map.get(keys[i]));
}
}
/** Tests Map.hashCode() */
public void testMapHashCode() {
resetEmpty();
assertTrue("Empty maps have different hashCodes.", map.hashCode() == confirmed.hashCode());
resetFull();
assertTrue("Equal maps have different hashCodes.", map.hashCode() == confirmed.hashCode());
}
/**
* Tests Map.toString(). Since the format of the string returned by the toString() method is not
* defined in the Map interface, there is no common way to test the results of the toString()
* method. Thereforce, it is encouraged that Map implementations override this test with one that
* checks the format matches any format defined in its API. This default implementation just
* verifies that the toString() method does not return null.
*/
public void testMapToString() {
resetEmpty();
assertTrue("Empty map toString() should not return null", map.toString() != null);
verify();
resetFull();
assertTrue("Empty map toString() should not return null", map.toString() != null);
verify();
}
/** Tests Map.put(Object, Object) */
public void testMapPut() {
if (!isAddRemoveModifiable()) {
return;
}
resetEmpty();
Object[] keys = getSampleKeys();
Object[] values = getSampleValues();
Object[] newValues = getNewSampleValues();
for (int i = 0; i < keys.length; i++) {
Object o = map.put(keys[i], values[i]);
confirmed.put(keys[i], values[i]);
verify();
assertTrue("First map.put should return null", o == null);
assertTrue("Map should contain key after put", map.containsKey(keys[i]));
assertTrue("Map should contain value after put", map.containsValue(values[i]));
}
for (int i = 0; i < keys.length; i++) {
Object o = map.put(keys[i], newValues[i]);
confirmed.put(keys[i], newValues[i]);
verify();
assertEquals("Second map.put should return previous value", values[i], o);
assertTrue("Map should still contain key after put", map.containsKey(keys[i]));
assertTrue("Map should contain new value after put", map.containsValue(newValues[i]));
// if duplicates are allowed, we're not guarunteed that the value
// no longer exists, so don't try checking that.
if (!useDuplicateValues()) {
assertTrue(
"Map should not contain old value after second put", !map.containsValue(values[i]));
}
}
}
/** Tests Map.putAll(Collection) */
public void testMapPutAll() {
if (!isAddRemoveModifiable()) {
return;
}
resetEmpty();
Map m2 = makeFullMap();
map.putAll(m2);
confirmed.putAll(m2);
verify();
resetEmpty();
m2 = new HashMap();
Object[] keys = getSampleKeys();
Object[] values = getSampleValues();
for (int i = 0; i < keys.length; i++) {
m2.put(keys[i], values[i]);
}
map.putAll(m2);
confirmed.putAll(m2);
verify();
}
/** Tests Map.remove(Object) */
public void testMapRemove() {
if (!isAddRemoveModifiable()) {
return;
}
resetEmpty();
Object[] keys = getSampleKeys();
Object[] values = getSampleValues();
for (int i = 0; i < keys.length; i++) {
Object o = map.remove(keys[i]);
assertTrue("First map.remove should return null", o == null);
}
verify();
resetFull();
for (int i = 0; i < keys.length; i++) {
Object o = map.remove(keys[i]);
confirmed.remove(keys[i]);
verify();
assertEquals("map.remove with valid key should return value", values[i], o);
}
Object[] other = getOtherKeys();
resetFull();
int size = map.size();
for (int i = 0; i < other.length; i++) {
Object o = map.remove(other[i]);
assertEquals("map.remove for nonexistent key should return null", o, null);
assertEquals("map.remove for nonexistent key should not " + "shrink map", size, map.size());
}
verify();
}
public void testFailFastEntrySet() {
if (!isAddRemoveModifiable()) {
return;
}
if (!isFailFastExpected()) {
return;
}
resetFull();
Iterator<Map.Entry<@Nullable Object, @Nullable Object>> it = map.entrySet().iterator();
final Map.Entry val = it.next();
map.remove(val.getKey());
try {
it.next();
fail();
} catch (ConcurrentModificationException expected) {
}
resetFull();
it = map.entrySet().iterator();
it.next();
map.clear();
try {
it.next();
fail();
} catch (ConcurrentModificationException expected) {
}
}
public void testFailFastKeySet() {
if (!isAddRemoveModifiable()) {
return;
}
if (!isFailFastExpected()) {
return;
}
resetFull();
Iterator it = map.keySet().iterator();
final Object val = it.next();
map.remove(val);
try {
it.next();
fail();
} catch (ConcurrentModificationException expected) {
}
resetFull();
it = map.keySet().iterator();
it.next();
map.clear();
try {
it.next();
fail();
} catch (ConcurrentModificationException expected) {
}
}
public void testFailFastValues() {
if (!isAddRemoveModifiable()) {
return;
}
if (!isFailFastExpected()) {
return;
}
resetFull();
Iterator it = map.values().iterator();
it.next();
map.remove(map.keySet().iterator().next());
try {
it.next();
fail();
} catch (ConcurrentModificationException expected) {
}
resetFull();
it = map.values().iterator();
it.next();
map.clear();
try {
it.next();
fail();
} catch (ConcurrentModificationException expected) {
}
}
/**
* Utility methods to create an array of Map.Entry objects out of the given key and value arrays.
*
* <p>
*
* @param keys the array of keys
* @param values the array of values
* @return an array of Map.Entry of those keys to those values
*/
private Map.Entry[] makeEntryArray(Object[] keys, Object[] values) {
Map.Entry[] result = new Map.Entry[keys.length];
for (int i = 0; i < keys.length; i++) {
result[i] = new DefaultMapEntry(keys[i], values[i]);
}
return result;
}
  /** Set-contract tests applied to the map's entrySet() view. */
  class TestMapEntrySet extends TestSet {
    public TestMapEntrySet() {
      super("");
    }
    // Have to implement manually; entrySet doesn't support addAll
    @Override
    protected @Nullable Object[] getFullElements() {
      Object[] k = getSampleKeys();
      Object[] v = getSampleValues();
      return makeEntryArray(k, v);
    }
    // Have to implement manually; entrySet doesn't support addAll
    @Override
    protected Object[] getOtherElements() {
      Object[] k = getOtherKeys();
      Object[] v = getOtherValues();
      return makeEntryArray(k, v);
    }
    @Override
    protected Set<@Nullable Object> makeEmptySet() {
      Object result = makeEmptyMap().entrySet();
      return (Set<@Nullable Object>) result;
    }
    @Override
    protected Set<@Nullable Object> makeFullSet() {
      Object result = makeFullMap().entrySet();
      return (Set<@Nullable Object>) result;
    }
    @Override
    protected boolean isAddSupported() {
      // Collection views don't support add operations.
      return false;
    }
    @Override
    protected boolean isRemoveSupported() {
      // Entry set should only support remove if map does
      return isAddRemoveModifiable();
    }
    @Override
    protected void resetFull() {
      // Reset the enclosing TestMap first, then re-capture the entry-set views.
      TestMap.this.resetFull();
      collection = map.entrySet();
      TestMapEntrySet.this.confirmed = TestMap.this.confirmed.entrySet();
    }
    @Override
    protected void resetEmpty() {
      TestMap.this.resetEmpty();
      collection = map.entrySet();
      TestMapEntrySet.this.confirmed = TestMap.this.confirmed.entrySet();
    }
    @Override
    protected void verify() {
      // Verify the set contract and then the enclosing map's invariants.
      super.verify();
      TestMap.this.verify();
    }
  }
  /** Set-contract tests applied to the map's keySet() view. */
  class TestMapKeySet extends TestSet {
    public TestMapKeySet() {
      super("");
    }
    @Override
    protected @Nullable Object[] getFullElements() {
      return getSampleKeys();
    }
    @Override
    protected Object[] getOtherElements() {
      return getOtherKeys();
    }
    @Override
    protected Set makeEmptySet() {
      return makeEmptyMap().keySet();
    }
    @Override
    protected Set makeFullSet() {
      return makeFullMap().keySet();
    }
    @Override
    protected boolean isAddSupported() {
      // Collection views don't support add operations.
      return false;
    }
    @Override
    protected boolean isRemoveSupported() {
      // The key-set view supports remove only if the underlying map is modifiable.
      return isAddRemoveModifiable();
    }
    @Override
    protected void resetEmpty() {
      // Reset the enclosing TestMap first, then re-capture the key-set views.
      TestMap.this.resetEmpty();
      collection = map.keySet();
      TestMapKeySet.this.confirmed = TestMap.this.confirmed.keySet();
    }
    @Override
    protected void resetFull() {
      TestMap.this.resetFull();
      collection = map.keySet();
      TestMapKeySet.this.confirmed = TestMap.this.confirmed.keySet();
    }
    @Override
    protected void verify() {
      // Verify the set contract and then the enclosing map's invariants.
      super.verify();
      TestMap.this.verify();
    }
  }
  /** Collection-contract tests applied to the map's values() view. */
  class TestMapValues extends TestCollection {
    public TestMapValues() {}
    @Override
    protected @Nullable Object[] getFullElements() {
      return getSampleValues();
    }
    @Override
    protected Object[] getOtherElements() {
      return getOtherValues();
    }
    @Override
    protected Collection makeCollection() {
      return makeEmptyMap().values();
    }
    @Override
    protected Collection makeFullCollection() {
      return makeFullMap().values();
    }
    @Override
    protected boolean isAddSupported() {
      // Collection views don't support add operations.
      return false;
    }
    @Override
    protected boolean isRemoveSupported() {
      // The values view supports remove only if the underlying map is modifiable.
      return isAddRemoveModifiable();
    }
    @Override
    protected boolean areEqualElementsDistinguishable() {
      // equal values are associated with different keys, so they are
      // distinguishable.
      return true;
    }
    @Override
    protected Collection makeConfirmedCollection() {
      // never gets called, reset methods are overridden
      return null;
    }
    @Override
    protected Collection makeConfirmedFullCollection() {
      // never gets called, reset methods are overridden
      return null;
    }
    @Override
    protected void resetFull() {
      // Reset the enclosing TestMap first, then re-capture the values views.
      TestMap.this.resetFull();
      collection = map.values();
      TestMapValues.this.confirmed = TestMap.this.confirmed.values();
    }
    @Override
    protected void resetEmpty() {
      TestMap.this.resetEmpty();
      collection = map.values();
      TestMapValues.this.confirmed = TestMap.this.confirmed.values();
    }
    @Override
    protected void verify() {
      // Verify the collection contract and then the enclosing map's invariants.
      super.verify();
      TestMap.this.verify();
    }
    // TODO: should test that a remove on the values collection view
    // removes the proper mapping and not just any mapping that may have
    // the value equal to the value returned from the values iterator.
  }
  /**
   * Resets the {@link #map}, {@link #entrySet}, {@link #keySet}, {@link #collectionValues} and
   * {@link #confirmed} fields to empty.
   */
  protected void resetEmpty() {
    this.map = makeEmptyMap();
    views();
    // The confirmed map starts empty too, so map and confirmed agree from the outset.
    this.confirmed = makeConfirmedMap();
  }
/**
* Resets the {@link #map}, {@link #entrySet}, {@link #keySet}, {@link #collectionValues} and
* {@link #confirmed} fields to full.
*/
protected void resetFull() {
this.map = makeFullMap();
views();
this.confirmed = makeConfirmedMap();
Object[] k = getSampleKeys();
Object[] v = getSampleValues();
for (int i = 0; i < k.length; i++) {
confirmed.put(k[i], v[i]);
}
}
  /** Resets the collection view fields. */
  private void views() {
    // Re-capture all three views; they are tied to the current map instance.
    this.keySet = map.keySet();
    this.collectionValues = map.values();
    this.entrySet = map.entrySet();
  }
  /**
   * Verifies that {@link #map} is still equal to {@link #confirmed}. This method checks that the
   * map is equal to the HashMap, <I>and</I> that the map's collection views are still equal to the
   * HashMap's collection views. An <Code>equals</Code> test is done on the maps and their
   * collection views; their size and <Code>isEmpty</Code> results are compared; their hashCodes are
   * compared; and <Code>containsAll</Code> tests are run on the collection views.
   */
  protected void verify() {
    // Only the map, entry set and key set are checked here; the values view has its
    // own checks in TestMapValues.verify().
    verifyMap();
    verifyEntrySet();
    verifyKeySet();
  }
  /** Checks size, emptiness, hashCode and equality of {@link #map} against {@link #confirmed}. */
  protected void verifyMap() {
    int size = confirmed.size();
    boolean empty = confirmed.isEmpty();
    assertEquals("Map should be same size as HashMap", size, map.size());
    assertEquals("Map should be empty if HashMap is", empty, map.isEmpty());
    assertEquals("hashCodes should be the same", confirmed.hashCode(), map.hashCode());
    // this fails for LRUMap because confirmed.equals() somehow modifies
    // map, causing concurrent modification exceptions.
    // assertEquals("Map should still equal HashMap", confirmed, map);
    // this works though and performs the same verification:
    assertTrue("Map should still equal HashMap", map.equals(confirmed));
    // TODO: this should really be rexamined to figure out why LRU map
    // behaves like it does (the equals shouldn't modify since all accesses
    // by the confirmed collection should be through an iterator, thus not
    // causing LRUMap to change).
  }
protected void verifyEntrySet() {
int size = confirmed.size();
boolean empty = confirmed.isEmpty();
assertEquals("entrySet should be same size as HashMap's", size, entrySet.size());
assertEquals("entrySet should be empty if HashMap is", empty, entrySet.isEmpty());
assertTrue(
"entrySet should contain all HashMap's elements",
entrySet.containsAll(confirmed.entrySet()));
assertEquals(
"entrySet hashCodes should be the same",
confirmed.entrySet().hashCode(),
entrySet.hashCode());
assertEquals("Map's entry set should still equal HashMap's", confirmed.entrySet(), entrySet);
}
protected void verifyKeySet() {
int size = confirmed.size();
boolean empty = confirmed.isEmpty();
assertEquals("keySet should be same size as HashMap's", size, keySet.size());
assertEquals("keySet should be empty if HashMap is", empty, keySet.isEmpty());
assertTrue(
"keySet should contain all HashMap's elements", keySet.containsAll(confirmed.keySet()));
assertEquals(
"keySet hashCodes should be the same", confirmed.keySet().hashCode(), keySet.hashCode());
assertEquals("Map's key set should still equal HashMap's", confirmed.keySet(), keySet);
}
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/alloydb/v1alpha/service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.alloydb.v1alpha;
/**
*
*
* <pre>
* Message for ListDatabases response.
* </pre>
*
* Protobuf type {@code google.cloud.alloydb.v1alpha.ListDatabasesResponse}
*/
public final class ListDatabasesResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.alloydb.v1alpha.ListDatabasesResponse)
ListDatabasesResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListDatabasesResponse.newBuilder() to construct.
private ListDatabasesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListDatabasesResponse() {
databases_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListDatabasesResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.alloydb.v1alpha.ServiceProto
.internal_static_google_cloud_alloydb_v1alpha_ListDatabasesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.alloydb.v1alpha.ServiceProto
.internal_static_google_cloud_alloydb_v1alpha_ListDatabasesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.alloydb.v1alpha.ListDatabasesResponse.class,
com.google.cloud.alloydb.v1alpha.ListDatabasesResponse.Builder.class);
}
public static final int DATABASES_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.alloydb.v1alpha.Database> databases_;
/**
*
*
* <pre>
* The list of databases.
* </pre>
*
* <code>repeated .google.cloud.alloydb.v1alpha.Database databases = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.alloydb.v1alpha.Database> getDatabasesList() {
return databases_;
}
/**
*
*
* <pre>
* The list of databases.
* </pre>
*
* <code>repeated .google.cloud.alloydb.v1alpha.Database databases = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.alloydb.v1alpha.DatabaseOrBuilder>
getDatabasesOrBuilderList() {
return databases_;
}
/**
*
*
* <pre>
* The list of databases.
* </pre>
*
* <code>repeated .google.cloud.alloydb.v1alpha.Database databases = 1;</code>
*/
@java.lang.Override
public int getDatabasesCount() {
return databases_.size();
}
/**
*
*
* <pre>
* The list of databases.
* </pre>
*
* <code>repeated .google.cloud.alloydb.v1alpha.Database databases = 1;</code>
*/
@java.lang.Override
public com.google.cloud.alloydb.v1alpha.Database getDatabases(int index) {
return databases_.get(index);
}
/**
*
*
* <pre>
* The list of databases.
* </pre>
*
* <code>repeated .google.cloud.alloydb.v1alpha.Database databases = 1;</code>
*/
@java.lang.Override
public com.google.cloud.alloydb.v1alpha.DatabaseOrBuilder getDatabasesOrBuilder(int index) {
return databases_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
  /**
   *
   *
   * <pre>
   * A token identifying the next page of results the server should return.
   * If this field is omitted, there are no subsequent pages.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Lazily decode the ByteString form and cache the String so later calls are cheap.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * A token identifying the next page of results the server should return.
   * If this field is omitted, there are no subsequent pages.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      // Encode the cached String form and cache the ByteString for later calls.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    // -1 means "not computed yet"; 1/0 are cached true/false answers.
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Serialize each repeated databases element under field number 1.
    for (int i = 0; i < databases_.size(); i++) {
      output.writeMessage(1, databases_.get(i));
    }
    // Field 2 is emitted only when the token is non-empty.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize caches the computed size; -1 marks it as not yet computed.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < databases_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, databases_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.alloydb.v1alpha.ListDatabasesResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.alloydb.v1alpha.ListDatabasesResponse other =
        (com.google.cloud.alloydb.v1alpha.ListDatabasesResponse) obj;
    // Field-by-field comparison: databases list, page token, then unknown fields.
    if (!getDatabasesList().equals(other.getDatabasesList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      // The message is immutable, so a previously computed hash can be reused.
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    // The repeated field only contributes when non-empty.
    if (getDatabasesCount() > 0) {
      hash = (37 * hash) + DATABASES_FIELD_NUMBER;
      hash = (53 * hash) + getDatabasesList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.alloydb.v1alpha.ListDatabasesResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.alloydb.v1alpha.ListDatabasesResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.alloydb.v1alpha.ListDatabasesResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.alloydb.v1alpha.ListDatabasesResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.alloydb.v1alpha.ListDatabasesResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.alloydb.v1alpha.ListDatabasesResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.alloydb.v1alpha.ListDatabasesResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.alloydb.v1alpha.ListDatabasesResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.alloydb.v1alpha.ListDatabasesResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.alloydb.v1alpha.ListDatabasesResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.alloydb.v1alpha.ListDatabasesResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.alloydb.v1alpha.ListDatabasesResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.alloydb.v1alpha.ListDatabasesResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Message for ListDatabases response.
* </pre>
*
* Protobuf type {@code google.cloud.alloydb.v1alpha.ListDatabasesResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.alloydb.v1alpha.ListDatabasesResponse)
com.google.cloud.alloydb.v1alpha.ListDatabasesResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.alloydb.v1alpha.ServiceProto
.internal_static_google_cloud_alloydb_v1alpha_ListDatabasesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.alloydb.v1alpha.ServiceProto
.internal_static_google_cloud_alloydb_v1alpha_ListDatabasesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.alloydb.v1alpha.ListDatabasesResponse.class,
com.google.cloud.alloydb.v1alpha.ListDatabasesResponse.Builder.class);
}
// Construct using com.google.cloud.alloydb.v1alpha.ListDatabasesResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (databasesBuilder_ == null) {
        databases_ = java.util.Collections.emptyList();
      } else {
        // The field builder owns the elements; drop our list reference and clear it.
        databases_ = null;
        databasesBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.alloydb.v1alpha.ServiceProto
.internal_static_google_cloud_alloydb_v1alpha_ListDatabasesResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.alloydb.v1alpha.ListDatabasesResponse getDefaultInstanceForType() {
return com.google.cloud.alloydb.v1alpha.ListDatabasesResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.alloydb.v1alpha.ListDatabasesResponse build() {
com.google.cloud.alloydb.v1alpha.ListDatabasesResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.alloydb.v1alpha.ListDatabasesResponse buildPartial() {
com.google.cloud.alloydb.v1alpha.ListDatabasesResponse result =
new com.google.cloud.alloydb.v1alpha.ListDatabasesResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
    private void buildPartialRepeatedFields(
        com.google.cloud.alloydb.v1alpha.ListDatabasesResponse result) {
      if (databasesBuilder_ == null) {
        // Freeze the plain list once (clearing the mutability bit) so it can be
        // shared with the built message.
        if (((bitField0_ & 0x00000001) != 0)) {
          databases_ = java.util.Collections.unmodifiableList(databases_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.databases_ = databases_;
      } else {
        result.databases_ = databasesBuilder_.build();
      }
    }
    private void buildPartial0(com.google.cloud.alloydb.v1alpha.ListDatabasesResponse result) {
      int from_bitField0_ = bitField0_;
      // Bit 0x2 tracks whether next_page_token was set on this builder.
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.alloydb.v1alpha.ListDatabasesResponse) {
return mergeFrom((com.google.cloud.alloydb.v1alpha.ListDatabasesResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
    public Builder mergeFrom(com.google.cloud.alloydb.v1alpha.ListDatabasesResponse other) {
      if (other == com.google.cloud.alloydb.v1alpha.ListDatabasesResponse.getDefaultInstance())
        return this;
      if (databasesBuilder_ == null) {
        // Plain-list mode: share the other message's list when ours is empty,
        // otherwise copy its elements into our mutable list.
        if (!other.databases_.isEmpty()) {
          if (databases_.isEmpty()) {
            databases_ = other.databases_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureDatabasesIsMutable();
            databases_.addAll(other.databases_);
          }
          onChanged();
        }
      } else {
        // Field-builder mode: merge through the repeated-field builder instead.
        if (!other.databases_.isEmpty()) {
          if (databasesBuilder_.isEmpty()) {
            databasesBuilder_.dispose();
            databasesBuilder_ = null;
            databases_ = other.databases_;
            bitField0_ = (bitField0_ & ~0x00000001);
            databasesBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getDatabasesFieldBuilder()
                    : null;
          } else {
            databasesBuilder_.addAllMessages(other.databases_);
          }
        }
      }
      // Proto3 string merge: copy only when the other token is non-empty.
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks the end of the stream.
              done = true;
              break;
            case 10:
              {
                // Field 1 (length-delimited): one Database element.
                com.google.cloud.alloydb.v1alpha.Database m =
                    input.readMessage(
                        com.google.cloud.alloydb.v1alpha.Database.parser(), extensionRegistry);
                if (databasesBuilder_ == null) {
                  ensureDatabasesIsMutable();
                  databases_.add(m);
                } else {
                  databasesBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                // Field 2 (length-delimited): next_page_token string.
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private java.util.List<com.google.cloud.alloydb.v1alpha.Database> databases_ =
java.util.Collections.emptyList();
    private void ensureDatabasesIsMutable() {
      // Copy-on-write: if bit 0x1 is clear the current list may be shared or
      // immutable, so replace it with a private ArrayList before mutation.
      if (!((bitField0_ & 0x00000001) != 0)) {
        databases_ = new java.util.ArrayList<com.google.cloud.alloydb.v1alpha.Database>(databases_);
        bitField0_ |= 0x00000001;
      }
    }
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.alloydb.v1alpha.Database,
com.google.cloud.alloydb.v1alpha.Database.Builder,
com.google.cloud.alloydb.v1alpha.DatabaseOrBuilder>
databasesBuilder_;
/**
*
*
* <pre>
* The list of databases.
* </pre>
*
* <code>repeated .google.cloud.alloydb.v1alpha.Database databases = 1;</code>
*/
public java.util.List<com.google.cloud.alloydb.v1alpha.Database> getDatabasesList() {
if (databasesBuilder_ == null) {
return java.util.Collections.unmodifiableList(databases_);
} else {
return databasesBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The list of databases.
* </pre>
*
* <code>repeated .google.cloud.alloydb.v1alpha.Database databases = 1;</code>
*/
public int getDatabasesCount() {
if (databasesBuilder_ == null) {
return databases_.size();
} else {
return databasesBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The list of databases.
* </pre>
*
* <code>repeated .google.cloud.alloydb.v1alpha.Database databases = 1;</code>
*/
public com.google.cloud.alloydb.v1alpha.Database getDatabases(int index) {
if (databasesBuilder_ == null) {
return databases_.get(index);
} else {
return databasesBuilder_.getMessage(index);
}
}
    /**
     *
     *
     * <pre>
     * The list of databases.
     * </pre>
     *
     * <code>repeated .google.cloud.alloydb.v1alpha.Database databases = 1;</code>
     */
    public Builder setDatabases(int index, com.google.cloud.alloydb.v1alpha.Database value) {
      if (databasesBuilder_ == null) {
        // No field builder yet: mutate the local list copy and notify parents.
        if (value == null) {
          throw new NullPointerException();
        }
        ensureDatabasesIsMutable();
        databases_.set(index, value);
        onChanged();
      } else {
        // Field builder owns the data; delegate (it performs its own onChanged).
        databasesBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of databases.
     * </pre>
     *
     * <code>repeated .google.cloud.alloydb.v1alpha.Database databases = 1;</code>
     */
    public Builder setDatabases(
        int index, com.google.cloud.alloydb.v1alpha.Database.Builder builderForValue) {
      // Builder overload: the sub-builder is materialized via build() before storing.
      if (databasesBuilder_ == null) {
        ensureDatabasesIsMutable();
        databases_.set(index, builderForValue.build());
        onChanged();
      } else {
        databasesBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of databases.
     * </pre>
     *
     * <code>repeated .google.cloud.alloydb.v1alpha.Database databases = 1;</code>
     */
    public Builder addDatabases(com.google.cloud.alloydb.v1alpha.Database value) {
      // Append a single element; null is rejected eagerly on the list path.
      if (databasesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureDatabasesIsMutable();
        databases_.add(value);
        onChanged();
      } else {
        databasesBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of databases.
     * </pre>
     *
     * <code>repeated .google.cloud.alloydb.v1alpha.Database databases = 1;</code>
     */
    public Builder addDatabases(int index, com.google.cloud.alloydb.v1alpha.Database value) {
      // Insert at index, shifting subsequent elements.
      if (databasesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureDatabasesIsMutable();
        databases_.add(index, value);
        onChanged();
      } else {
        databasesBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of databases.
     * </pre>
     *
     * <code>repeated .google.cloud.alloydb.v1alpha.Database databases = 1;</code>
     */
    public Builder addDatabases(com.google.cloud.alloydb.v1alpha.Database.Builder builderForValue) {
      // Append using a sub-builder; materialized via build() before storing.
      if (databasesBuilder_ == null) {
        ensureDatabasesIsMutable();
        databases_.add(builderForValue.build());
        onChanged();
      } else {
        databasesBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of databases.
     * </pre>
     *
     * <code>repeated .google.cloud.alloydb.v1alpha.Database databases = 1;</code>
     */
    public Builder addDatabases(
        int index, com.google.cloud.alloydb.v1alpha.Database.Builder builderForValue) {
      // Insert a built sub-builder value at index.
      if (databasesBuilder_ == null) {
        ensureDatabasesIsMutable();
        databases_.add(index, builderForValue.build());
        onChanged();
      } else {
        databasesBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of databases.
     * </pre>
     *
     * <code>repeated .google.cloud.alloydb.v1alpha.Database databases = 1;</code>
     */
    public Builder addAllDatabases(
        java.lang.Iterable<? extends com.google.cloud.alloydb.v1alpha.Database> values) {
      // Bulk append; AbstractMessageLite.Builder.addAll null-checks each element.
      if (databasesBuilder_ == null) {
        ensureDatabasesIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, databases_);
        onChanged();
      } else {
        databasesBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of databases.
     * </pre>
     *
     * <code>repeated .google.cloud.alloydb.v1alpha.Database databases = 1;</code>
     */
    public Builder clearDatabases() {
      if (databasesBuilder_ == null) {
        // Reset to the shared immutable empty list and drop the
        // "mutable copy made" bit so a later mutation copies again.
        databases_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        databasesBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of databases.
     * </pre>
     *
     * <code>repeated .google.cloud.alloydb.v1alpha.Database databases = 1;</code>
     */
    public Builder removeDatabases(int index) {
      // Remove the element at index from whichever store currently owns the data.
      if (databasesBuilder_ == null) {
        ensureDatabasesIsMutable();
        databases_.remove(index);
        onChanged();
      } else {
        databasesBuilder_.remove(index);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of databases.
     * </pre>
     *
     * <code>repeated .google.cloud.alloydb.v1alpha.Database databases = 1;</code>
     */
    public com.google.cloud.alloydb.v1alpha.Database.Builder getDatabasesBuilder(int index) {
      // Forces creation of the field builder so the caller gets a live sub-builder.
      return getDatabasesFieldBuilder().getBuilder(index);
    }
    /**
     *
     *
     * <pre>
     * The list of databases.
     * </pre>
     *
     * <code>repeated .google.cloud.alloydb.v1alpha.Database databases = 1;</code>
     */
    public com.google.cloud.alloydb.v1alpha.DatabaseOrBuilder getDatabasesOrBuilder(int index) {
      if (databasesBuilder_ == null) {
        return databases_.get(index);
      } else {
        return databasesBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     *
     *
     * <pre>
     * The list of databases.
     * </pre>
     *
     * <code>repeated .google.cloud.alloydb.v1alpha.Database databases = 1;</code>
     */
    public java.util.List<? extends com.google.cloud.alloydb.v1alpha.DatabaseOrBuilder>
        getDatabasesOrBuilderList() {
      if (databasesBuilder_ != null) {
        return databasesBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(databases_);
      }
    }
    /**
     *
     *
     * <pre>
     * The list of databases.
     * </pre>
     *
     * <code>repeated .google.cloud.alloydb.v1alpha.Database databases = 1;</code>
     */
    public com.google.cloud.alloydb.v1alpha.Database.Builder addDatabasesBuilder() {
      // Appends a default-instance element and returns its builder for in-place editing.
      return getDatabasesFieldBuilder()
          .addBuilder(com.google.cloud.alloydb.v1alpha.Database.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The list of databases.
     * </pre>
     *
     * <code>repeated .google.cloud.alloydb.v1alpha.Database databases = 1;</code>
     */
    public com.google.cloud.alloydb.v1alpha.Database.Builder addDatabasesBuilder(int index) {
      // Inserts a default-instance element at index and returns its builder.
      return getDatabasesFieldBuilder()
          .addBuilder(index, com.google.cloud.alloydb.v1alpha.Database.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The list of databases.
     * </pre>
     *
     * <code>repeated .google.cloud.alloydb.v1alpha.Database databases = 1;</code>
     */
    public java.util.List<com.google.cloud.alloydb.v1alpha.Database.Builder>
        getDatabasesBuilderList() {
      return getDatabasesFieldBuilder().getBuilderList();
    }
    // Lazily creates the RepeatedFieldBuilderV3. After creation the builder owns
    // the data, so databases_ is nulled out; all accessors above branch on
    // databasesBuilder_ == null to pick the owner.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.alloydb.v1alpha.Database,
            com.google.cloud.alloydb.v1alpha.Database.Builder,
            com.google.cloud.alloydb.v1alpha.DatabaseOrBuilder>
        getDatabasesFieldBuilder() {
      if (databasesBuilder_ == null) {
        databasesBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.alloydb.v1alpha.Database,
                com.google.cloud.alloydb.v1alpha.Database.Builder,
                com.google.cloud.alloydb.v1alpha.DatabaseOrBuilder>(
                databases_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        databases_ = null;
      }
      return databasesBuilder_;
    }
    // Holds either a String or a ByteString; decoded/encoded lazily and the
    // result is cached back into the field (standard protobuf string idiom).
    private java.lang.Object nextPageToken_ = "";
    /**
     *
     *
     * <pre>
     * A token identifying the next page of results the server should return.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        // Field currently holds a ByteString: decode once and cache the String.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A token identifying the next page of results the server should return.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        // Field currently holds a String: encode once and cache the ByteString.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A token identifying the next page of results the server should return.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      // Bit 0x00000002 marks next_page_token as explicitly set in this builder.
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A token identifying the next page of results the server should return.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      // Restore the default value from the default instance and clear the set-bit.
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A token identifying the next page of results the server should return.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Proto3 strings must be valid UTF-8; reject malformed bytes up front.
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    // Plain delegations to GeneratedMessageV3.Builder for unknown-field handling.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.alloydb.v1alpha.ListDatabasesResponse)
}
  // @@protoc_insertion_point(class_scope:google.cloud.alloydb.v1alpha.ListDatabasesResponse)
  // Singleton default instance, created eagerly at class-initialization time.
  private static final com.google.cloud.alloydb.v1alpha.ListDatabasesResponse DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.alloydb.v1alpha.ListDatabasesResponse();
  }
  public static com.google.cloud.alloydb.v1alpha.ListDatabasesResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser implemented via a fresh Builder per message; partial results are
  // attached to thrown InvalidProtocolBufferExceptions as the unfinished message.
  private static final com.google.protobuf.Parser<ListDatabasesResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListDatabasesResponse>() {
        @java.lang.Override
        public ListDatabasesResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures so callers see a single exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<ListDatabasesResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListDatabasesResponse> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.alloydb.v1alpha.ListDatabasesResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 36,062 | java-dialogflow-cx/proto-google-cloud-dialogflow-cx-v3/src/main/java/com/google/cloud/dialogflow/cx/v3/UpdateGenerativeSettingsRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/cx/v3/agent.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dialogflow.cx.v3;
/**
*
*
* <pre>
* Request for
* [UpdateGenerativeSettings][google.cloud.dialogflow.cx.v3.Agents.UpdateGenerativeSettings]
* RPC.
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.cx.v3.UpdateGenerativeSettingsRequest}
*/
public final class UpdateGenerativeSettingsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dialogflow.cx.v3.UpdateGenerativeSettingsRequest)
UpdateGenerativeSettingsRequestOrBuilder {
private static final long serialVersionUID = 0L;
  // Use UpdateGenerativeSettingsRequest.newBuilder() to construct.
  private UpdateGenerativeSettingsRequest(
      com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private UpdateGenerativeSettingsRequest() {}
  // Used by the protobuf runtime to create instances reflectively.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new UpdateGenerativeSettingsRequest();
  }
  // Descriptor/accessor-table plumbing generated from agent.proto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.dialogflow.cx.v3.AgentProto
        .internal_static_google_cloud_dialogflow_cx_v3_UpdateGenerativeSettingsRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.dialogflow.cx.v3.AgentProto
        .internal_static_google_cloud_dialogflow_cx_v3_UpdateGenerativeSettingsRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.dialogflow.cx.v3.UpdateGenerativeSettingsRequest.class,
            com.google.cloud.dialogflow.cx.v3.UpdateGenerativeSettingsRequest.Builder.class);
  }
  // Presence bits: 0x00000001 = generative_settings set, 0x00000002 = update_mask set.
  private int bitField0_;
  public static final int GENERATIVE_SETTINGS_FIELD_NUMBER = 1;
  private com.google.cloud.dialogflow.cx.v3.GenerativeSettings generativeSettings_;
  /**
   *
   *
   * <pre>
   * Required. Generative settings to update.
   * </pre>
   *
   * <code>
   * .google.cloud.dialogflow.cx.v3.GenerativeSettings generative_settings = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the generativeSettings field is set.
   */
  @java.lang.Override
  public boolean hasGenerativeSettings() {
    return ((bitField0_ & 0x00000001) != 0);
  }
/**
*
*
* <pre>
* Required. Generative settings to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3.GenerativeSettings generative_settings = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The generativeSettings.
*/
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3.GenerativeSettings getGenerativeSettings() {
return generativeSettings_ == null
? com.google.cloud.dialogflow.cx.v3.GenerativeSettings.getDefaultInstance()
: generativeSettings_;
}
/**
*
*
* <pre>
* Required. Generative settings to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3.GenerativeSettings generative_settings = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3.GenerativeSettingsOrBuilder
getGenerativeSettingsOrBuilder() {
return generativeSettings_ == null
? com.google.cloud.dialogflow.cx.v3.GenerativeSettings.getDefaultInstance()
: generativeSettings_;
}
  public static final int UPDATE_MASK_FIELD_NUMBER = 2;
  private com.google.protobuf.FieldMask updateMask_;
  /**
   *
   *
   * <pre>
   * Optional. The mask to control which fields get updated. If the mask is not
   * present, all fields will be updated.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return Whether the updateMask field is set.
   */
  @java.lang.Override
  public boolean hasUpdateMask() {
    // Presence is tracked via bit 0x00000002 of bitField0_.
    return ((bitField0_ & 0x00000002) != 0);
  }
/**
*
*
* <pre>
* Optional. The mask to control which fields get updated. If the mask is not
* present, all fields will be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* Optional. The mask to control which fields get updated. If the mask is not
* present, all fields will be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
  // Memoized isInitialized() result: -1 unknown, 0 false, 1 true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required proto2-style fields, so this message is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes set fields in ascending field-number order, then unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getGenerativeSettings());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(2, getUpdateMask());
    }
    getUnknownFields().writeTo(output);
  }
  // Computes and caches the serialized size; -1 marks "not yet computed".
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getGenerativeSettings());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Field-by-field structural equality, including unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.dialogflow.cx.v3.UpdateGenerativeSettingsRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.dialogflow.cx.v3.UpdateGenerativeSettingsRequest other =
        (com.google.cloud.dialogflow.cx.v3.UpdateGenerativeSettingsRequest) obj;
    if (hasGenerativeSettings() != other.hasGenerativeSettings()) return false;
    if (hasGenerativeSettings()) {
      if (!getGenerativeSettings().equals(other.getGenerativeSettings())) return false;
    }
    if (hasUpdateMask() != other.hasUpdateMask()) return false;
    if (hasUpdateMask()) {
      if (!getUpdateMask().equals(other.getUpdateMask())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash consistent with equals(); memoized (0 means "not yet computed").
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasGenerativeSettings()) {
      hash = (37 * hash) + GENERATIVE_SETTINGS_FIELD_NUMBER;
      hash = (53 * hash) + getGenerativeSettings().hashCode();
    }
    if (hasUpdateMask()) {
      hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
      hash = (53 * hash) + getUpdateMask().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parseFrom overloads: byte-oriented sources go straight
  // through PARSER; stream-oriented sources are wrapped so IOExceptions surface
  // as such rather than being converted.
  public static com.google.cloud.dialogflow.cx.v3.UpdateGenerativeSettingsRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.dialogflow.cx.v3.UpdateGenerativeSettingsRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.dialogflow.cx.v3.UpdateGenerativeSettingsRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.dialogflow.cx.v3.UpdateGenerativeSettingsRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.dialogflow.cx.v3.UpdateGenerativeSettingsRequest parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.dialogflow.cx.v3.UpdateGenerativeSettingsRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.dialogflow.cx.v3.UpdateGenerativeSettingsRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.dialogflow.cx.v3.UpdateGenerativeSettingsRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a varint length prefix before the message bytes.
  public static com.google.cloud.dialogflow.cx.v3.UpdateGenerativeSettingsRequest
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.dialogflow.cx.v3.UpdateGenerativeSettingsRequest
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.dialogflow.cx.v3.UpdateGenerativeSettingsRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.dialogflow.cx.v3.UpdateGenerativeSettingsRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factories; toBuilder() avoids a copy when called on the default instance.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.dialogflow.cx.v3.UpdateGenerativeSettingsRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Request for
* [UpdateGenerativeSettings][google.cloud.dialogflow.cx.v3.Agents.UpdateGenerativeSettings]
* RPC.
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.cx.v3.UpdateGenerativeSettingsRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.cx.v3.UpdateGenerativeSettingsRequest)
com.google.cloud.dialogflow.cx.v3.UpdateGenerativeSettingsRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.dialogflow.cx.v3.AgentProto
          .internal_static_google_cloud_dialogflow_cx_v3_UpdateGenerativeSettingsRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.dialogflow.cx.v3.AgentProto
          .internal_static_google_cloud_dialogflow_cx_v3_UpdateGenerativeSettingsRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.dialogflow.cx.v3.UpdateGenerativeSettingsRequest.class,
              com.google.cloud.dialogflow.cx.v3.UpdateGenerativeSettingsRequest.Builder.class);
    }
    // Construct using
    // com.google.cloud.dialogflow.cx.v3.UpdateGenerativeSettingsRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    // Eagerly creates nested field builders when the runtime requires it
    // (alwaysUseFieldBuilders); otherwise they are created lazily.
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getGenerativeSettingsFieldBuilder();
        getUpdateMaskFieldBuilder();
      }
    }
    // Resets all fields to defaults, disposing any nested field builders.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      generativeSettings_ = null;
      if (generativeSettingsBuilder_ != null) {
        generativeSettingsBuilder_.dispose();
        generativeSettingsBuilder_ = null;
      }
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.dialogflow.cx.v3.AgentProto
          .internal_static_google_cloud_dialogflow_cx_v3_UpdateGenerativeSettingsRequest_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.dialogflow.cx.v3.UpdateGenerativeSettingsRequest
        getDefaultInstanceForType() {
      return com.google.cloud.dialogflow.cx.v3.UpdateGenerativeSettingsRequest.getDefaultInstance();
    }
    // build() enforces initialization; buildPartial() does not.
    @java.lang.Override
    public com.google.cloud.dialogflow.cx.v3.UpdateGenerativeSettingsRequest build() {
      com.google.cloud.dialogflow.cx.v3.UpdateGenerativeSettingsRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.dialogflow.cx.v3.UpdateGenerativeSettingsRequest buildPartial() {
      com.google.cloud.dialogflow.cx.v3.UpdateGenerativeSettingsRequest result =
          new com.google.cloud.dialogflow.cx.v3.UpdateGenerativeSettingsRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies set fields from this builder into result, translating builder
    // presence bits into the message's bitField0_.
    private void buildPartial0(
        com.google.cloud.dialogflow.cx.v3.UpdateGenerativeSettingsRequest result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.generativeSettings_ =
            generativeSettingsBuilder_ == null
                ? generativeSettings_
                : generativeSettingsBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }
    // Plain delegations to GeneratedMessageV3.Builder reflective field APIs.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    // Dispatches to the typed mergeFrom when possible; otherwise falls back to
    // the reflective merge in the superclass.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.dialogflow.cx.v3.UpdateGenerativeSettingsRequest) {
        return mergeFrom((com.google.cloud.dialogflow.cx.v3.UpdateGenerativeSettingsRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Merges only the fields that are set on `other`; default instance is a no-op.
    public Builder mergeFrom(
        com.google.cloud.dialogflow.cx.v3.UpdateGenerativeSettingsRequest other) {
      if (other
          == com.google.cloud.dialogflow.cx.v3.UpdateGenerativeSettingsRequest.getDefaultInstance())
        return this;
      if (other.hasGenerativeSettings()) {
        mergeGenerativeSettings(other.getGenerativeSettings());
      }
      if (other.hasUpdateMask()) {
        mergeUpdateMask(other.getUpdateMask());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      // No required proto2-style fields.
      return true;
    }
    // Wire-format parse loop: reads tags until EOF (tag 0) or an end-group tag,
    // routing known field numbers to field builders and the rest to unknown fields.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                // Field 1: generative_settings (length-delimited message).
                input.readMessage(
                    getGenerativeSettingsFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                // Field 2: update_mask (length-delimited message).
                input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify parent builders even when parsing aborts part-way.
        onChanged();
      } // finally
      return this;
    }
    // Builder-side presence bits, mirrored into the message by buildPartial0().
    private int bitField0_;
    private com.google.cloud.dialogflow.cx.v3.GenerativeSettings generativeSettings_;
    // Lazily-created single-field builder; while null, generativeSettings_ holds the data.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dialogflow.cx.v3.GenerativeSettings,
            com.google.cloud.dialogflow.cx.v3.GenerativeSettings.Builder,
            com.google.cloud.dialogflow.cx.v3.GenerativeSettingsOrBuilder>
        generativeSettingsBuilder_;
    /**
     *
     *
     * <pre>
     * Required. Generative settings to update.
     * </pre>
     *
     * <code>
     * .google.cloud.dialogflow.cx.v3.GenerativeSettings generative_settings = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the generativeSettings field is set.
     */
    public boolean hasGenerativeSettings() {
      return ((bitField0_ & 0x00000001) != 0);
    }
/**
*
*
* <pre>
* Required. Generative settings to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3.GenerativeSettings generative_settings = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The generativeSettings.
*/
public com.google.cloud.dialogflow.cx.v3.GenerativeSettings getGenerativeSettings() {
if (generativeSettingsBuilder_ == null) {
return generativeSettings_ == null
? com.google.cloud.dialogflow.cx.v3.GenerativeSettings.getDefaultInstance()
: generativeSettings_;
} else {
return generativeSettingsBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. Generative settings to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3.GenerativeSettings generative_settings = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setGenerativeSettings(
com.google.cloud.dialogflow.cx.v3.GenerativeSettings value) {
if (generativeSettingsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
generativeSettings_ = value;
} else {
generativeSettingsBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Generative settings to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3.GenerativeSettings generative_settings = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setGenerativeSettings(
com.google.cloud.dialogflow.cx.v3.GenerativeSettings.Builder builderForValue) {
if (generativeSettingsBuilder_ == null) {
generativeSettings_ = builderForValue.build();
} else {
generativeSettingsBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Generative settings to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3.GenerativeSettings generative_settings = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeGenerativeSettings(
com.google.cloud.dialogflow.cx.v3.GenerativeSettings value) {
if (generativeSettingsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& generativeSettings_ != null
&& generativeSettings_
!= com.google.cloud.dialogflow.cx.v3.GenerativeSettings.getDefaultInstance()) {
getGenerativeSettingsBuilder().mergeFrom(value);
} else {
generativeSettings_ = value;
}
} else {
generativeSettingsBuilder_.mergeFrom(value);
}
if (generativeSettings_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. Generative settings to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3.GenerativeSettings generative_settings = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearGenerativeSettings() {
bitField0_ = (bitField0_ & ~0x00000001);
generativeSettings_ = null;
if (generativeSettingsBuilder_ != null) {
generativeSettingsBuilder_.dispose();
generativeSettingsBuilder_ = null;
}
onChanged();
return this;
}
    /**
     * Returns a builder for the required {@code generative_settings} field,
     * marking the field as set and lazily creating the field builder.
     *
     * <code>.google.cloud.dialogflow.cx.v3.GenerativeSettings generative_settings = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public com.google.cloud.dialogflow.cx.v3.GenerativeSettings.Builder
        getGenerativeSettingsBuilder() {
      bitField0_ |= 0x00000001;
      onChanged();
      return getGenerativeSettingsFieldBuilder().getBuilder();
    }
/**
*
*
* <pre>
* Required. Generative settings to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3.GenerativeSettings generative_settings = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.dialogflow.cx.v3.GenerativeSettingsOrBuilder
getGenerativeSettingsOrBuilder() {
if (generativeSettingsBuilder_ != null) {
return generativeSettingsBuilder_.getMessageOrBuilder();
} else {
return generativeSettings_ == null
? com.google.cloud.dialogflow.cx.v3.GenerativeSettings.getDefaultInstance()
: generativeSettings_;
}
}
    /**
     * Lazily creates the {@code SingleFieldBuilderV3} for {@code generative_settings},
     * transferring ownership of the currently stored message to the builder.
     *
     * <code>.google.cloud.dialogflow.cx.v3.GenerativeSettings generative_settings = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.dialogflow.cx.v3.GenerativeSettings,
            com.google.cloud.dialogflow.cx.v3.GenerativeSettings.Builder,
            com.google.cloud.dialogflow.cx.v3.GenerativeSettingsOrBuilder>
        getGenerativeSettingsFieldBuilder() {
      if (generativeSettingsBuilder_ == null) {
        generativeSettingsBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.dialogflow.cx.v3.GenerativeSettings,
                com.google.cloud.dialogflow.cx.v3.GenerativeSettings.Builder,
                com.google.cloud.dialogflow.cx.v3.GenerativeSettingsOrBuilder>(
                getGenerativeSettings(), getParentForChildren(), isClean());
        // The builder now owns the value; null the plain field to avoid a stale copy.
        generativeSettings_ = null;
      }
      return generativeSettingsBuilder_;
    }
    // Storage for the update_mask field: either the plain message (updateMask_) or,
    // once a builder has been requested, the single-field builder owns the value.
    private com.google.protobuf.FieldMask updateMask_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        updateMaskBuilder_;
/**
*
*
* <pre>
* Optional. The mask to control which fields get updated. If the mask is not
* present, all fields will be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Optional. The mask to control which fields get updated. If the mask is not
* present, all fields will be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The updateMask.
*/
public com.google.protobuf.FieldMask getUpdateMask() {
if (updateMaskBuilder_ == null) {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
} else {
return updateMaskBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Optional. The mask to control which fields get updated. If the mask is not
* present, all fields will be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateMask_ = value;
} else {
updateMaskBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The mask to control which fields get updated. If the mask is not
* present, all fields will be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
if (updateMaskBuilder_ == null) {
updateMask_ = builderForValue.build();
} else {
updateMaskBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The mask to control which fields get updated. If the mask is not
* present, all fields will be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& updateMask_ != null
&& updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
getUpdateMaskBuilder().mergeFrom(value);
} else {
updateMask_ = value;
}
} else {
updateMaskBuilder_.mergeFrom(value);
}
if (updateMask_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Optional. The mask to control which fields get updated. If the mask is not
* present, all fields will be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public Builder clearUpdateMask() {
bitField0_ = (bitField0_ & ~0x00000002);
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
onChanged();
return this;
}
    /**
     * Returns a builder for the optional {@code update_mask} field,
     * marking the field as set and lazily creating the field builder.
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     */
    public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
      bitField0_ |= 0x00000002;
      onChanged();
      return getUpdateMaskFieldBuilder().getBuilder();
    }
/**
*
*
* <pre>
* Optional. The mask to control which fields get updated. If the mask is not
* present, all fields will be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
if (updateMaskBuilder_ != null) {
return updateMaskBuilder_.getMessageOrBuilder();
} else {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
}
}
    /**
     * Lazily creates the {@code SingleFieldBuilderV3} for {@code update_mask},
     * transferring ownership of the currently stored message to the builder.
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        getUpdateMaskFieldBuilder() {
      if (updateMaskBuilder_ == null) {
        updateMaskBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.FieldMask,
                com.google.protobuf.FieldMask.Builder,
                com.google.protobuf.FieldMaskOrBuilder>(
                getUpdateMask(), getParentForChildren(), isClean());
        // The builder now owns the value; null the plain field to avoid a stale copy.
        updateMask_ = null;
      }
      return updateMaskBuilder_;
    }
    /** Delegates to the superclass; declared {@code final} so generated subclasses cannot override it. */
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    /** Delegates to the superclass; declared {@code final} so generated subclasses cannot override it. */
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.cx.v3.UpdateGenerativeSettingsRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.dialogflow.cx.v3.UpdateGenerativeSettingsRequest)
  /** Shared immutable default instance of this message type. */
  private static final com.google.cloud.dialogflow.cx.v3.UpdateGenerativeSettingsRequest
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.dialogflow.cx.v3.UpdateGenerativeSettingsRequest();
  }
  /** Returns the shared default (all-fields-unset) instance. */
  public static com.google.cloud.dialogflow.cx.v3.UpdateGenerativeSettingsRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  /** Parser that attaches the partially-built message to any parse failure. */
  private static final com.google.protobuf.Parser<UpdateGenerativeSettingsRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateGenerativeSettingsRequest>() {
        @java.lang.Override
        public UpdateGenerativeSettingsRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Rethrow with whatever was parsed so far so callers can inspect it.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures in the protobuf-specific exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  /** Returns the singleton parser for this message type. */
  public static com.google.protobuf.Parser<UpdateGenerativeSettingsRequest> parser() {
    return PARSER;
  }
  /** Returns the singleton parser for this message type. */
  @java.lang.Override
  public com.google.protobuf.Parser<UpdateGenerativeSettingsRequest> getParserForType() {
    return PARSER;
  }
  /** Returns the shared default instance (same object as {@code getDefaultInstance()}). */
  @java.lang.Override
  public com.google.cloud.dialogflow.cx.v3.UpdateGenerativeSettingsRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/ignite-3 | 36,357 | modules/distribution-zones/src/main/java/org/apache/ignite/internal/distributionzones/rebalance/ZoneRebalanceUtil.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.distributionzones.rebalance;
import static java.nio.charset.StandardCharsets.UTF_8;
import static java.util.concurrent.CompletableFuture.allOf;
import static java.util.stream.Collectors.toList;
import static java.util.stream.Collectors.toSet;
import static org.apache.ignite.internal.distributionzones.rebalance.AssignmentUtil.metastoreAssignments;
import static org.apache.ignite.internal.distributionzones.rebalance.AssignmentUtil.partitionIds;
import static org.apache.ignite.internal.distributionzones.rebalance.RebalanceUtil.UpdateStatus.ASSIGNMENT_NOT_UPDATED;
import static org.apache.ignite.internal.distributionzones.rebalance.RebalanceUtil.UpdateStatus.OUTDATED_UPDATE_RECEIVED;
import static org.apache.ignite.internal.distributionzones.rebalance.RebalanceUtil.UpdateStatus.PENDING_KEY_UPDATED;
import static org.apache.ignite.internal.distributionzones.rebalance.RebalanceUtil.UpdateStatus.PLANNED_KEY_REMOVED_EMPTY_PENDING;
import static org.apache.ignite.internal.distributionzones.rebalance.RebalanceUtil.UpdateStatus.PLANNED_KEY_REMOVED_EQUALS_PENDING;
import static org.apache.ignite.internal.distributionzones.rebalance.RebalanceUtil.UpdateStatus.PLANNED_KEY_UPDATED;
import static org.apache.ignite.internal.metastorage.dsl.Conditions.and;
import static org.apache.ignite.internal.metastorage.dsl.Conditions.exists;
import static org.apache.ignite.internal.metastorage.dsl.Conditions.notExists;
import static org.apache.ignite.internal.metastorage.dsl.Conditions.or;
import static org.apache.ignite.internal.metastorage.dsl.Conditions.value;
import static org.apache.ignite.internal.metastorage.dsl.Operations.ops;
import static org.apache.ignite.internal.metastorage.dsl.Operations.put;
import static org.apache.ignite.internal.metastorage.dsl.Operations.remove;
import static org.apache.ignite.internal.metastorage.dsl.Statements.iif;
import static org.apache.ignite.internal.partitiondistribution.PartitionDistributionUtils.calculateAssignmentForPartition;
import static org.apache.ignite.internal.partitiondistribution.PendingAssignmentsCalculator.pendingAssignmentsCalculator;
import static org.apache.ignite.internal.util.ByteUtils.longToBytesKeepingOrder;
import static org.apache.ignite.internal.util.CompletableFutures.nullCompletedFuture;
import static org.apache.ignite.internal.util.IgniteUtils.inBusyLockAsync;
import static org.apache.ignite.internal.util.StringUtils.toStringWithoutPrefix;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.IntStream;
import org.apache.ignite.internal.catalog.descriptors.CatalogZoneDescriptor;
import org.apache.ignite.internal.catalog.descriptors.ConsistencyMode;
import org.apache.ignite.internal.hlc.HybridTimestamp;
import org.apache.ignite.internal.lang.ByteArray;
import org.apache.ignite.internal.logger.IgniteLogger;
import org.apache.ignite.internal.logger.Loggers;
import org.apache.ignite.internal.metastorage.Entry;
import org.apache.ignite.internal.metastorage.MetaStorageManager;
import org.apache.ignite.internal.metastorage.dsl.Condition;
import org.apache.ignite.internal.metastorage.dsl.Iif;
import org.apache.ignite.internal.partitiondistribution.Assignment;
import org.apache.ignite.internal.partitiondistribution.Assignments;
import org.apache.ignite.internal.partitiondistribution.AssignmentsChain;
import org.apache.ignite.internal.partitiondistribution.AssignmentsQueue;
import org.apache.ignite.internal.replicator.ZonePartitionId;
import org.apache.ignite.internal.util.ExceptionUtils;
import org.apache.ignite.internal.util.IgniteSpinBusyLock;
import org.jetbrains.annotations.Nullable;
/**
* Util class for methods needed for the rebalance process.
*/
public class ZoneRebalanceUtil {
    /** Logger. */
    private static final IgniteLogger LOG = Loggers.forClass(ZoneRebalanceUtil.class);
    /** Key prefix for pending assignments. */
    public static final String PENDING_ASSIGNMENTS_QUEUE_PREFIX = "zone.assignments.pending.";
    /** UTF-8 bytes of {@link #PENDING_ASSIGNMENTS_QUEUE_PREFIX}, for prefix matching on raw Meta Storage keys. */
    public static final byte[] PENDING_ASSIGNMENTS_QUEUE_PREFIX_BYTES = PENDING_ASSIGNMENTS_QUEUE_PREFIX.getBytes(UTF_8);
    /** Key prefix for stable assignments. */
    public static final String STABLE_ASSIGNMENTS_PREFIX = "zone.assignments.stable.";
    /** UTF-8 bytes of {@link #STABLE_ASSIGNMENTS_PREFIX}, for prefix matching on raw Meta Storage keys. */
    public static final byte[] STABLE_ASSIGNMENTS_PREFIX_BYTES = STABLE_ASSIGNMENTS_PREFIX.getBytes(UTF_8);
    /** Key prefix for planned assignments. */
    public static final String PLANNED_ASSIGNMENTS_PREFIX = "zone.assignments.planned.";
    /** Key prefix for switch reduce assignments. */
    public static final String ASSIGNMENTS_SWITCH_REDUCE_PREFIX = "zone.assignments.switch.reduce.";
    /** UTF-8 bytes of {@link #ASSIGNMENTS_SWITCH_REDUCE_PREFIX}, for prefix matching on raw Meta Storage keys. */
    public static final byte[] ASSIGNMENTS_SWITCH_REDUCE_PREFIX_BYTES = ASSIGNMENTS_SWITCH_REDUCE_PREFIX.getBytes(UTF_8);
    /** Key prefix for switch append assignments. */
    public static final String ASSIGNMENTS_SWITCH_APPEND_PREFIX = "zone.assignments.switch.append.";
    /** Key prefix for change trigger keys. */
    private static final String ZONE_PENDING_CHANGE_TRIGGER_PREFIX = "zone.pending.change.trigger.";
    /** UTF-8 bytes of {@link #ZONE_PENDING_CHANGE_TRIGGER_PREFIX}, for prefix matching on raw Meta Storage keys. */
    static final byte[] ZONE_PENDING_CHANGE_TRIGGER_PREFIX_BYTES = ZONE_PENDING_CHANGE_TRIGGER_PREFIX.getBytes(UTF_8);
    /** Key prefix for the assignments chain used by graceful restart in HA mode. */
    public static final String ZONE_ASSIGNMENTS_CHAIN_PREFIX = "zone.assignments.chain.";
/**
* Status values for methods like {@link #updatePendingAssignmentsKeys}.
*/
public enum UpdateStatus {
/**
* Return code of metastore multi-invoke which identifies,
* that pending key was updated to new value (i.e. there is no active rebalance at the moment of call).
*/
PENDING_KEY_UPDATED,
/**
* Return code of metastore multi-invoke which identifies,
* that planned key was updated to new value (i.e. there is an active rebalance at the moment of call).
*/
PLANNED_KEY_UPDATED,
/**
* Return code of metastore multi-invoke which identifies,
* that planned key was removed, because current rebalance is already have the same target.
*/
PLANNED_KEY_REMOVED_EQUALS_PENDING,
/**
* Return code of metastore multi-invoke which identifies,
* that planned key was removed, because current assignment is empty.
*/
PLANNED_KEY_REMOVED_EMPTY_PENDING,
/**
* Return code of metastore multi-invoke which identifies,
* that assignments do not need to be updated.
*/
ASSIGNMENT_NOT_UPDATED,
/**
* Return code of metastore multi-invoke which identifies,
* that this trigger event was already processed by another node and must be skipped.
*/
OUTDATED_UPDATE_RECEIVED;
private static final UpdateStatus[] VALUES = values();
public static UpdateStatus valueOf(int ordinal) {
return VALUES[ordinal];
}
}
    /**
     * Update keys that related to rebalance algorithm in Meta Storage. Keys are specific for partition.
     *
     * @param zoneDescriptor Zone descriptor.
     * @param zonePartitionId Unique aggregate identifier of a partition of a zone.
     * @param dataNodes Data nodes.
     * @param partitions Number of partitions in a zone.
     * @param replicas Number of replicas for a zone.
     * @param consensusGroupSize Number of nodes in a consensus group.
     * @param revision Revision of Meta Storage that is specific for the assignment update.
     * @param timestamp Timestamp of Meta Storage that is specific for the assignment update.
     * @param metaStorageMgr Meta Storage manager.
     * @param partNum Partition id.
     * @param zoneCfgPartAssignments Zone configuration assignments.
     * @param assignmentsTimestamp Time when the catalog version that the assignments were calculated against becomes active.
     * @param aliveNodes Consistent ids of currently alive nodes; used in HA mode to filter offline nodes out of the target.
     * @param consistencyMode Consistency mode of the zone; HA mode changes how the target assignment set is computed.
     * @return Future representing result of updating keys in {@code metaStorageMgr}
     */
    public static CompletableFuture<Void> updatePendingAssignmentsKeys(
            CatalogZoneDescriptor zoneDescriptor,
            ZonePartitionId zonePartitionId,
            Collection<String> dataNodes,
            int partitions,
            int replicas,
            int consensusGroupSize,
            long revision,
            HybridTimestamp timestamp,
            MetaStorageManager metaStorageMgr,
            int partNum,
            Set<Assignment> zoneCfgPartAssignments,
            long assignmentsTimestamp,
            Set<String> aliveNodes,
            ConsistencyMode consistencyMode
    ) {
        ByteArray partChangeTriggerKey = pendingChangeTriggerKey(zonePartitionId);
        ByteArray partAssignmentsPendingKey = pendingPartAssignmentsQueueKey(zonePartitionId);
        ByteArray partAssignmentsPlannedKey = plannedPartAssignmentsKey(zonePartitionId);
        ByteArray partAssignmentsStableKey = stablePartAssignmentsKey(zonePartitionId);
        Set<Assignment> calculatedAssignments = calculateAssignmentForPartition(
                dataNodes,
                partNum,
                partitions,
                replicas,
                consensusGroupSize
        );
        Set<Assignment> targetAssignmentSet;
        if (consistencyMode == ConsistencyMode.HIGH_AVAILABILITY) {
            // All complicated logic here is needed because we want to return back to stable nodes
            // that are returned back after majority is lost and stable was narrowed.
            // Let's consider example:
            // stable = [A, B, C], dataNodes = [A, B, C]
            // B, C left, stable = [A], dataNodes = [A, B, C]
            // B returned, we want stable = [A, B], but in terms of data nodes they are not changed and equal [A, B, C]
            // So, because scale up mechanism in this case won't adjust stable, we need to add B to stable manually.
            // General idea is to filter offline nodes from data nodes, but we need to be careful and do not remove nodes
            // bypassing scale down mechanism. If node is offline and presented in previous stable, we won't remove that node.
            // First of all, we remove offline nodes from calculated assignments
            Set<Assignment> resultingAssignments = calculatedAssignments
                    .stream()
                    .filter(a -> aliveNodes.contains(a.consistentId()))
                    .collect(toSet());
            // Here we re-introduce nodes that currently exist in the stable configuration
            // but were previously removed without using the normal scale-down process.
            for (Assignment assignment : zoneCfgPartAssignments) {
                if (calculatedAssignments.contains(assignment)) {
                    resultingAssignments.add(assignment);
                }
            }
            targetAssignmentSet = resultingAssignments;
        } else {
            targetAssignmentSet = calculatedAssignments;
        }
        boolean isNewAssignments = !zoneCfgPartAssignments.equals(targetAssignmentSet);
        Assignments targetAssignments = Assignments.of(targetAssignmentSet, assignmentsTimestamp);
        AssignmentsQueue partAssignmentsPendingQueue = pendingAssignmentsCalculator()
                .stable(Assignments.of(zoneCfgPartAssignments, assignmentsTimestamp))
                .target(targetAssignments)
                .toQueue();
        byte[] partAssignmentsPlannedBytes = targetAssignments.toBytes();
        byte[] partAssignmentsPendingBytes = partAssignmentsPendingQueue.toBytes();
        // Pseudocode of the conditional multi-invoke executed below:
        // if empty(partition.change.trigger) || partition.change.trigger < event.timestamp:
        //     if empty(partition.assignments.pending)
        //              && ((isNewAssignments && empty(partition.assignments.stable))
        //                  || (partition.assignments.stable != calcPartAssignments() && !empty(partition.assignments.stable))):
        //         partition.assignments.pending = partAssignmentsPendingQueue
        //         partition.change.trigger = event.timestamp
        //     else:
        //         if partition.assignments.pending != partAssignmentsPendingQueue && !empty(partition.assignments.pending)
        //             partition.assignments.planned = calcPartAssignments()
        //             partition.change.trigger = event.timestamp
        //         else if partition.assignments.pending == partAssignmentsPendingQueue
        //             remove(partition.assignments.planned)
        //             partition.change.trigger = event.timestamp
        //             message after the metastorage invoke:
        //             "Remove planned key because current pending key has the same value."
        //         else if empty(partition.assignments.pending)
        //             remove(partition.assignments.planned)
        //             partition.change.trigger = event.timestamp
        //             message after the metastorage invoke:
        //             "Remove planned key because pending is empty and calculated assignments are equal to current assignments."
        // else:
        //     skip
        Condition newAssignmentsCondition = exists(partAssignmentsStableKey)
                .and(value(partAssignmentsStableKey).ne(partAssignmentsPlannedBytes));
        if (isNewAssignments) {
            newAssignmentsCondition = notExists(partAssignmentsStableKey).or(newAssignmentsCondition);
        }
        byte[] timestampBytes = longToBytesKeepingOrder(timestamp.longValue());
        Iif iif = iif(
                or(notExists(partChangeTriggerKey), value(partChangeTriggerKey).lt(timestampBytes)),
                iif(and(notExists(partAssignmentsPendingKey), newAssignmentsCondition),
                        ops(
                                put(partAssignmentsPendingKey, partAssignmentsPendingBytes),
                                put(partChangeTriggerKey, timestampBytes)
                        ).yield(PENDING_KEY_UPDATED.ordinal()),
                        iif(and(value(partAssignmentsPendingKey).ne(partAssignmentsPendingBytes), exists(partAssignmentsPendingKey)),
                                ops(
                                        put(partAssignmentsPlannedKey, partAssignmentsPlannedBytes),
                                        put(partChangeTriggerKey, timestampBytes)
                                ).yield(PLANNED_KEY_UPDATED.ordinal()),
                                iif(value(partAssignmentsPendingKey).eq(partAssignmentsPendingBytes),
                                        ops(
                                                remove(partAssignmentsPlannedKey),
                                                put(partChangeTriggerKey, timestampBytes)
                                        ).yield(PLANNED_KEY_REMOVED_EQUALS_PENDING.ordinal()),
                                        iif(notExists(partAssignmentsPendingKey),
                                                ops(
                                                        remove(partAssignmentsPlannedKey),
                                                        put(partChangeTriggerKey, timestampBytes)
                                                ).yield(PLANNED_KEY_REMOVED_EMPTY_PENDING.ordinal()),
                                                ops().yield(ASSIGNMENT_NOT_UPDATED.ordinal()))
                                ))),
                ops().yield(OUTDATED_UPDATE_RECEIVED.ordinal()));
        // The yielded ordinal tells us which branch of the multi-invoke was taken; log accordingly.
        return metaStorageMgr.invoke(iif).thenAccept(sr -> {
            switch (UpdateStatus.valueOf(sr.getAsInt())) {
                case PENDING_KEY_UPDATED:
                    LOG.info(
                            "Update metastore pending partitions key [key={}, partition={}, zone={}/{}, newVal={}, timestamp={}]",
                            partAssignmentsPendingKey.toString(), partNum, zoneDescriptor.id(), zoneDescriptor.name(),
                            partAssignmentsPendingQueue, timestamp);
                    break;
                case PLANNED_KEY_UPDATED:
                    LOG.info(
                            "Update metastore planned partitions key [key={}, partition={}, zone={}/{}, newVal={}]",
                            partAssignmentsPlannedKey, partNum, zoneDescriptor.id(), zoneDescriptor.name(),
                            targetAssignmentSet
                    );
                    break;
                case PLANNED_KEY_REMOVED_EQUALS_PENDING:
                    LOG.info(
                            "Remove planned key because current pending key has the same value [key={}, partition={}, zone={}/{}, val={}]",
                            partAssignmentsPlannedKey.toString(), partNum, zoneDescriptor.id(), zoneDescriptor.name(),
                            targetAssignmentSet
                    );
                    break;
                case PLANNED_KEY_REMOVED_EMPTY_PENDING:
                    LOG.info(
                            "Remove planned key because pending is empty and calculated assignments are equal to current assignments "
                                    + "[key={}, partition={}, zone={}/{}, val={}]",
                            partAssignmentsPlannedKey.toString(), partNum, zoneDescriptor.id(), zoneDescriptor.name(),
                            targetAssignmentSet
                    );
                    break;
                case ASSIGNMENT_NOT_UPDATED:
                    LOG.debug(
                            "Assignments are not updated [key={}, partition={}, zone={}/{}, val={}]",
                            partAssignmentsPlannedKey.toString(), partNum, zoneDescriptor.id(), zoneDescriptor.name(),
                            targetAssignmentSet
                    );
                    break;
                case OUTDATED_UPDATE_RECEIVED:
                    LOG.debug(
                            "Received outdated rebalance trigger event [revision={}, partition={}, zone={}/{}]",
                            revision, partNum, zoneDescriptor.id(), zoneDescriptor.name());
                    break;
                default:
                    throw new IllegalStateException("Unknown return code for rebalance metastore multi-invoke");
            }
        });
    }
    /**
     * Triggers rebalance on all partitions of the provided zone: that is, reads zone assignments from
     * the MetaStorage, computes new ones based on the current properties of the zone, the
     * provided data nodes, and, if the calculated assignments are different from the ones loaded from the
     * MetaStorages, writes them as pending assignments.
     *
     * @param zoneDescriptor Zone descriptor.
     * @param dataNodes Data nodes to use.
     * @param storageRevision MetaStorage revision corresponding to this request.
     * @param storageTimestamp MetaStorage timestamp corresponding to this request.
     * @param metaStorageManager MetaStorage manager used to read/write assignments.
     * @param busyLock Busy lock to use.
     * @param assignmentsTimestamp Time when the catalog version that the assignments were calculated against becomes active.
     * @param aliveNodes Consistent ids of currently alive nodes, passed through to {@link #updatePendingAssignmentsKeys}.
     * @return Array of futures, one per partition of the zone; the futures complete when the described
     *         rebalance triggering completes.
     */
    static CompletableFuture<Void> triggerZonePartitionsRebalance(
            CatalogZoneDescriptor zoneDescriptor,
            Set<String> dataNodes,
            long storageRevision,
            HybridTimestamp storageTimestamp,
            MetaStorageManager metaStorageManager,
            IgniteSpinBusyLock busyLock,
            long assignmentsTimestamp,
            Set<String> aliveNodes
    ) {
        CompletableFuture<Map<Integer, Assignments>> zoneAssignmentsFut = zoneStableAssignments(
                metaStorageManager,
                zoneDescriptor.id(),
                partitionIds(zoneDescriptor.partitions())
        );
        CompletableFuture<?>[] partitionFutures = new CompletableFuture[zoneDescriptor.partitions()];
        for (int partId = 0; partId < zoneDescriptor.partitions(); partId++) {
            ZonePartitionId replicaGrpId = new ZonePartitionId(zoneDescriptor.id(), partId);
            int finalPartId = partId;
            partitionFutures[partId] = zoneAssignmentsFut.thenCompose(zoneAssignments -> inBusyLockAsync(busyLock, () -> {
                // In case of empty assignments due to initially empty data nodes, assignments will be recalculated
                // after the transition to non-empty data nodes.
                // In case of empty assignments due to interrupted zone creation, assignments will be written
                // during the node recovery and then replicas will be started.
                // In case when data nodes become empty, assignments are not recalculated
                // (see DistributionZoneRebalanceEngineV2.createDistributionZonesDataNodesListener).
                return zoneAssignments.isEmpty() ? nullCompletedFuture() : updatePendingAssignmentsKeys(
                        zoneDescriptor,
                        replicaGrpId,
                        dataNodes,
                        zoneDescriptor.partitions(),
                        zoneDescriptor.replicas(),
                        zoneDescriptor.consensusGroupSize(),
                        storageRevision,
                        storageTimestamp,
                        metaStorageManager,
                        finalPartId,
                        zoneAssignments.get(finalPartId).nodes(),
                        assignmentsTimestamp,
                        aliveNodes,
                        zoneDescriptor.consistencyMode()
                );
            }));
        }
        // This set is used to deduplicate exceptions (if there is an exception from upstream, for instance,
        // when reading from MetaStorage, it will be encountered by every partition future) to avoid noise
        // in the logs.
        Set<Throwable> unwrappedCauses = ConcurrentHashMap.newKeySet();
        for (int partId = 0; partId < partitionFutures.length; partId++) {
            int finalPartId = partId;
            partitionFutures[partId].exceptionally(e -> {
                Throwable cause = ExceptionUtils.unwrapCause(e);
                if (unwrappedCauses.add(cause)) {
                    // The exception is specific to this partition.
                    LOG.error(
                            "Exception on updating assignments for [zone={}, partition={}]",
                            e,
                            zoneInfo(zoneDescriptor), finalPartId
                    );
                } else {
                    // The exception is from upstream and not specific for this partition, so don't log the partition index.
                    LOG.error(
                            "Exception on updating assignments for [zone={}]",
                            e,
                            zoneInfo(zoneDescriptor)
                    );
                }
                return null;
            });
        }
        return allOf(partitionFutures);
    }
private static String zoneInfo(CatalogZoneDescriptor zoneDescriptor) {
return zoneDescriptor.id() + "/" + zoneDescriptor.name();
}
/**
* Key that is needed for skipping stale events of pending key change.
*
* @param zonePartitionId Unique aggregate identifier of a partition of a zone.
* @return Key for a partition.
* @see <a href="https://github.com/apache/ignite-3/blob/main/modules/table/tech-notes/rebalance.md">Rebalance documentation</a>
*/
public static ByteArray pendingChangeTriggerKey(ZonePartitionId zonePartitionId) {
return new ByteArray(ZONE_PENDING_CHANGE_TRIGGER_PREFIX + zonePartitionId);
}
/**
* Key for the graceful restart in HA mode.
*
* @param partId Unique identifier of a partition.
* @return Key for a partition.
* @see <a href="https://cwiki.apache.org/confluence/display/IGNITE/IEP-131%3A+Partition+Majority+Unavailability+Handling">HA mode</a>
*/
public static ByteArray assignmentsChainKey(ZonePartitionId partId) {
return new ByteArray(ZONE_ASSIGNMENTS_CHAIN_PREFIX + partId);
}
/**
* Key that is needed for the rebalance algorithm.
*
* @param zonePartitionId Unique aggregate identifier of a partition of a zone.
* @return Key for a partition.
* @see <a href="https://github.com/apache/ignite-3/blob/main/modules/table/tech-notes/rebalance.md">Rebalance documentation</a>
*/
public static ByteArray pendingPartAssignmentsQueueKey(ZonePartitionId zonePartitionId) {
return new ByteArray(PENDING_ASSIGNMENTS_QUEUE_PREFIX + zonePartitionId);
}
/**
* Key that is needed for the rebalance algorithm.
*
* @param zonePartitionId Unique aggregate identifier of a partition of a zone.
* @return Key for a partition.
* @see <a href="https://github.com/apache/ignite-3/blob/main/modules/table/tech-notes/rebalance.md">Rebalance documentation</a>
*/
public static ByteArray plannedPartAssignmentsKey(ZonePartitionId zonePartitionId) {
return new ByteArray(PLANNED_ASSIGNMENTS_PREFIX + zonePartitionId);
}
/**
* Key that is needed for the rebalance algorithm.
*
* @param zonePartitionId Unique aggregate identifier of a partition of a zone.
* @return Key for a partition.
* @see <a href="https://github.com/apache/ignite-3/blob/main/modules/table/tech-notes/rebalance.md">Rebalance documentation</a>
*/
public static ByteArray stablePartAssignmentsKey(ZonePartitionId zonePartitionId) {
return new ByteArray(STABLE_ASSIGNMENTS_PREFIX + zonePartitionId);
}
/**
* Key that is needed for the rebalance algorithm.
*
* @param zonePartitionId Unique aggregate identifier of a partition of a zone.
* @return Key for a partition.
* @see <a href="https://github.com/apache/ignite-3/blob/main/modules/table/tech-notes/rebalance.md">Rebalance documentation</a>
*/
public static ByteArray switchReduceKey(ZonePartitionId zonePartitionId) {
return new ByteArray(ASSIGNMENTS_SWITCH_REDUCE_PREFIX + zonePartitionId);
}
/**
* Key that is needed for the rebalance algorithm.
*
* @param zonePartitionId Unique aggregate identifier of a partition of a zone.
* @return Key for a partition.
* @see <a href="https://github.com/apache/ignite-3/blob/main/modules/table/tech-notes/rebalance.md">Rebalance documentation</a>
*/
public static ByteArray switchAppendKey(ZonePartitionId zonePartitionId) {
return new ByteArray(ASSIGNMENTS_SWITCH_APPEND_PREFIX + zonePartitionId);
}
/**
* Converts the given {@code key}, stripping it off the given {@code prefix}, into a {@link ZonePartitionId}.
*
* @param key Metastorage key.
* @param prefix Key prefix.
* @return {@link ZonePartitionId} that was encoded in the key.
*/
public static ZonePartitionId extractZonePartitionId(byte[] key, byte[] prefix) {
var zonePartitionIdString = toStringWithoutPrefix(key, prefix.length);
return ZonePartitionId.fromString(zonePartitionIdString);
}
/**
* Removes nodes from set of nodes.
*
* @param minuend Set to remove nodes from.
* @param subtrahend Set of nodes to be removed.
* @return Result of the subtraction.
*/
public static <T> Set<T> subtract(Set<T> minuend, Set<T> subtrahend) {
return minuend.stream().filter(v -> !subtrahend.contains(v)).collect(toSet());
}
/**
* Adds nodes to the set of nodes.
*
* @param op1 First operand.
* @param op2 Second operand.
* @return Result of the addition.
*/
public static <T> Set<T> union(Set<T> op1, Set<T> op2) {
var res = new HashSet<>(op1);
res.addAll(op2);
return res;
}
/**
* Returns an intersection of two set of nodes.
*
* @param op1 First operand.
* @param op2 Second operand.
* @return Result of the intersection.
*/
public static <T> Set<T> intersect(Set<T> op1, Set<T> op2) {
return op1.stream().filter(op2::contains).collect(toSet());
}
/**
* Returns stable partition assignments from meta storage locally.
*
* @param metaStorageManager Meta storage manager.
* @param zoneId Zone id.
* @param partitionNumber Partition number.
* @param revision Revision.
* @return Returns partition assignments from meta storage locally or {@code null} if assignments is absent.
*/
@Nullable
public static Set<Assignment> zonePartitionAssignmentsGetLocally(
MetaStorageManager metaStorageManager,
int zoneId,
int partitionNumber,
long revision
) {
Assignments assignments =
zoneStableAssignmentsGetLocally(metaStorageManager, new ZonePartitionId(zoneId, partitionNumber), revision);
return assignments == null ? null : assignments.nodes();
}
/**
* Returns zone stable assignments for all zone partitions from meta storage locally. Assignments must be present.
*
* @param metaStorageManager Meta storage manager.
* @param zoneId Zone id.
* @param numberOfPartitions Number of partitions.
* @param revision Revision.
* @return Future with zone assignments as a value.
*/
public static List<Assignments> zoneAssignmentsGetLocally(
MetaStorageManager metaStorageManager,
int zoneId,
int numberOfPartitions,
long revision
) {
return IntStream.range(0, numberOfPartitions)
.mapToObj(p -> {
Assignments assignments =
zoneStableAssignmentsGetLocally(metaStorageManager, new ZonePartitionId(zoneId, p), revision);
assert assignments != null : "No assignments found for " + new ZonePartitionId(zoneId, p);
return assignments;
})
.collect(toList());
}
/**
* Returns zone pending assignments for all zone partitions from meta storage locally. Assignments must be present.
*
* @param metaStorageManager Meta storage manager.
* @param zoneId Zone id.
* @param numberOfPartitions Number of partitions.
* @param revision Revision.
* @return Future with zone assignments as a value.
*/
public static List<@Nullable Assignments> zonePendingAssignmentsGetLocally(
MetaStorageManager metaStorageManager,
int zoneId,
int numberOfPartitions,
long revision
) {
return IntStream.range(0, numberOfPartitions)
.mapToObj(p -> {
Entry e = metaStorageManager.getLocally(pendingPartAssignmentsQueueKey(new ZonePartitionId(zoneId, p)), revision);
return e != null && !e.empty() && !e.tombstone() ? AssignmentsQueue.fromBytes(e.value()).poll() : null;
})
.collect(toList());
}
    /**
     * Returns stable partition assignments from meta storage.
     *
     * @param metaStorageManager Meta storage manager.
     * @param zoneId Zone ID.
     * @param partitionId Partition ID.
     * @return Future with partition assignments as a value, or with {@code null} as a value if the key has no value.
     */
    public static CompletableFuture<Set<Assignment>> zonePartitionAssignments(
            MetaStorageManager metaStorageManager,
            int zoneId,
            int partitionId
    ) {
        return metaStorageManager
                .get(stablePartAssignmentsKey(new ZonePartitionId(zoneId, partitionId)))
                .thenApply(e -> (e.value() == null) ? null : Assignments.fromBytes(e.value()).nodes());
    }
/**
* Returns zone assignments for zone partitions from meta storage.
*
* @param metaStorageManager Meta storage manager.
* @param zoneId Zone id.
* @param partitionIds IDs of partitions to get assignments for.
* @return Future with zone assignments as a value.
*/
public static CompletableFuture<Map<Integer, Assignments>> zoneStableAssignments(
MetaStorageManager metaStorageManager,
int zoneId,
int[] partitionIds
) {
return metastoreAssignments(
metaStorageManager,
partitionIds,
partitionId -> stablePartAssignmentsKey(new ZonePartitionId(zoneId, partitionId))
).whenComplete((assignmentsMap, throwable) -> {
if (throwable == null) {
int numberOfMsPartitions = assignmentsMap.size();
assert numberOfMsPartitions == 0 || numberOfMsPartitions == partitionIds.length
: "Invalid number of partition entries received from meta storage [received="
+ numberOfMsPartitions + ", numberOfPartitions=" + partitionIds.length + ", zoneId=" + zoneId + "].";
}
});
}
/**
* Returns partition assignments from meta storage locally.
*
* @param metaStorageManager Meta storage manager.
* @param zonePartitionId Zone partition id.
* @param revision Revision.
* @return Returns partition assignments from meta storage locally or {@code null} if assignments is absent.
*/
public static @Nullable Assignments zoneStableAssignmentsGetLocally(
MetaStorageManager metaStorageManager,
ZonePartitionId zonePartitionId,
long revision
) {
Entry entry = metaStorageManager.getLocally(stablePartAssignmentsKey(zonePartitionId), revision);
return (entry == null || entry.empty() || entry.tombstone()) ? null : Assignments.fromBytes(entry.value());
}
    /**
     * Returns assignments chains for all zone partitions from meta storage locally.
     *
     * @param metaStorageManager Meta storage manager.
     * @param zoneId Zone id.
     * @param numberOfPartitions Number of partitions.
     * @param revision Revision.
     * @return List of per-partition assignments chains, ordered by partition number. Elements may be {@code null} when
     *         no chain entry is stored for a partition (see {@link #assignmentsChainGetLocally}).
     */
    public static List<AssignmentsChain> zoneAssignmentsChainGetLocally(
            MetaStorageManager metaStorageManager,
            int zoneId,
            int numberOfPartitions,
            long revision
    ) {
        return IntStream.range(0, numberOfPartitions)
                .mapToObj(p -> assignmentsChainGetLocally(metaStorageManager, new ZonePartitionId(zoneId, p), revision))
                .collect(toList());
    }
/**
* Returns assignments chain from meta storage locally.
*
* @param metaStorageManager Meta storage manager.
* @param zonePartitionId Zone partition id.
* @param revision Revision.
* @return Returns assignments chain from meta storage locally or {@code null} if assignments is absent.
*/
public static @Nullable AssignmentsChain assignmentsChainGetLocally(
MetaStorageManager metaStorageManager,
ZonePartitionId zonePartitionId,
long revision
) {
Entry e = metaStorageManager.getLocally(assignmentsChainKey(zonePartitionId), revision);
return e != null ? AssignmentsChain.fromBytes(e.value()) : null;
}
}
|
googleads/google-ads-java | 36,291 | google-ads-stubs-v19/src/main/java/com/google/ads/googleads/v19/common/ChainLocationGroup.java | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v19/common/asset_set_types.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v19.common;
/**
* <pre>
* Represents information about a Chain dynamic location group.
* Only applicable if the sync level AssetSet's type is LOCATION_SYNC and
* sync source is chain.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v19.common.ChainLocationGroup}
*/
public final class ChainLocationGroup extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v19.common.ChainLocationGroup)
ChainLocationGroupOrBuilder {
private static final long serialVersionUID = 0L;
// Use ChainLocationGroup.newBuilder() to construct.
  // Builder-based constructor: field state is supplied by Builder#buildPartial.
  private ChainLocationGroup(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor: the repeated field starts as an immutable empty list.
  private ChainLocationGroup() {
    dynamicChainLocationGroupFilters_ = java.util.Collections.emptyList();
  }
  // Invoked by the protobuf runtime to create fresh instances (e.g. during parsing).
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new ChainLocationGroup();
  }
  // Message descriptor generated from asset_set_types.proto.
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.ads.googleads.v19.common.AssetSetTypesProto.internal_static_google_ads_googleads_v19_common_ChainLocationGroup_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.ads.googleads.v19.common.AssetSetTypesProto.internal_static_google_ads_googleads_v19_common_ChainLocationGroup_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.ads.googleads.v19.common.ChainLocationGroup.class, com.google.ads.googleads.v19.common.ChainLocationGroup.Builder.class);
  }
  // Field number of dynamic_chain_location_group_filters in the .proto definition.
  public static final int DYNAMIC_CHAIN_LOCATION_GROUP_FILTERS_FIELD_NUMBER = 1;
  @SuppressWarnings("serial")
  private java.util.List<com.google.ads.googleads.v19.common.ChainFilter> dynamicChainLocationGroupFilters_;
  /**
   * <pre>
   * Used to filter chain locations by chain ids.
   * Only Locations that belong to the specified chain(s) will be in the asset
   * set.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v19.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.ads.googleads.v19.common.ChainFilter> getDynamicChainLocationGroupFiltersList() {
    // Returns the internal list directly; it is made unmodifiable when the message is built.
    return dynamicChainLocationGroupFilters_;
  }
  /**
   * <pre>
   * Used to filter chain locations by chain ids.
   * Only Locations that belong to the specified chain(s) will be in the asset
   * set.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v19.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.ads.googleads.v19.common.ChainFilterOrBuilder>
      getDynamicChainLocationGroupFiltersOrBuilderList() {
    return dynamicChainLocationGroupFilters_;
  }
  /**
   * <pre>
   * Used to filter chain locations by chain ids.
   * Only Locations that belong to the specified chain(s) will be in the asset
   * set.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v19.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
   */
  @java.lang.Override
  public int getDynamicChainLocationGroupFiltersCount() {
    return dynamicChainLocationGroupFilters_.size();
  }
  /**
   * <pre>
   * Used to filter chain locations by chain ids.
   * Only Locations that belong to the specified chain(s) will be in the asset
   * set.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v19.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
   */
  @java.lang.Override
  public com.google.ads.googleads.v19.common.ChainFilter getDynamicChainLocationGroupFilters(int index) {
    return dynamicChainLocationGroupFilters_.get(index);
  }
  /**
   * <pre>
   * Used to filter chain locations by chain ids.
   * Only Locations that belong to the specified chain(s) will be in the asset
   * set.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v19.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
   */
  @java.lang.Override
  public com.google.ads.googleads.v19.common.ChainFilterOrBuilder getDynamicChainLocationGroupFiltersOrBuilder(
      int index) {
    return dynamicChainLocationGroupFilters_.get(index);
  }
  // Memoized result of isInitialized(): -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields in this message, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
      throws java.io.IOException {
    // Field 1: each ChainFilter element is written as a length-delimited message.
    for (int i = 0; i < dynamicChainLocationGroupFilters_.size(); i++) {
      output.writeMessage(1, dynamicChainLocationGroupFilters_.get(i));
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize caches the computed size; -1 means "not computed yet".
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    for (int i = 0; i < dynamicChainLocationGroupFilters_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream
        .computeMessageSize(1, dynamicChainLocationGroupFilters_.get(i));
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.ads.googleads.v19.common.ChainLocationGroup)) {
      return super.equals(obj);
    }
    com.google.ads.googleads.v19.common.ChainLocationGroup other = (com.google.ads.googleads.v19.common.ChainLocationGroup) obj;

    // Messages are equal when the filter list and any unknown fields match.
    if (!getDynamicChainLocationGroupFiltersList()
        .equals(other.getDynamicChainLocationGroupFiltersList())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getDynamicChainLocationGroupFiltersCount() > 0) {
      hash = (37 * hash) + DYNAMIC_CHAIN_LOCATION_GROUP_FILTERS_FIELD_NUMBER;
      hash = (53 * hash) + getDynamicChainLocationGroupFiltersList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // ---- Standard generated parse entry points; all delegate to PARSER. ----
  public static com.google.ads.googleads.v19.common.ChainLocationGroup parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v19.common.ChainLocationGroup parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.common.ChainLocationGroup parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v19.common.ChainLocationGroup parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.common.ChainLocationGroup parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v19.common.ChainLocationGroup parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.common.ChainLocationGroup parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v19.common.ChainLocationGroup parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  // Delimited variants read a varint length prefix before the message bytes.
  public static com.google.ads.googleads.v19.common.ChainLocationGroup parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v19.common.ChainLocationGroup parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.common.ChainLocationGroup parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v19.common.ChainLocationGroup parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  // ---- Builder factories. ----
  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.ads.googleads.v19.common.ChainLocationGroup prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Avoids a needless mergeFrom when this is the default instance.
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
* <pre>
* Represents information about a Chain dynamic location group.
* Only applicable if the sync level AssetSet's type is LOCATION_SYNC and
* sync source is chain.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v19.common.ChainLocationGroup}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v19.common.ChainLocationGroup)
com.google.ads.googleads.v19.common.ChainLocationGroupOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v19.common.AssetSetTypesProto.internal_static_google_ads_googleads_v19_common_ChainLocationGroup_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v19.common.AssetSetTypesProto.internal_static_google_ads_googleads_v19_common_ChainLocationGroup_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v19.common.ChainLocationGroup.class, com.google.ads.googleads.v19.common.ChainLocationGroup.Builder.class);
    }

    // Construct using com.google.ads.googleads.v19.common.ChainLocationGroup.newBuilder()
    private Builder() {

    }

    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);

    }
    // Resets the repeated field either on the local list or on the field builder, whichever is active.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (dynamicChainLocationGroupFiltersBuilder_ == null) {
        dynamicChainLocationGroupFilters_ = java.util.Collections.emptyList();
      } else {
        dynamicChainLocationGroupFilters_ = null;
        dynamicChainLocationGroupFiltersBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v19.common.AssetSetTypesProto.internal_static_google_ads_googleads_v19_common_ChainLocationGroup_descriptor;
    }

    @java.lang.Override
    public com.google.ads.googleads.v19.common.ChainLocationGroup getDefaultInstanceForType() {
      return com.google.ads.googleads.v19.common.ChainLocationGroup.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.ads.googleads.v19.common.ChainLocationGroup build() {
      com.google.ads.googleads.v19.common.ChainLocationGroup result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.ads.googleads.v19.common.ChainLocationGroup buildPartial() {
      com.google.ads.googleads.v19.common.ChainLocationGroup result = new com.google.ads.googleads.v19.common.ChainLocationGroup(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) { buildPartial0(result); }
      onBuilt();
      return result;
    }

    private void buildPartialRepeatedFields(com.google.ads.googleads.v19.common.ChainLocationGroup result) {
      if (dynamicChainLocationGroupFiltersBuilder_ == null) {
        // Freeze the locally-held list before handing it to the immutable message.
        if (((bitField0_ & 0x00000001) != 0)) {
          dynamicChainLocationGroupFilters_ = java.util.Collections.unmodifiableList(dynamicChainLocationGroupFilters_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.dynamicChainLocationGroupFilters_ = dynamicChainLocationGroupFilters_;
      } else {
        result.dynamicChainLocationGroupFilters_ = dynamicChainLocationGroupFiltersBuilder_.build();
      }
    }

    private void buildPartial0(com.google.ads.googleads.v19.common.ChainLocationGroup result) {
      int from_bitField0_ = bitField0_;
    }
    // ---- Plain delegations to the GeneratedMessageV3.Builder superclass. ----
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    // Dispatches to the type-specific overload when possible.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.ads.googleads.v19.common.ChainLocationGroup) {
        return mergeFrom((com.google.ads.googleads.v19.common.ChainLocationGroup)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.ads.googleads.v19.common.ChainLocationGroup other) {
      if (other == com.google.ads.googleads.v19.common.ChainLocationGroup.getDefaultInstance()) return this;
      if (dynamicChainLocationGroupFiltersBuilder_ == null) {
        if (!other.dynamicChainLocationGroupFilters_.isEmpty()) {
          if (dynamicChainLocationGroupFilters_.isEmpty()) {
            // Adopt the other message's (immutable) list wholesale instead of copying.
            dynamicChainLocationGroupFilters_ = other.dynamicChainLocationGroupFilters_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureDynamicChainLocationGroupFiltersIsMutable();
            dynamicChainLocationGroupFilters_.addAll(other.dynamicChainLocationGroupFilters_);
          }
          onChanged();
        }
      } else {
        if (!other.dynamicChainLocationGroupFilters_.isEmpty()) {
          if (dynamicChainLocationGroupFiltersBuilder_.isEmpty()) {
            dynamicChainLocationGroupFiltersBuilder_.dispose();
            dynamicChainLocationGroupFiltersBuilder_ = null;
            dynamicChainLocationGroupFilters_ = other.dynamicChainLocationGroupFilters_;
            bitField0_ = (bitField0_ & ~0x00000001);
            dynamicChainLocationGroupFiltersBuilder_ = 
              com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                 getDynamicChainLocationGroupFiltersFieldBuilder() : null;
          } else {
            dynamicChainLocationGroupFiltersBuilder_.addAllMessages(other.dynamicChainLocationGroupFilters_);
          }
        }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            // Tag 10 = field 1, length-delimited: one ChainFilter element.
            case 10: {
              com.google.ads.googleads.v19.common.ChainFilter m =
                  input.readMessage(
                      com.google.ads.googleads.v19.common.ChainFilter.parser(),
                      extensionRegistry);
              if (dynamicChainLocationGroupFiltersBuilder_ == null) {
                ensureDynamicChainLocationGroupFiltersIsMutable();
                dynamicChainLocationGroupFilters_.add(m);
              } else {
                dynamicChainLocationGroupFiltersBuilder_.addMessage(m);
              }
              break;
            } // case 10
            default: {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Bit 0 tracks whether dynamicChainLocationGroupFilters_ holds a builder-local mutable copy.
    private int bitField0_;

    private java.util.List<com.google.ads.googleads.v19.common.ChainFilter> dynamicChainLocationGroupFilters_ =
      java.util.Collections.emptyList();
    private void ensureDynamicChainLocationGroupFiltersIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        dynamicChainLocationGroupFilters_ = new java.util.ArrayList<com.google.ads.googleads.v19.common.ChainFilter>(dynamicChainLocationGroupFilters_);
        bitField0_ |= 0x00000001;
       }
    }

    private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.ads.googleads.v19.common.ChainFilter, com.google.ads.googleads.v19.common.ChainFilter.Builder, com.google.ads.googleads.v19.common.ChainFilterOrBuilder> dynamicChainLocationGroupFiltersBuilder_;
    // All accessors below follow the same pattern: operate on the RepeatedFieldBuilderV3 when one
    // has been created, otherwise on the builder-local list (made mutable on demand).
    /**
     * <pre>
     * Used to filter chain locations by chain ids.
     * Only Locations that belong to the specified chain(s) will be in the asset
     * set.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v19.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
     */
    public java.util.List<com.google.ads.googleads.v19.common.ChainFilter> getDynamicChainLocationGroupFiltersList() {
      if (dynamicChainLocationGroupFiltersBuilder_ == null) {
        return java.util.Collections.unmodifiableList(dynamicChainLocationGroupFilters_);
      } else {
        return dynamicChainLocationGroupFiltersBuilder_.getMessageList();
      }
    }
    /**
     * <pre>
     * Used to filter chain locations by chain ids.
     * Only Locations that belong to the specified chain(s) will be in the asset
     * set.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v19.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
     */
    public int getDynamicChainLocationGroupFiltersCount() {
      if (dynamicChainLocationGroupFiltersBuilder_ == null) {
        return dynamicChainLocationGroupFilters_.size();
      } else {
        return dynamicChainLocationGroupFiltersBuilder_.getCount();
      }
    }
    /**
     * <pre>
     * Used to filter chain locations by chain ids.
     * Only Locations that belong to the specified chain(s) will be in the asset
     * set.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v19.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
     */
    public com.google.ads.googleads.v19.common.ChainFilter getDynamicChainLocationGroupFilters(int index) {
      if (dynamicChainLocationGroupFiltersBuilder_ == null) {
        return dynamicChainLocationGroupFilters_.get(index);
      } else {
        return dynamicChainLocationGroupFiltersBuilder_.getMessage(index);
      }
    }
    /**
     * <pre>
     * Used to filter chain locations by chain ids.
     * Only Locations that belong to the specified chain(s) will be in the asset
     * set.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v19.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
     */
    public Builder setDynamicChainLocationGroupFilters(
        int index, com.google.ads.googleads.v19.common.ChainFilter value) {
      if (dynamicChainLocationGroupFiltersBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureDynamicChainLocationGroupFiltersIsMutable();
        dynamicChainLocationGroupFilters_.set(index, value);
        onChanged();
      } else {
        dynamicChainLocationGroupFiltersBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     * <pre>
     * Used to filter chain locations by chain ids.
     * Only Locations that belong to the specified chain(s) will be in the asset
     * set.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v19.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
     */
    public Builder setDynamicChainLocationGroupFilters(
        int index, com.google.ads.googleads.v19.common.ChainFilter.Builder builderForValue) {
      if (dynamicChainLocationGroupFiltersBuilder_ == null) {
        ensureDynamicChainLocationGroupFiltersIsMutable();
        dynamicChainLocationGroupFilters_.set(index, builderForValue.build());
        onChanged();
      } else {
        dynamicChainLocationGroupFiltersBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     * <pre>
     * Used to filter chain locations by chain ids.
     * Only Locations that belong to the specified chain(s) will be in the asset
     * set.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v19.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
     */
    public Builder addDynamicChainLocationGroupFilters(com.google.ads.googleads.v19.common.ChainFilter value) {
      if (dynamicChainLocationGroupFiltersBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureDynamicChainLocationGroupFiltersIsMutable();
        dynamicChainLocationGroupFilters_.add(value);
        onChanged();
      } else {
        dynamicChainLocationGroupFiltersBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     * <pre>
     * Used to filter chain locations by chain ids.
     * Only Locations that belong to the specified chain(s) will be in the asset
     * set.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v19.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
     */
    public Builder addDynamicChainLocationGroupFilters(
        int index, com.google.ads.googleads.v19.common.ChainFilter value) {
      if (dynamicChainLocationGroupFiltersBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureDynamicChainLocationGroupFiltersIsMutable();
        dynamicChainLocationGroupFilters_.add(index, value);
        onChanged();
      } else {
        dynamicChainLocationGroupFiltersBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     * <pre>
     * Used to filter chain locations by chain ids.
     * Only Locations that belong to the specified chain(s) will be in the asset
     * set.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v19.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
     */
    public Builder addDynamicChainLocationGroupFilters(
        com.google.ads.googleads.v19.common.ChainFilter.Builder builderForValue) {
      if (dynamicChainLocationGroupFiltersBuilder_ == null) {
        ensureDynamicChainLocationGroupFiltersIsMutable();
        dynamicChainLocationGroupFilters_.add(builderForValue.build());
        onChanged();
      } else {
        dynamicChainLocationGroupFiltersBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     * <pre>
     * Used to filter chain locations by chain ids.
     * Only Locations that belong to the specified chain(s) will be in the asset
     * set.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v19.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
     */
    public Builder addDynamicChainLocationGroupFilters(
        int index, com.google.ads.googleads.v19.common.ChainFilter.Builder builderForValue) {
      if (dynamicChainLocationGroupFiltersBuilder_ == null) {
        ensureDynamicChainLocationGroupFiltersIsMutable();
        dynamicChainLocationGroupFilters_.add(index, builderForValue.build());
        onChanged();
      } else {
        dynamicChainLocationGroupFiltersBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     * <pre>
     * Used to filter chain locations by chain ids.
     * Only Locations that belong to the specified chain(s) will be in the asset
     * set.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v19.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
     */
    public Builder addAllDynamicChainLocationGroupFilters(
        java.lang.Iterable<? extends com.google.ads.googleads.v19.common.ChainFilter> values) {
      if (dynamicChainLocationGroupFiltersBuilder_ == null) {
        ensureDynamicChainLocationGroupFiltersIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(
            values, dynamicChainLocationGroupFilters_);
        onChanged();
      } else {
        dynamicChainLocationGroupFiltersBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     * <pre>
     * Used to filter chain locations by chain ids.
     * Only Locations that belong to the specified chain(s) will be in the asset
     * set.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v19.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
     */
    public Builder clearDynamicChainLocationGroupFilters() {
      if (dynamicChainLocationGroupFiltersBuilder_ == null) {
        dynamicChainLocationGroupFilters_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        dynamicChainLocationGroupFiltersBuilder_.clear();
      }
      return this;
    }
    /**
     * <pre>
     * Used to filter chain locations by chain ids.
     * Only Locations that belong to the specified chain(s) will be in the asset
     * set.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v19.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
     */
    public Builder removeDynamicChainLocationGroupFilters(int index) {
      if (dynamicChainLocationGroupFiltersBuilder_ == null) {
        ensureDynamicChainLocationGroupFiltersIsMutable();
        dynamicChainLocationGroupFilters_.remove(index);
        onChanged();
      } else {
        dynamicChainLocationGroupFiltersBuilder_.remove(index);
      }
      return this;
    }
/**
* <pre>
* Used to filter chain locations by chain ids.
* Only Locations that belong to the specified chain(s) will be in the asset
* set.
* </pre>
*
* <code>repeated .google.ads.googleads.v19.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
*/
public com.google.ads.googleads.v19.common.ChainFilter.Builder getDynamicChainLocationGroupFiltersBuilder(
int index) {
return getDynamicChainLocationGroupFiltersFieldBuilder().getBuilder(index);
}
/**
* <pre>
* Used to filter chain locations by chain ids.
* Only Locations that belong to the specified chain(s) will be in the asset
* set.
* </pre>
*
* <code>repeated .google.ads.googleads.v19.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
*/
public com.google.ads.googleads.v19.common.ChainFilterOrBuilder getDynamicChainLocationGroupFiltersOrBuilder(
int index) {
if (dynamicChainLocationGroupFiltersBuilder_ == null) {
return dynamicChainLocationGroupFilters_.get(index); } else {
return dynamicChainLocationGroupFiltersBuilder_.getMessageOrBuilder(index);
}
}
/**
* <pre>
* Used to filter chain locations by chain ids.
* Only Locations that belong to the specified chain(s) will be in the asset
* set.
* </pre>
*
* <code>repeated .google.ads.googleads.v19.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
*/
public java.util.List<? extends com.google.ads.googleads.v19.common.ChainFilterOrBuilder>
getDynamicChainLocationGroupFiltersOrBuilderList() {
if (dynamicChainLocationGroupFiltersBuilder_ != null) {
return dynamicChainLocationGroupFiltersBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(dynamicChainLocationGroupFilters_);
}
}
/**
* <pre>
* Used to filter chain locations by chain ids.
* Only Locations that belong to the specified chain(s) will be in the asset
* set.
* </pre>
*
* <code>repeated .google.ads.googleads.v19.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
*/
public com.google.ads.googleads.v19.common.ChainFilter.Builder addDynamicChainLocationGroupFiltersBuilder() {
return getDynamicChainLocationGroupFiltersFieldBuilder().addBuilder(
com.google.ads.googleads.v19.common.ChainFilter.getDefaultInstance());
}
/**
* <pre>
* Used to filter chain locations by chain ids.
* Only Locations that belong to the specified chain(s) will be in the asset
* set.
* </pre>
*
* <code>repeated .google.ads.googleads.v19.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
*/
public com.google.ads.googleads.v19.common.ChainFilter.Builder addDynamicChainLocationGroupFiltersBuilder(
int index) {
return getDynamicChainLocationGroupFiltersFieldBuilder().addBuilder(
index, com.google.ads.googleads.v19.common.ChainFilter.getDefaultInstance());
}
/**
* <pre>
* Used to filter chain locations by chain ids.
* Only Locations that belong to the specified chain(s) will be in the asset
* set.
* </pre>
*
* <code>repeated .google.ads.googleads.v19.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
*/
public java.util.List<com.google.ads.googleads.v19.common.ChainFilter.Builder>
getDynamicChainLocationGroupFiltersBuilderList() {
return getDynamicChainLocationGroupFiltersFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.ads.googleads.v19.common.ChainFilter, com.google.ads.googleads.v19.common.ChainFilter.Builder, com.google.ads.googleads.v19.common.ChainFilterOrBuilder>
getDynamicChainLocationGroupFiltersFieldBuilder() {
if (dynamicChainLocationGroupFiltersBuilder_ == null) {
dynamicChainLocationGroupFiltersBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.ads.googleads.v19.common.ChainFilter, com.google.ads.googleads.v19.common.ChainFilter.Builder, com.google.ads.googleads.v19.common.ChainFilterOrBuilder>(
dynamicChainLocationGroupFilters_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
dynamicChainLocationGroupFilters_ = null;
}
return dynamicChainLocationGroupFiltersBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v19.common.ChainLocationGroup)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v19.common.ChainLocationGroup)
private static final com.google.ads.googleads.v19.common.ChainLocationGroup DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.ads.googleads.v19.common.ChainLocationGroup();
}
public static com.google.ads.googleads.v19.common.ChainLocationGroup getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ChainLocationGroup>
PARSER = new com.google.protobuf.AbstractParser<ChainLocationGroup>() {
@java.lang.Override
public ChainLocationGroup parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ChainLocationGroup> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ChainLocationGroup> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.ads.googleads.v19.common.ChainLocationGroup getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleads/google-ads-java | 36,291 | google-ads-stubs-v20/src/main/java/com/google/ads/googleads/v20/common/ChainLocationGroup.java | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v20/common/asset_set_types.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v20.common;
/**
* <pre>
* Represents information about a Chain dynamic location group.
* Only applicable if the sync level AssetSet's type is LOCATION_SYNC and
* sync source is chain.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v20.common.ChainLocationGroup}
*/
public final class ChainLocationGroup extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v20.common.ChainLocationGroup)
ChainLocationGroupOrBuilder {
private static final long serialVersionUID = 0L;
// Use ChainLocationGroup.newBuilder() to construct.
private ChainLocationGroup(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ChainLocationGroup() {
dynamicChainLocationGroupFilters_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new ChainLocationGroup();
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v20.common.AssetSetTypesProto.internal_static_google_ads_googleads_v20_common_ChainLocationGroup_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v20.common.AssetSetTypesProto.internal_static_google_ads_googleads_v20_common_ChainLocationGroup_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v20.common.ChainLocationGroup.class, com.google.ads.googleads.v20.common.ChainLocationGroup.Builder.class);
}
public static final int DYNAMIC_CHAIN_LOCATION_GROUP_FILTERS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.ads.googleads.v20.common.ChainFilter> dynamicChainLocationGroupFilters_;
/**
* <pre>
* Used to filter chain locations by chain ids.
* Only Locations that belong to the specified chain(s) will be in the asset
* set.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.ads.googleads.v20.common.ChainFilter> getDynamicChainLocationGroupFiltersList() {
return dynamicChainLocationGroupFilters_;
}
/**
* <pre>
* Used to filter chain locations by chain ids.
* Only Locations that belong to the specified chain(s) will be in the asset
* set.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.ads.googleads.v20.common.ChainFilterOrBuilder>
getDynamicChainLocationGroupFiltersOrBuilderList() {
return dynamicChainLocationGroupFilters_;
}
/**
* <pre>
* Used to filter chain locations by chain ids.
* Only Locations that belong to the specified chain(s) will be in the asset
* set.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
*/
@java.lang.Override
public int getDynamicChainLocationGroupFiltersCount() {
return dynamicChainLocationGroupFilters_.size();
}
/**
* <pre>
* Used to filter chain locations by chain ids.
* Only Locations that belong to the specified chain(s) will be in the asset
* set.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
*/
@java.lang.Override
public com.google.ads.googleads.v20.common.ChainFilter getDynamicChainLocationGroupFilters(int index) {
return dynamicChainLocationGroupFilters_.get(index);
}
/**
* <pre>
* Used to filter chain locations by chain ids.
* Only Locations that belong to the specified chain(s) will be in the asset
* set.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
*/
@java.lang.Override
public com.google.ads.googleads.v20.common.ChainFilterOrBuilder getDynamicChainLocationGroupFiltersOrBuilder(
int index) {
return dynamicChainLocationGroupFilters_.get(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
for (int i = 0; i < dynamicChainLocationGroupFilters_.size(); i++) {
output.writeMessage(1, dynamicChainLocationGroupFilters_.get(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < dynamicChainLocationGroupFilters_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, dynamicChainLocationGroupFilters_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.ads.googleads.v20.common.ChainLocationGroup)) {
return super.equals(obj);
}
com.google.ads.googleads.v20.common.ChainLocationGroup other = (com.google.ads.googleads.v20.common.ChainLocationGroup) obj;
if (!getDynamicChainLocationGroupFiltersList()
.equals(other.getDynamicChainLocationGroupFiltersList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getDynamicChainLocationGroupFiltersCount() > 0) {
hash = (37 * hash) + DYNAMIC_CHAIN_LOCATION_GROUP_FILTERS_FIELD_NUMBER;
hash = (53 * hash) + getDynamicChainLocationGroupFiltersList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.ads.googleads.v20.common.ChainLocationGroup parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v20.common.ChainLocationGroup parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v20.common.ChainLocationGroup parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v20.common.ChainLocationGroup parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v20.common.ChainLocationGroup parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v20.common.ChainLocationGroup parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v20.common.ChainLocationGroup parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v20.common.ChainLocationGroup parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v20.common.ChainLocationGroup parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v20.common.ChainLocationGroup parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v20.common.ChainLocationGroup parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v20.common.ChainLocationGroup parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.ads.googleads.v20.common.ChainLocationGroup prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* Represents information about a Chain dynamic location group.
* Only applicable if the sync level AssetSet's type is LOCATION_SYNC and
* sync source is chain.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v20.common.ChainLocationGroup}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v20.common.ChainLocationGroup)
com.google.ads.googleads.v20.common.ChainLocationGroupOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v20.common.AssetSetTypesProto.internal_static_google_ads_googleads_v20_common_ChainLocationGroup_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v20.common.AssetSetTypesProto.internal_static_google_ads_googleads_v20_common_ChainLocationGroup_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v20.common.ChainLocationGroup.class, com.google.ads.googleads.v20.common.ChainLocationGroup.Builder.class);
}
// Construct using com.google.ads.googleads.v20.common.ChainLocationGroup.newBuilder()
private Builder() {
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (dynamicChainLocationGroupFiltersBuilder_ == null) {
dynamicChainLocationGroupFilters_ = java.util.Collections.emptyList();
} else {
dynamicChainLocationGroupFilters_ = null;
dynamicChainLocationGroupFiltersBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.ads.googleads.v20.common.AssetSetTypesProto.internal_static_google_ads_googleads_v20_common_ChainLocationGroup_descriptor;
}
@java.lang.Override
public com.google.ads.googleads.v20.common.ChainLocationGroup getDefaultInstanceForType() {
return com.google.ads.googleads.v20.common.ChainLocationGroup.getDefaultInstance();
}
@java.lang.Override
public com.google.ads.googleads.v20.common.ChainLocationGroup build() {
com.google.ads.googleads.v20.common.ChainLocationGroup result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.ads.googleads.v20.common.ChainLocationGroup buildPartial() {
com.google.ads.googleads.v20.common.ChainLocationGroup result = new com.google.ads.googleads.v20.common.ChainLocationGroup(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartialRepeatedFields(com.google.ads.googleads.v20.common.ChainLocationGroup result) {
if (dynamicChainLocationGroupFiltersBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
dynamicChainLocationGroupFilters_ = java.util.Collections.unmodifiableList(dynamicChainLocationGroupFilters_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.dynamicChainLocationGroupFilters_ = dynamicChainLocationGroupFilters_;
} else {
result.dynamicChainLocationGroupFilters_ = dynamicChainLocationGroupFiltersBuilder_.build();
}
}
private void buildPartial0(com.google.ads.googleads.v20.common.ChainLocationGroup result) {
int from_bitField0_ = bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.ads.googleads.v20.common.ChainLocationGroup) {
return mergeFrom((com.google.ads.googleads.v20.common.ChainLocationGroup)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.ads.googleads.v20.common.ChainLocationGroup other) {
if (other == com.google.ads.googleads.v20.common.ChainLocationGroup.getDefaultInstance()) return this;
if (dynamicChainLocationGroupFiltersBuilder_ == null) {
if (!other.dynamicChainLocationGroupFilters_.isEmpty()) {
if (dynamicChainLocationGroupFilters_.isEmpty()) {
dynamicChainLocationGroupFilters_ = other.dynamicChainLocationGroupFilters_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureDynamicChainLocationGroupFiltersIsMutable();
dynamicChainLocationGroupFilters_.addAll(other.dynamicChainLocationGroupFilters_);
}
onChanged();
}
} else {
if (!other.dynamicChainLocationGroupFilters_.isEmpty()) {
if (dynamicChainLocationGroupFiltersBuilder_.isEmpty()) {
dynamicChainLocationGroupFiltersBuilder_.dispose();
dynamicChainLocationGroupFiltersBuilder_ = null;
dynamicChainLocationGroupFilters_ = other.dynamicChainLocationGroupFilters_;
bitField0_ = (bitField0_ & ~0x00000001);
dynamicChainLocationGroupFiltersBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getDynamicChainLocationGroupFiltersFieldBuilder() : null;
} else {
dynamicChainLocationGroupFiltersBuilder_.addAllMessages(other.dynamicChainLocationGroupFilters_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
com.google.ads.googleads.v20.common.ChainFilter m =
input.readMessage(
com.google.ads.googleads.v20.common.ChainFilter.parser(),
extensionRegistry);
if (dynamicChainLocationGroupFiltersBuilder_ == null) {
ensureDynamicChainLocationGroupFiltersIsMutable();
dynamicChainLocationGroupFilters_.add(m);
} else {
dynamicChainLocationGroupFiltersBuilder_.addMessage(m);
}
break;
} // case 10
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.ads.googleads.v20.common.ChainFilter> dynamicChainLocationGroupFilters_ =
java.util.Collections.emptyList();
private void ensureDynamicChainLocationGroupFiltersIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
dynamicChainLocationGroupFilters_ = new java.util.ArrayList<com.google.ads.googleads.v20.common.ChainFilter>(dynamicChainLocationGroupFilters_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.ads.googleads.v20.common.ChainFilter, com.google.ads.googleads.v20.common.ChainFilter.Builder, com.google.ads.googleads.v20.common.ChainFilterOrBuilder> dynamicChainLocationGroupFiltersBuilder_;
/**
* <pre>
* Used to filter chain locations by chain ids.
* Only Locations that belong to the specified chain(s) will be in the asset
* set.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
*/
public java.util.List<com.google.ads.googleads.v20.common.ChainFilter> getDynamicChainLocationGroupFiltersList() {
if (dynamicChainLocationGroupFiltersBuilder_ == null) {
return java.util.Collections.unmodifiableList(dynamicChainLocationGroupFilters_);
} else {
return dynamicChainLocationGroupFiltersBuilder_.getMessageList();
}
}
/**
* <pre>
* Used to filter chain locations by chain ids.
* Only Locations that belong to the specified chain(s) will be in the asset
* set.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
*/
public int getDynamicChainLocationGroupFiltersCount() {
if (dynamicChainLocationGroupFiltersBuilder_ == null) {
return dynamicChainLocationGroupFilters_.size();
} else {
return dynamicChainLocationGroupFiltersBuilder_.getCount();
}
}
/**
* <pre>
* Used to filter chain locations by chain ids.
* Only Locations that belong to the specified chain(s) will be in the asset
* set.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
*/
public com.google.ads.googleads.v20.common.ChainFilter getDynamicChainLocationGroupFilters(int index) {
if (dynamicChainLocationGroupFiltersBuilder_ == null) {
return dynamicChainLocationGroupFilters_.get(index);
} else {
return dynamicChainLocationGroupFiltersBuilder_.getMessage(index);
}
}
/**
* <pre>
* Used to filter chain locations by chain ids.
* Only Locations that belong to the specified chain(s) will be in the asset
* set.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
*/
public Builder setDynamicChainLocationGroupFilters(
int index, com.google.ads.googleads.v20.common.ChainFilter value) {
if (dynamicChainLocationGroupFiltersBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDynamicChainLocationGroupFiltersIsMutable();
dynamicChainLocationGroupFilters_.set(index, value);
onChanged();
} else {
dynamicChainLocationGroupFiltersBuilder_.setMessage(index, value);
}
return this;
}
/**
* <pre>
* Used to filter chain locations by chain ids.
* Only Locations that belong to the specified chain(s) will be in the asset
* set.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
*/
public Builder setDynamicChainLocationGroupFilters(
int index, com.google.ads.googleads.v20.common.ChainFilter.Builder builderForValue) {
if (dynamicChainLocationGroupFiltersBuilder_ == null) {
ensureDynamicChainLocationGroupFiltersIsMutable();
dynamicChainLocationGroupFilters_.set(index, builderForValue.build());
onChanged();
} else {
dynamicChainLocationGroupFiltersBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* <pre>
* Used to filter chain locations by chain ids.
* Only Locations that belong to the specified chain(s) will be in the asset
* set.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
*/
public Builder addDynamicChainLocationGroupFilters(com.google.ads.googleads.v20.common.ChainFilter value) {
if (dynamicChainLocationGroupFiltersBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDynamicChainLocationGroupFiltersIsMutable();
dynamicChainLocationGroupFilters_.add(value);
onChanged();
} else {
dynamicChainLocationGroupFiltersBuilder_.addMessage(value);
}
return this;
}
/**
* <pre>
* Used to filter chain locations by chain ids.
* Only Locations that belong to the specified chain(s) will be in the asset
* set.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
*/
public Builder addDynamicChainLocationGroupFilters(
int index, com.google.ads.googleads.v20.common.ChainFilter value) {
if (dynamicChainLocationGroupFiltersBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDynamicChainLocationGroupFiltersIsMutable();
dynamicChainLocationGroupFilters_.add(index, value);
onChanged();
} else {
dynamicChainLocationGroupFiltersBuilder_.addMessage(index, value);
}
return this;
}
/**
* <pre>
* Used to filter chain locations by chain ids.
* Only Locations that belong to the specified chain(s) will be in the asset
* set.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
*/
public Builder addDynamicChainLocationGroupFilters(
com.google.ads.googleads.v20.common.ChainFilter.Builder builderForValue) {
if (dynamicChainLocationGroupFiltersBuilder_ == null) {
ensureDynamicChainLocationGroupFiltersIsMutable();
dynamicChainLocationGroupFilters_.add(builderForValue.build());
onChanged();
} else {
dynamicChainLocationGroupFiltersBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* <pre>
* Used to filter chain locations by chain ids.
* Only Locations that belong to the specified chain(s) will be in the asset
* set.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
*/
public Builder addDynamicChainLocationGroupFilters(
int index, com.google.ads.googleads.v20.common.ChainFilter.Builder builderForValue) {
if (dynamicChainLocationGroupFiltersBuilder_ == null) {
ensureDynamicChainLocationGroupFiltersIsMutable();
dynamicChainLocationGroupFilters_.add(index, builderForValue.build());
onChanged();
} else {
dynamicChainLocationGroupFiltersBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* <pre>
* Used to filter chain locations by chain ids.
* Only Locations that belong to the specified chain(s) will be in the asset
* set.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
*/
    // Bulk-appends every element of {@code values}; the protobuf runtime
    // helper performs the per-element copy into the backing list.
    public Builder addAllDynamicChainLocationGroupFilters(
        java.lang.Iterable<? extends com.google.ads.googleads.v20.common.ChainFilter> values) {
      if (dynamicChainLocationGroupFiltersBuilder_ == null) {
        ensureDynamicChainLocationGroupFiltersIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(
            values, dynamicChainLocationGroupFilters_);
        onChanged();
      } else {
        dynamicChainLocationGroupFiltersBuilder_.addAllMessages(values);
      }
      return this;
    }
/**
* <pre>
* Used to filter chain locations by chain ids.
* Only Locations that belong to the specified chain(s) will be in the asset
* set.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
*/
    // Empties the repeated field and clears its mutability bit (0x1) so a
    // later mutation re-copies the shared empty list.
    public Builder clearDynamicChainLocationGroupFilters() {
      if (dynamicChainLocationGroupFiltersBuilder_ == null) {
        dynamicChainLocationGroupFilters_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        dynamicChainLocationGroupFiltersBuilder_.clear();
      }
      return this;
    }
/**
* <pre>
* Used to filter chain locations by chain ids.
* Only Locations that belong to the specified chain(s) will be in the asset
* set.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
*/
    // Removes the element at {@code index} from whichever store is active
    // (plain list or field-builder view).
    public Builder removeDynamicChainLocationGroupFilters(int index) {
      if (dynamicChainLocationGroupFiltersBuilder_ == null) {
        ensureDynamicChainLocationGroupFiltersIsMutable();
        dynamicChainLocationGroupFilters_.remove(index);
        onChanged();
      } else {
        dynamicChainLocationGroupFiltersBuilder_.remove(index);
      }
      return this;
    }
/**
* <pre>
* Used to filter chain locations by chain ids.
* Only Locations that belong to the specified chain(s) will be in the asset
* set.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
*/
    // Returns a mutable sub-builder for the element at {@code index}; this
    // forces creation of the RepeatedFieldBuilderV3 view as a side effect.
    public com.google.ads.googleads.v20.common.ChainFilter.Builder getDynamicChainLocationGroupFiltersBuilder(
        int index) {
      return getDynamicChainLocationGroupFiltersFieldBuilder().getBuilder(index);
    }
/**
* <pre>
* Used to filter chain locations by chain ids.
* Only Locations that belong to the specified chain(s) will be in the asset
* set.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
*/
    // Read-only view of the element at {@code index}; does NOT materialize
    // the field-builder view.
    public com.google.ads.googleads.v20.common.ChainFilterOrBuilder getDynamicChainLocationGroupFiltersOrBuilder(
        int index) {
      if (dynamicChainLocationGroupFiltersBuilder_ == null) {
        return dynamicChainLocationGroupFilters_.get(index); } else {
        return dynamicChainLocationGroupFiltersBuilder_.getMessageOrBuilder(index);
      }
    }
/**
* <pre>
* Used to filter chain locations by chain ids.
* Only Locations that belong to the specified chain(s) will be in the asset
* set.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
*/
    // Unmodifiable read-only view of the whole repeated field.
    public java.util.List<? extends com.google.ads.googleads.v20.common.ChainFilterOrBuilder>
         getDynamicChainLocationGroupFiltersOrBuilderList() {
      if (dynamicChainLocationGroupFiltersBuilder_ != null) {
        return dynamicChainLocationGroupFiltersBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(dynamicChainLocationGroupFilters_);
      }
    }
/**
* <pre>
* Used to filter chain locations by chain ids.
* Only Locations that belong to the specified chain(s) will be in the asset
* set.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
*/
    // Appends a default-initialized element and returns its sub-builder.
    public com.google.ads.googleads.v20.common.ChainFilter.Builder addDynamicChainLocationGroupFiltersBuilder() {
      return getDynamicChainLocationGroupFiltersFieldBuilder().addBuilder(
          com.google.ads.googleads.v20.common.ChainFilter.getDefaultInstance());
    }
/**
* <pre>
* Used to filter chain locations by chain ids.
* Only Locations that belong to the specified chain(s) will be in the asset
* set.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
*/
    // Inserts a default-initialized element at {@code index} and returns its
    // sub-builder.
    public com.google.ads.googleads.v20.common.ChainFilter.Builder addDynamicChainLocationGroupFiltersBuilder(
        int index) {
      return getDynamicChainLocationGroupFiltersFieldBuilder().addBuilder(
          index, com.google.ads.googleads.v20.common.ChainFilter.getDefaultInstance());
    }
/**
* <pre>
* Used to filter chain locations by chain ids.
* Only Locations that belong to the specified chain(s) will be in the asset
* set.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
*/
    // Mutable sub-builder list; materializes the field-builder view.
    public java.util.List<com.google.ads.googleads.v20.common.ChainFilter.Builder>
         getDynamicChainLocationGroupFiltersBuilderList() {
      return getDynamicChainLocationGroupFiltersFieldBuilder().getBuilderList();
    }
    // Lazily creates the RepeatedFieldBuilderV3 view, handing it ownership of
    // the current backing list; the plain list reference is nulled afterwards
    // so all later access goes through the builder.
    private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.ads.googleads.v20.common.ChainFilter, com.google.ads.googleads.v20.common.ChainFilter.Builder, com.google.ads.googleads.v20.common.ChainFilterOrBuilder>
        getDynamicChainLocationGroupFiltersFieldBuilder() {
      if (dynamicChainLocationGroupFiltersBuilder_ == null) {
        dynamicChainLocationGroupFiltersBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.ads.googleads.v20.common.ChainFilter, com.google.ads.googleads.v20.common.ChainFilter.Builder, com.google.ads.googleads.v20.common.ChainFilterOrBuilder>(
                dynamicChainLocationGroupFilters_,
                ((bitField0_ & 0x00000001) != 0),
                getParentForChildren(),
                isClean());
        dynamicChainLocationGroupFilters_ = null;
      }
      return dynamicChainLocationGroupFiltersBuilder_;
    }
    // Unknown-field handling is delegated unchanged to the superclass.
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v20.common.ChainLocationGroup)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v20.common.ChainLocationGroup)
  // Shared immutable default instance, created eagerly at class load.
  private static final com.google.ads.googleads.v20.common.ChainLocationGroup DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v20.common.ChainLocationGroup();
  }

  public static com.google.ads.googleads.v20.common.ChainLocationGroup getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser. On failure it attaches the partially built message to
  // the thrown InvalidProtocolBufferException so callers can inspect what was
  // read before the error.
  private static final com.google.protobuf.Parser<ChainLocationGroup>
      PARSER = new com.google.protobuf.AbstractParser<ChainLocationGroup>() {
    @java.lang.Override
    public ChainLocationGroup parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (com.google.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        // Plain I/O errors are re-wrapped as protocol errors, per parser contract.
        throw new com.google.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };
  // Static and instance accessors for the singleton parser / default instance.
  public static com.google.protobuf.Parser<ChainLocationGroup> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ChainLocationGroup> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.ads.googleads.v20.common.ChainLocationGroup getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/ads/googleads/v21/common/asset_set_types.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v21.common;
/**
* <pre>
* Represents information about a Chain dynamic location group.
* Only applicable if the sync level AssetSet's type is LOCATION_SYNC and
* sync source is chain.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v21.common.ChainLocationGroup}
*/
public final class ChainLocationGroup extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v21.common.ChainLocationGroup)
ChainLocationGroupOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use ChainLocationGroup.newBuilder() to construct.
  private ChainLocationGroup(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor backs the default instance; the repeated field starts
  // as the shared immutable empty list.
  private ChainLocationGroup() {
    dynamicChainLocationGroupFilters_ = java.util.Collections.emptyList();
  }
  // Reflection hook used by the protobuf runtime to create fresh instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new ChainLocationGroup();
  }
  // Descriptor plumbing: ties this class to its entry in asset_set_types.proto.
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.ads.googleads.v21.common.AssetSetTypesProto.internal_static_google_ads_googleads_v21_common_ChainLocationGroup_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.ads.googleads.v21.common.AssetSetTypesProto.internal_static_google_ads_googleads_v21_common_ChainLocationGroup_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.ads.googleads.v21.common.ChainLocationGroup.class, com.google.ads.googleads.v21.common.ChainLocationGroup.Builder.class);
  }
  public static final int DYNAMIC_CHAIN_LOCATION_GROUP_FILTERS_FIELD_NUMBER = 1;
  // Backing storage for the repeated ChainFilter field (field number 1).
  @SuppressWarnings("serial")
  private java.util.List<com.google.ads.googleads.v21.common.ChainFilter> dynamicChainLocationGroupFilters_;
/**
* <pre>
* Used to filter chain locations by chain ids.
* Only Locations that belong to the specified chain(s) will be in the asset
* set.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
*/
  // Returns the backing list directly; immutable once the message is built.
  @java.lang.Override
  public java.util.List<com.google.ads.googleads.v21.common.ChainFilter> getDynamicChainLocationGroupFiltersList() {
    return dynamicChainLocationGroupFilters_;
  }
/**
* <pre>
* Used to filter chain locations by chain ids.
* Only Locations that belong to the specified chain(s) will be in the asset
* set.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
*/
  // Same backing list exposed through the OrBuilder view type.
  @java.lang.Override
  public java.util.List<? extends com.google.ads.googleads.v21.common.ChainFilterOrBuilder>
      getDynamicChainLocationGroupFiltersOrBuilderList() {
    return dynamicChainLocationGroupFilters_;
  }
/**
* <pre>
* Used to filter chain locations by chain ids.
* Only Locations that belong to the specified chain(s) will be in the asset
* set.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
*/
  // Number of elements in the repeated field.
  @java.lang.Override
  public int getDynamicChainLocationGroupFiltersCount() {
    return dynamicChainLocationGroupFilters_.size();
  }
/**
* <pre>
* Used to filter chain locations by chain ids.
* Only Locations that belong to the specified chain(s) will be in the asset
* set.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
*/
  // Element accessor; index bounds are checked by the underlying list.
  @java.lang.Override
  public com.google.ads.googleads.v21.common.ChainFilter getDynamicChainLocationGroupFilters(int index) {
    return dynamicChainLocationGroupFilters_.get(index);
  }
/**
* <pre>
* Used to filter chain locations by chain ids.
* Only Locations that belong to the specified chain(s) will be in the asset
* set.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
*/
  // OrBuilder element accessor; on an immutable message this is always the
  // message itself, never a builder.
  @java.lang.Override
  public com.google.ads.googleads.v21.common.ChainFilterOrBuilder getDynamicChainLocationGroupFiltersOrBuilder(
      int index) {
    return dynamicChainLocationGroupFilters_.get(index);
  }
  // Memoized initialization check: -1 unknown, 0 false, 1 true. This message
  // has no required fields, so the answer is always true once computed.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes each repeated element under tag 1, then any unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    for (int i = 0; i < dynamicChainLocationGroupFilters_.size(); i++) {
      output.writeMessage(1, dynamicChainLocationGroupFilters_.get(i));
    }
    getUnknownFields().writeTo(output);
  }
  // Computes (and memoizes in memoizedSize) the serialized byte length.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    for (int i = 0; i < dynamicChainLocationGroupFilters_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream
        .computeMessageSize(1, dynamicChainLocationGroupFilters_.get(i));
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Structural equality: compares the repeated field list and unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.ads.googleads.v21.common.ChainLocationGroup)) {
      return super.equals(obj);
    }
    com.google.ads.googleads.v21.common.ChainLocationGroup other = (com.google.ads.googleads.v21.common.ChainLocationGroup) obj;

    if (!getDynamicChainLocationGroupFiltersList()
        .equals(other.getDynamicChainLocationGroupFiltersList())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Memoized hash consistent with equals(); seeds with the descriptor hash and
  // mixes in the field number + list hash only when the field is non-empty.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getDynamicChainLocationGroupFiltersCount() > 0) {
      hash = (37 * hash) + DYNAMIC_CHAIN_LOCATION_GROUP_FILTERS_FIELD_NUMBER;
      hash = (53 * hash) + getDynamicChainLocationGroupFiltersList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points: one overload per input source
  // (ByteBuffer, ByteString, byte[], InputStream, delimited stream,
  // CodedInputStream), each with and without an extension registry. All
  // delegate to PARSER / the GeneratedMessageV3 helpers.
  public static com.google.ads.googleads.v21.common.ChainLocationGroup parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v21.common.ChainLocationGroup parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.common.ChainLocationGroup parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v21.common.ChainLocationGroup parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.common.ChainLocationGroup parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v21.common.ChainLocationGroup parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.common.ChainLocationGroup parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v21.common.ChainLocationGroup parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.common.ChainLocationGroup parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v21.common.ChainLocationGroup parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.common.ChainLocationGroup parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v21.common.ChainLocationGroup parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  // Builder factories; newBuilder(prototype) starts from a copy of the given
  // message's state.
  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.ads.googleads.v21.common.ChainLocationGroup prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  // toBuilder() on the default instance skips the mergeFrom copy as an
  // optimization; otherwise the builder is seeded with this message's state.
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
* <pre>
* Represents information about a Chain dynamic location group.
* Only applicable if the sync level AssetSet's type is LOCATION_SYNC and
* sync source is chain.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v21.common.ChainLocationGroup}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v21.common.ChainLocationGroup)
com.google.ads.googleads.v21.common.ChainLocationGroupOrBuilder {
    // Builder-side descriptor plumbing; mirrors the message-level accessors.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v21.common.AssetSetTypesProto.internal_static_google_ads_googleads_v21_common_ChainLocationGroup_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v21.common.AssetSetTypesProto.internal_static_google_ads_googleads_v21_common_ChainLocationGroup_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v21.common.ChainLocationGroup.class, com.google.ads.googleads.v21.common.ChainLocationGroup.Builder.class);
    }
    // Construct using com.google.ads.googleads.v21.common.ChainLocationGroup.newBuilder()
    private Builder() {

    }

    // Parent-aware constructor used when this builder is nested inside
    // another builder's field.
    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);

    }
    // Resets the builder to the default-instance state, covering both storage
    // modes of the repeated field.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (dynamicChainLocationGroupFiltersBuilder_ == null) {
        dynamicChainLocationGroupFilters_ = java.util.Collections.emptyList();
      } else {
        dynamicChainLocationGroupFilters_ = null;
        dynamicChainLocationGroupFiltersBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }
    // Type identity accessors for the generic builder API.
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v21.common.AssetSetTypesProto.internal_static_google_ads_googleads_v21_common_ChainLocationGroup_descriptor;
    }

    @java.lang.Override
    public com.google.ads.googleads.v21.common.ChainLocationGroup getDefaultInstanceForType() {
      return com.google.ads.googleads.v21.common.ChainLocationGroup.getDefaultInstance();
    }
    // Builds and verifies initialization (always true for this message, which
    // has no required fields).
    @java.lang.Override
    public com.google.ads.googleads.v21.common.ChainLocationGroup build() {
      com.google.ads.googleads.v21.common.ChainLocationGroup result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    // Builds without the initialization check; repeated fields are copied out
    // first, then any bitfield-backed singular fields.
    @java.lang.Override
    public com.google.ads.googleads.v21.common.ChainLocationGroup buildPartial() {
      com.google.ads.googleads.v21.common.ChainLocationGroup result = new com.google.ads.googleads.v21.common.ChainLocationGroup(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) { buildPartial0(result); }
      onBuilt();
      return result;
    }
    // Transfers the repeated field into the result: freezes the plain list
    // (making it unmodifiable and clearing the mutability bit) or builds the
    // field-builder contents.
    private void buildPartialRepeatedFields(com.google.ads.googleads.v21.common.ChainLocationGroup result) {
      if (dynamicChainLocationGroupFiltersBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          dynamicChainLocationGroupFilters_ = java.util.Collections.unmodifiableList(dynamicChainLocationGroupFilters_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.dynamicChainLocationGroupFilters_ = dynamicChainLocationGroupFilters_;
      } else {
        result.dynamicChainLocationGroupFilters_ = dynamicChainLocationGroupFiltersBuilder_.build();
      }
    }
    // No singular fields in this message; generated as an empty hook.
    private void buildPartial0(com.google.ads.googleads.v21.common.ChainLocationGroup result) {
      int from_bitField0_ = bitField0_;
    }
    // Boilerplate overrides that simply delegate to GeneratedMessageV3.Builder;
    // regenerated here so the return type is narrowed to this Builder.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    // Type-dispatching merge: uses the typed overload for same-type messages,
    // otherwise falls back to the reflective superclass merge.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.ads.googleads.v21.common.ChainLocationGroup) {
        return mergeFrom((com.google.ads.googleads.v21.common.ChainLocationGroup)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Typed merge. If our side is empty it adopts the other message's
    // (immutable) list by reference rather than copying; otherwise elements
    // are appended. The builder-view branch mirrors this, disposing and
    // re-creating the view when it can adopt the other list wholesale.
    public Builder mergeFrom(com.google.ads.googleads.v21.common.ChainLocationGroup other) {
      if (other == com.google.ads.googleads.v21.common.ChainLocationGroup.getDefaultInstance()) return this;
      if (dynamicChainLocationGroupFiltersBuilder_ == null) {
        if (!other.dynamicChainLocationGroupFilters_.isEmpty()) {
          if (dynamicChainLocationGroupFilters_.isEmpty()) {
            dynamicChainLocationGroupFilters_ = other.dynamicChainLocationGroupFilters_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureDynamicChainLocationGroupFiltersIsMutable();
            dynamicChainLocationGroupFilters_.addAll(other.dynamicChainLocationGroupFilters_);
          }
          onChanged();
        }
      } else {
        if (!other.dynamicChainLocationGroupFilters_.isEmpty()) {
          if (dynamicChainLocationGroupFiltersBuilder_.isEmpty()) {
            dynamicChainLocationGroupFiltersBuilder_.dispose();
            dynamicChainLocationGroupFiltersBuilder_ = null;
            dynamicChainLocationGroupFilters_ = other.dynamicChainLocationGroupFilters_;
            bitField0_ = (bitField0_ & ~0x00000001);
            dynamicChainLocationGroupFiltersBuilder_ =
              com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                 getDynamicChainLocationGroupFiltersFieldBuilder() : null;
          } else {
            dynamicChainLocationGroupFiltersBuilder_.addAllMessages(other.dynamicChainLocationGroupFilters_);
          }
        }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    // No required fields, so a builder is always initialized.
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Wire-format merge loop: tag 10 (field 1, length-delimited) parses one
    // ChainFilter element; anything else is routed to unknown-field handling,
    // where a false return signals an end-group tag. onChanged() fires even on
    // error (finally) because partial state may already have been applied.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10: {
              com.google.ads.googleads.v21.common.ChainFilter m =
                  input.readMessage(
                      com.google.ads.googleads.v21.common.ChainFilter.parser(),
                      extensionRegistry);
              if (dynamicChainLocationGroupFiltersBuilder_ == null) {
                ensureDynamicChainLocationGroupFiltersIsMutable();
                dynamicChainLocationGroupFilters_.add(m);
              } else {
                dynamicChainLocationGroupFiltersBuilder_.addMessage(m);
              }
              break;
            } // case 10
            default: {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    private int bitField0_;

    // Backing list; bit 0x1 of bitField0_ tracks whether it is a private
    // mutable copy.
    private java.util.List<com.google.ads.googleads.v21.common.ChainFilter> dynamicChainLocationGroupFilters_ =
      java.util.Collections.emptyList();
    // Copy-on-first-write: replaces the possibly shared list with a private
    // ArrayList before the first mutation.
    private void ensureDynamicChainLocationGroupFiltersIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        dynamicChainLocationGroupFilters_ = new java.util.ArrayList<com.google.ads.googleads.v21.common.ChainFilter>(dynamicChainLocationGroupFilters_);
        bitField0_ |= 0x00000001;
       }
    }

    // Lazily-created rich view over the repeated field; null until first use.
    private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.ads.googleads.v21.common.ChainFilter, com.google.ads.googleads.v21.common.ChainFilter.Builder, com.google.ads.googleads.v21.common.ChainFilterOrBuilder> dynamicChainLocationGroupFiltersBuilder_;
/**
* <pre>
* Used to filter chain locations by chain ids.
* Only Locations that belong to the specified chain(s) will be in the asset
* set.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
*/
    // Unmodifiable snapshot view of the repeated field.
    public java.util.List<com.google.ads.googleads.v21.common.ChainFilter> getDynamicChainLocationGroupFiltersList() {
      if (dynamicChainLocationGroupFiltersBuilder_ == null) {
        return java.util.Collections.unmodifiableList(dynamicChainLocationGroupFilters_);
      } else {
        return dynamicChainLocationGroupFiltersBuilder_.getMessageList();
      }
    }
/**
* <pre>
* Used to filter chain locations by chain ids.
* Only Locations that belong to the specified chain(s) will be in the asset
* set.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
*/
    // Element count from whichever store is active.
    public int getDynamicChainLocationGroupFiltersCount() {
      if (dynamicChainLocationGroupFiltersBuilder_ == null) {
        return dynamicChainLocationGroupFilters_.size();
      } else {
        return dynamicChainLocationGroupFiltersBuilder_.getCount();
      }
    }
/**
* <pre>
* Used to filter chain locations by chain ids.
* Only Locations that belong to the specified chain(s) will be in the asset
* set.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
*/
    // Element accessor from whichever store is active.
    public com.google.ads.googleads.v21.common.ChainFilter getDynamicChainLocationGroupFilters(int index) {
      if (dynamicChainLocationGroupFiltersBuilder_ == null) {
        return dynamicChainLocationGroupFilters_.get(index);
      } else {
        return dynamicChainLocationGroupFiltersBuilder_.getMessage(index);
      }
    }
/**
* <pre>
* Used to filter chain locations by chain ids.
* Only Locations that belong to the specified chain(s) will be in the asset
* set.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
*/
    // Replaces the element at {@code index}; rejects null values.
    public Builder setDynamicChainLocationGroupFilters(
        int index, com.google.ads.googleads.v21.common.ChainFilter value) {
      if (dynamicChainLocationGroupFiltersBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureDynamicChainLocationGroupFiltersIsMutable();
        dynamicChainLocationGroupFilters_.set(index, value);
        onChanged();
      } else {
        dynamicChainLocationGroupFiltersBuilder_.setMessage(index, value);
      }
      return this;
    }
/**
* <pre>
* Used to filter chain locations by chain ids.
* Only Locations that belong to the specified chain(s) will be in the asset
* set.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
*/
    // Replaces the element at {@code index} with one built from the given
    // sub-builder.
    public Builder setDynamicChainLocationGroupFilters(
        int index, com.google.ads.googleads.v21.common.ChainFilter.Builder builderForValue) {
      if (dynamicChainLocationGroupFiltersBuilder_ == null) {
        ensureDynamicChainLocationGroupFiltersIsMutable();
        dynamicChainLocationGroupFilters_.set(index, builderForValue.build());
        onChanged();
      } else {
        dynamicChainLocationGroupFiltersBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
/**
* <pre>
* Used to filter chain locations by chain ids.
* Only Locations that belong to the specified chain(s) will be in the asset
* set.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
*/
    // Appends a filter; rejects null values.
    public Builder addDynamicChainLocationGroupFilters(com.google.ads.googleads.v21.common.ChainFilter value) {
      if (dynamicChainLocationGroupFiltersBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureDynamicChainLocationGroupFiltersIsMutable();
        dynamicChainLocationGroupFilters_.add(value);
        onChanged();
      } else {
        dynamicChainLocationGroupFiltersBuilder_.addMessage(value);
      }
      return this;
    }
/**
* <pre>
* Used to filter chain locations by chain ids.
* Only Locations that belong to the specified chain(s) will be in the asset
* set.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
*/
    // Inserts a filter at {@code index}; rejects null values.
    public Builder addDynamicChainLocationGroupFilters(
        int index, com.google.ads.googleads.v21.common.ChainFilter value) {
      if (dynamicChainLocationGroupFiltersBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureDynamicChainLocationGroupFiltersIsMutable();
        dynamicChainLocationGroupFilters_.add(index, value);
        onChanged();
      } else {
        dynamicChainLocationGroupFiltersBuilder_.addMessage(index, value);
      }
      return this;
    }
/**
* <pre>
* Used to filter chain locations by chain ids.
* Only Locations that belong to the specified chain(s) will be in the asset
* set.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
*/
    // Appends a filter built from the given sub-builder.
    public Builder addDynamicChainLocationGroupFilters(
        com.google.ads.googleads.v21.common.ChainFilter.Builder builderForValue) {
      if (dynamicChainLocationGroupFiltersBuilder_ == null) {
        ensureDynamicChainLocationGroupFiltersIsMutable();
        dynamicChainLocationGroupFilters_.add(builderForValue.build());
        onChanged();
      } else {
        dynamicChainLocationGroupFiltersBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
/**
* <pre>
* Used to filter chain locations by chain ids.
* Only Locations that belong to the specified chain(s) will be in the asset
* set.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
*/
    // Inserts a filter built from the given sub-builder at {@code index}.
    public Builder addDynamicChainLocationGroupFilters(
        int index, com.google.ads.googleads.v21.common.ChainFilter.Builder builderForValue) {
      if (dynamicChainLocationGroupFiltersBuilder_ == null) {
        ensureDynamicChainLocationGroupFiltersIsMutable();
        dynamicChainLocationGroupFilters_.add(index, builderForValue.build());
        onChanged();
      } else {
        dynamicChainLocationGroupFiltersBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
/**
* <pre>
* Used to filter chain locations by chain ids.
* Only Locations that belong to the specified chain(s) will be in the asset
* set.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
*/
public Builder addAllDynamicChainLocationGroupFilters(
java.lang.Iterable<? extends com.google.ads.googleads.v21.common.ChainFilter> values) {
if (dynamicChainLocationGroupFiltersBuilder_ == null) {
ensureDynamicChainLocationGroupFiltersIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(
values, dynamicChainLocationGroupFilters_);
onChanged();
} else {
dynamicChainLocationGroupFiltersBuilder_.addAllMessages(values);
}
return this;
}
/**
* <pre>
* Used to filter chain locations by chain ids.
* Only Locations that belong to the specified chain(s) will be in the asset
* set.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
*/
public Builder clearDynamicChainLocationGroupFilters() {
if (dynamicChainLocationGroupFiltersBuilder_ == null) {
dynamicChainLocationGroupFilters_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
dynamicChainLocationGroupFiltersBuilder_.clear();
}
return this;
}
/**
* <pre>
* Used to filter chain locations by chain ids.
* Only Locations that belong to the specified chain(s) will be in the asset
* set.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
*/
public Builder removeDynamicChainLocationGroupFilters(int index) {
if (dynamicChainLocationGroupFiltersBuilder_ == null) {
ensureDynamicChainLocationGroupFiltersIsMutable();
dynamicChainLocationGroupFilters_.remove(index);
onChanged();
} else {
dynamicChainLocationGroupFiltersBuilder_.remove(index);
}
return this;
}
/**
* <pre>
* Used to filter chain locations by chain ids.
* Only Locations that belong to the specified chain(s) will be in the asset
* set.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
*/
public com.google.ads.googleads.v21.common.ChainFilter.Builder getDynamicChainLocationGroupFiltersBuilder(
int index) {
return getDynamicChainLocationGroupFiltersFieldBuilder().getBuilder(index);
}
/**
* <pre>
* Used to filter chain locations by chain ids.
* Only Locations that belong to the specified chain(s) will be in the asset
* set.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
*/
public com.google.ads.googleads.v21.common.ChainFilterOrBuilder getDynamicChainLocationGroupFiltersOrBuilder(
int index) {
if (dynamicChainLocationGroupFiltersBuilder_ == null) {
return dynamicChainLocationGroupFilters_.get(index); } else {
return dynamicChainLocationGroupFiltersBuilder_.getMessageOrBuilder(index);
}
}
/**
* <pre>
* Used to filter chain locations by chain ids.
* Only Locations that belong to the specified chain(s) will be in the asset
* set.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
*/
public java.util.List<? extends com.google.ads.googleads.v21.common.ChainFilterOrBuilder>
getDynamicChainLocationGroupFiltersOrBuilderList() {
if (dynamicChainLocationGroupFiltersBuilder_ != null) {
return dynamicChainLocationGroupFiltersBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(dynamicChainLocationGroupFilters_);
}
}
/**
* <pre>
* Used to filter chain locations by chain ids.
* Only Locations that belong to the specified chain(s) will be in the asset
* set.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
*/
public com.google.ads.googleads.v21.common.ChainFilter.Builder addDynamicChainLocationGroupFiltersBuilder() {
return getDynamicChainLocationGroupFiltersFieldBuilder().addBuilder(
com.google.ads.googleads.v21.common.ChainFilter.getDefaultInstance());
}
/**
* <pre>
* Used to filter chain locations by chain ids.
* Only Locations that belong to the specified chain(s) will be in the asset
* set.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
*/
public com.google.ads.googleads.v21.common.ChainFilter.Builder addDynamicChainLocationGroupFiltersBuilder(
int index) {
return getDynamicChainLocationGroupFiltersFieldBuilder().addBuilder(
index, com.google.ads.googleads.v21.common.ChainFilter.getDefaultInstance());
}
/**
* <pre>
* Used to filter chain locations by chain ids.
* Only Locations that belong to the specified chain(s) will be in the asset
* set.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.common.ChainFilter dynamic_chain_location_group_filters = 1;</code>
*/
public java.util.List<com.google.ads.googleads.v21.common.ChainFilter.Builder>
getDynamicChainLocationGroupFiltersBuilderList() {
return getDynamicChainLocationGroupFiltersFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.ads.googleads.v21.common.ChainFilter, com.google.ads.googleads.v21.common.ChainFilter.Builder, com.google.ads.googleads.v21.common.ChainFilterOrBuilder>
getDynamicChainLocationGroupFiltersFieldBuilder() {
if (dynamicChainLocationGroupFiltersBuilder_ == null) {
dynamicChainLocationGroupFiltersBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.ads.googleads.v21.common.ChainFilter, com.google.ads.googleads.v21.common.ChainFilter.Builder, com.google.ads.googleads.v21.common.ChainFilterOrBuilder>(
dynamicChainLocationGroupFilters_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
dynamicChainLocationGroupFilters_ = null;
}
return dynamicChainLocationGroupFiltersBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v21.common.ChainLocationGroup)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v21.common.ChainLocationGroup)
  // NOTE(review): generated static plumbing for ChainLocationGroup — a single
  // shared default (empty) instance plus the message parser. Do not hand-edit;
  // regenerate from the .proto.
  private static final com.google.ads.googleads.v21.common.ChainLocationGroup DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v21.common.ChainLocationGroup();
  }
  public static com.google.ads.googleads.v21.common.ChainLocationGroup getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  private static final com.google.protobuf.Parser<ChainLocationGroup>
      PARSER = new com.google.protobuf.AbstractParser<ChainLocationGroup>() {
    @java.lang.Override
    public ChainLocationGroup parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Attach whatever was parsed so far so callers can inspect the partial message.
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (com.google.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        // Wrap plain I/O failures in the protobuf-specific exception type.
        throw new com.google.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };
  public static com.google.protobuf.Parser<ChainLocationGroup> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ChainLocationGroup> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.ads.googleads.v21.common.ChainLocationGroup getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 35,913 | java-parallelstore/proto-google-cloud-parallelstore-v1beta/src/main/java/com/google/cloud/parallelstore/v1beta/TransferCounters.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/parallelstore/v1beta/parallelstore.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.parallelstore.v1beta;
/**
*
*
* <pre>
* A collection of counters that report the progress of a transfer operation.
* </pre>
*
* Protobuf type {@code google.cloud.parallelstore.v1beta.TransferCounters}
*/
public final class TransferCounters extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.parallelstore.v1beta.TransferCounters)
TransferCountersOrBuilder {
private static final long serialVersionUID = 0L;
  // NOTE(review): generated message-level members of TransferCounters — eight
  // int64 proto3 counter fields plus the standard serialization/equality
  // machinery. Generated file ("DO NOT EDIT"); regenerate from
  // parallelstore.proto instead of hand-modifying.
  // Use TransferCounters.newBuilder() to construct.
  private TransferCounters(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private TransferCounters() {}
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new TransferCounters();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.parallelstore.v1beta.ParallelstoreProto
        .internal_static_google_cloud_parallelstore_v1beta_TransferCounters_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.parallelstore.v1beta.ParallelstoreProto
        .internal_static_google_cloud_parallelstore_v1beta_TransferCounters_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.parallelstore.v1beta.TransferCounters.class,
            com.google.cloud.parallelstore.v1beta.TransferCounters.Builder.class);
  }
  public static final int OBJECTS_FOUND_FIELD_NUMBER = 1;
  private long objectsFound_ = 0L;
  /**
   *
   *
   * <pre>
   * Objects found in the data source that are scheduled to be transferred,
   * excluding any that are filtered based on object conditions or skipped due
   * to sync.
   * </pre>
   *
   * <code>int64 objects_found = 1;</code>
   *
   * @return The objectsFound.
   */
  @java.lang.Override
  public long getObjectsFound() {
    return objectsFound_;
  }
  public static final int BYTES_FOUND_FIELD_NUMBER = 2;
  private long bytesFound_ = 0L;
  /**
   *
   *
   * <pre>
   * Bytes found in the data source that are scheduled to be transferred,
   * excluding any that are filtered based on object conditions or skipped due
   * to sync.
   * </pre>
   *
   * <code>int64 bytes_found = 2;</code>
   *
   * @return The bytesFound.
   */
  @java.lang.Override
  public long getBytesFound() {
    return bytesFound_;
  }
  public static final int OBJECTS_SKIPPED_FIELD_NUMBER = 3;
  private long objectsSkipped_ = 0L;
  /**
   *
   *
   * <pre>
   * Objects in the data source that are not transferred because they already
   * exist in the data destination.
   * </pre>
   *
   * <code>int64 objects_skipped = 3;</code>
   *
   * @return The objectsSkipped.
   */
  @java.lang.Override
  public long getObjectsSkipped() {
    return objectsSkipped_;
  }
  public static final int BYTES_SKIPPED_FIELD_NUMBER = 4;
  private long bytesSkipped_ = 0L;
  /**
   *
   *
   * <pre>
   * Bytes in the data source that are not transferred because they already
   * exist in the data destination.
   * </pre>
   *
   * <code>int64 bytes_skipped = 4;</code>
   *
   * @return The bytesSkipped.
   */
  @java.lang.Override
  public long getBytesSkipped() {
    return bytesSkipped_;
  }
  public static final int OBJECTS_COPIED_FIELD_NUMBER = 5;
  private long objectsCopied_ = 0L;
  /**
   *
   *
   * <pre>
   * Objects that are copied to the data destination.
   * </pre>
   *
   * <code>int64 objects_copied = 5;</code>
   *
   * @return The objectsCopied.
   */
  @java.lang.Override
  public long getObjectsCopied() {
    return objectsCopied_;
  }
  public static final int BYTES_COPIED_FIELD_NUMBER = 6;
  private long bytesCopied_ = 0L;
  /**
   *
   *
   * <pre>
   * Bytes that are copied to the data destination.
   * </pre>
   *
   * <code>int64 bytes_copied = 6;</code>
   *
   * @return The bytesCopied.
   */
  @java.lang.Override
  public long getBytesCopied() {
    return bytesCopied_;
  }
  public static final int OBJECTS_FAILED_FIELD_NUMBER = 7;
  private long objectsFailed_ = 0L;
  /**
   *
   *
   * <pre>
   * Objects that failed to be written to the data destination.
   * </pre>
   *
   * <code>int64 objects_failed = 7;</code>
   *
   * @return The objectsFailed.
   */
  @java.lang.Override
  public long getObjectsFailed() {
    return objectsFailed_;
  }
  public static final int BYTES_FAILED_FIELD_NUMBER = 8;
  private long bytesFailed_ = 0L;
  /**
   *
   *
   * <pre>
   * Bytes that failed to be written to the data destination.
   * </pre>
   *
   * <code>int64 bytes_failed = 8;</code>
   *
   * @return The bytesFailed.
   */
  @java.lang.Override
  public long getBytesFailed() {
    return bytesFailed_;
  }
  // Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // Nothing to validate here: the result is always true once computed.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Fields at their default value (0) are omitted from the wire encoding.
    if (objectsFound_ != 0L) {
      output.writeInt64(1, objectsFound_);
    }
    if (bytesFound_ != 0L) {
      output.writeInt64(2, bytesFound_);
    }
    if (objectsSkipped_ != 0L) {
      output.writeInt64(3, objectsSkipped_);
    }
    if (bytesSkipped_ != 0L) {
      output.writeInt64(4, bytesSkipped_);
    }
    if (objectsCopied_ != 0L) {
      output.writeInt64(5, objectsCopied_);
    }
    if (bytesCopied_ != 0L) {
      output.writeInt64(6, bytesCopied_);
    }
    if (objectsFailed_ != 0L) {
      output.writeInt64(7, objectsFailed_);
    }
    if (bytesFailed_ != 0L) {
      output.writeInt64(8, bytesFailed_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Size is memoized in memoizedSize (-1 means "not yet computed").
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (objectsFound_ != 0L) {
      size += com.google.protobuf.CodedOutputStream.computeInt64Size(1, objectsFound_);
    }
    if (bytesFound_ != 0L) {
      size += com.google.protobuf.CodedOutputStream.computeInt64Size(2, bytesFound_);
    }
    if (objectsSkipped_ != 0L) {
      size += com.google.protobuf.CodedOutputStream.computeInt64Size(3, objectsSkipped_);
    }
    if (bytesSkipped_ != 0L) {
      size += com.google.protobuf.CodedOutputStream.computeInt64Size(4, bytesSkipped_);
    }
    if (objectsCopied_ != 0L) {
      size += com.google.protobuf.CodedOutputStream.computeInt64Size(5, objectsCopied_);
    }
    if (bytesCopied_ != 0L) {
      size += com.google.protobuf.CodedOutputStream.computeInt64Size(6, bytesCopied_);
    }
    if (objectsFailed_ != 0L) {
      size += com.google.protobuf.CodedOutputStream.computeInt64Size(7, objectsFailed_);
    }
    if (bytesFailed_ != 0L) {
      size += com.google.protobuf.CodedOutputStream.computeInt64Size(8, bytesFailed_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.parallelstore.v1beta.TransferCounters)) {
      return super.equals(obj);
    }
    com.google.cloud.parallelstore.v1beta.TransferCounters other =
        (com.google.cloud.parallelstore.v1beta.TransferCounters) obj;
    // Field-by-field comparison, including unknown fields (kept in sync with hashCode()).
    if (getObjectsFound() != other.getObjectsFound()) return false;
    if (getBytesFound() != other.getBytesFound()) return false;
    if (getObjectsSkipped() != other.getObjectsSkipped()) return false;
    if (getBytesSkipped() != other.getBytesSkipped()) return false;
    if (getObjectsCopied() != other.getObjectsCopied()) return false;
    if (getBytesCopied() != other.getBytesCopied()) return false;
    if (getObjectsFailed() != other.getObjectsFailed()) return false;
    if (getBytesFailed() != other.getBytesFailed()) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Memoized; 0 is the "not yet computed" sentinel.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + OBJECTS_FOUND_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getObjectsFound());
    hash = (37 * hash) + BYTES_FOUND_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getBytesFound());
    hash = (37 * hash) + OBJECTS_SKIPPED_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getObjectsSkipped());
    hash = (37 * hash) + BYTES_SKIPPED_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getBytesSkipped());
    hash = (37 * hash) + OBJECTS_COPIED_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getObjectsCopied());
    hash = (37 * hash) + BYTES_COPIED_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getBytesCopied());
    hash = (37 * hash) + OBJECTS_FAILED_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getObjectsFailed());
    hash = (37 * hash) + BYTES_FAILED_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getBytesFailed());
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points for every supported input kind.
  public static com.google.cloud.parallelstore.v1beta.TransferCounters parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.parallelstore.v1beta.TransferCounters parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.parallelstore.v1beta.TransferCounters parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.parallelstore.v1beta.TransferCounters parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.parallelstore.v1beta.TransferCounters parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.parallelstore.v1beta.TransferCounters parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.parallelstore.v1beta.TransferCounters parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.parallelstore.v1beta.TransferCounters parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.parallelstore.v1beta.TransferCounters parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.parallelstore.v1beta.TransferCounters parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.parallelstore.v1beta.TransferCounters parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.parallelstore.v1beta.TransferCounters parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.parallelstore.v1beta.TransferCounters prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Avoids a redundant merge when starting from the shared default instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* A collection of counters that report the progress of a transfer operation.
* </pre>
*
* Protobuf type {@code google.cloud.parallelstore.v1beta.TransferCounters}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.parallelstore.v1beta.TransferCounters)
com.google.cloud.parallelstore.v1beta.TransferCountersOrBuilder {
    // NOTE(review): generated Builder internals for TransferCounters.
    // bitField0_ is a presence bitmap: one bit per field (0x1 = objects_found …
    // 0x80 = bytes_failed), set by the setters below and consumed by
    // buildPartial0(). Generated code ("DO NOT EDIT") — regenerate from the
    // .proto instead of hand-editing.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.parallelstore.v1beta.ParallelstoreProto
          .internal_static_google_cloud_parallelstore_v1beta_TransferCounters_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.parallelstore.v1beta.ParallelstoreProto
          .internal_static_google_cloud_parallelstore_v1beta_TransferCounters_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.parallelstore.v1beta.TransferCounters.class,
              com.google.cloud.parallelstore.v1beta.TransferCounters.Builder.class);
    }
    // Construct using com.google.cloud.parallelstore.v1beta.TransferCounters.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      // Resets every counter to 0 and clears the presence bitmap.
      super.clear();
      bitField0_ = 0;
      objectsFound_ = 0L;
      bytesFound_ = 0L;
      objectsSkipped_ = 0L;
      bytesSkipped_ = 0L;
      objectsCopied_ = 0L;
      bytesCopied_ = 0L;
      objectsFailed_ = 0L;
      bytesFailed_ = 0L;
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.parallelstore.v1beta.ParallelstoreProto
          .internal_static_google_cloud_parallelstore_v1beta_TransferCounters_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.parallelstore.v1beta.TransferCounters getDefaultInstanceForType() {
      return com.google.cloud.parallelstore.v1beta.TransferCounters.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.parallelstore.v1beta.TransferCounters build() {
      com.google.cloud.parallelstore.v1beta.TransferCounters result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.parallelstore.v1beta.TransferCounters buildPartial() {
      com.google.cloud.parallelstore.v1beta.TransferCounters result =
          new com.google.cloud.parallelstore.v1beta.TransferCounters(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies only the fields whose presence bit is set into the freshly built message.
    private void buildPartial0(com.google.cloud.parallelstore.v1beta.TransferCounters result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.objectsFound_ = objectsFound_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.bytesFound_ = bytesFound_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.objectsSkipped_ = objectsSkipped_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.bytesSkipped_ = bytesSkipped_;
      }
      if (((from_bitField0_ & 0x00000010) != 0)) {
        result.objectsCopied_ = objectsCopied_;
      }
      if (((from_bitField0_ & 0x00000020) != 0)) {
        result.bytesCopied_ = bytesCopied_;
      }
      if (((from_bitField0_ & 0x00000040) != 0)) {
        result.objectsFailed_ = objectsFailed_;
      }
      if (((from_bitField0_ & 0x00000080) != 0)) {
        result.bytesFailed_ = bytesFailed_;
      }
    }
    // Standard generated pass-throughs to GeneratedMessageV3.Builder.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      // Fast path for the concrete type; otherwise fall back to reflective merge.
      if (other instanceof com.google.cloud.parallelstore.v1beta.TransferCounters) {
        return mergeFrom((com.google.cloud.parallelstore.v1beta.TransferCounters) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(com.google.cloud.parallelstore.v1beta.TransferCounters other) {
      if (other == com.google.cloud.parallelstore.v1beta.TransferCounters.getDefaultInstance())
        return this;
      // Only non-zero (non-default) counters from `other` overwrite this builder.
      if (other.getObjectsFound() != 0L) {
        setObjectsFound(other.getObjectsFound());
      }
      if (other.getBytesFound() != 0L) {
        setBytesFound(other.getBytesFound());
      }
      if (other.getObjectsSkipped() != 0L) {
        setObjectsSkipped(other.getObjectsSkipped());
      }
      if (other.getBytesSkipped() != 0L) {
        setBytesSkipped(other.getBytesSkipped());
      }
      if (other.getObjectsCopied() != 0L) {
        setObjectsCopied(other.getObjectsCopied());
      }
      if (other.getBytesCopied() != 0L) {
        setBytesCopied(other.getBytesCopied());
      }
      if (other.getObjectsFailed() != 0L) {
        setObjectsFailed(other.getObjectsFailed());
      }
      if (other.getBytesFailed() != 0L) {
        setBytesFailed(other.getBytesFailed());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // Wire tags: (field_number << 3) | wire_type; all fields here are
          // varint (wire type 0), so field N maps to case N*8.
          switch (tag) {
            case 0:
              done = true;
              break;
            case 8:
              {
                objectsFound_ = input.readInt64();
                bitField0_ |= 0x00000001;
                break;
              } // case 8
            case 16:
              {
                bytesFound_ = input.readInt64();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
            case 24:
              {
                objectsSkipped_ = input.readInt64();
                bitField0_ |= 0x00000004;
                break;
              } // case 24
            case 32:
              {
                bytesSkipped_ = input.readInt64();
                bitField0_ |= 0x00000008;
                break;
              } // case 32
            case 40:
              {
                objectsCopied_ = input.readInt64();
                bitField0_ |= 0x00000010;
                break;
              } // case 40
            case 48:
              {
                bytesCopied_ = input.readInt64();
                bitField0_ |= 0x00000020;
                break;
              } // case 48
            case 56:
              {
                objectsFailed_ = input.readInt64();
                bitField0_ |= 0x00000040;
                break;
              } // case 56
            case 64:
              {
                bytesFailed_ = input.readInt64();
                bitField0_ |= 0x00000080;
                break;
              } // case 64
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    private int bitField0_;
    private long objectsFound_;
    /**
     *
     *
     * <pre>
     * Objects found in the data source that are scheduled to be transferred,
     * excluding any that are filtered based on object conditions or skipped due
     * to sync.
     * </pre>
     *
     * <code>int64 objects_found = 1;</code>
     *
     * @return The objectsFound.
     */
    @java.lang.Override
    public long getObjectsFound() {
      return objectsFound_;
    }
    /**
     *
     *
     * <pre>
     * Objects found in the data source that are scheduled to be transferred,
     * excluding any that are filtered based on object conditions or skipped due
     * to sync.
     * </pre>
     *
     * <code>int64 objects_found = 1;</code>
     *
     * @param value The objectsFound to set.
     * @return This builder for chaining.
     */
    public Builder setObjectsFound(long value) {
      objectsFound_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Objects found in the data source that are scheduled to be transferred,
     * excluding any that are filtered based on object conditions or skipped due
     * to sync.
     * </pre>
     *
     * <code>int64 objects_found = 1;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearObjectsFound() {
      bitField0_ = (bitField0_ & ~0x00000001);
      objectsFound_ = 0L;
      onChanged();
      return this;
    }
    private long bytesFound_;
    /**
     *
     *
     * <pre>
     * Bytes found in the data source that are scheduled to be transferred,
     * excluding any that are filtered based on object conditions or skipped due
     * to sync.
     * </pre>
     *
     * <code>int64 bytes_found = 2;</code>
     *
     * @return The bytesFound.
     */
    @java.lang.Override
    public long getBytesFound() {
      return bytesFound_;
    }
    /**
     *
     *
     * <pre>
     * Bytes found in the data source that are scheduled to be transferred,
     * excluding any that are filtered based on object conditions or skipped due
     * to sync.
     * </pre>
     *
     * <code>int64 bytes_found = 2;</code>
     *
     * @param value The bytesFound to set.
     * @return This builder for chaining.
     */
    public Builder setBytesFound(long value) {
      bytesFound_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Bytes found in the data source that are scheduled to be transferred,
     * excluding any that are filtered based on object conditions or skipped due
     * to sync.
     * </pre>
     *
     * <code>int64 bytes_found = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearBytesFound() {
      bitField0_ = (bitField0_ & ~0x00000002);
      bytesFound_ = 0L;
      onChanged();
      return this;
    }
    private long objectsSkipped_;
    /**
     *
     *
     * <pre>
     * Objects in the data source that are not transferred because they already
     * exist in the data destination.
     * </pre>
     *
     * <code>int64 objects_skipped = 3;</code>
     *
     * @return The objectsSkipped.
     */
    @java.lang.Override
    public long getObjectsSkipped() {
      return objectsSkipped_;
    }
    /**
     *
     *
     * <pre>
     * Objects in the data source that are not transferred because they already
     * exist in the data destination.
     * </pre>
     *
     * <code>int64 objects_skipped = 3;</code>
     *
     * @param value The objectsSkipped to set.
     * @return This builder for chaining.
     */
    public Builder setObjectsSkipped(long value) {
      objectsSkipped_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Objects in the data source that are not transferred because they already
     * exist in the data destination.
     * </pre>
     *
     * <code>int64 objects_skipped = 3;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearObjectsSkipped() {
      bitField0_ = (bitField0_ & ~0x00000004);
      objectsSkipped_ = 0L;
      onChanged();
      return this;
    }
    private long bytesSkipped_;
    /**
     *
     *
     * <pre>
     * Bytes in the data source that are not transferred because they already
     * exist in the data destination.
     * </pre>
     *
     * <code>int64 bytes_skipped = 4;</code>
     *
     * @return The bytesSkipped.
     */
    @java.lang.Override
    public long getBytesSkipped() {
      return bytesSkipped_;
    }
    /**
     *
     *
     * <pre>
     * Bytes in the data source that are not transferred because they already
     * exist in the data destination.
     * </pre>
     *
     * <code>int64 bytes_skipped = 4;</code>
     *
     * @param value The bytesSkipped to set.
     * @return This builder for chaining.
     */
    public Builder setBytesSkipped(long value) {
      bytesSkipped_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Bytes in the data source that are not transferred because they already
     * exist in the data destination.
     * </pre>
     *
     * <code>int64 bytes_skipped = 4;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearBytesSkipped() {
      bitField0_ = (bitField0_ & ~0x00000008);
      bytesSkipped_ = 0L;
      onChanged();
      return this;
    }
    private long objectsCopied_;
    /**
     *
     *
     * <pre>
     * Objects that are copied to the data destination.
     * </pre>
     *
     * <code>int64 objects_copied = 5;</code>
     *
     * @return The objectsCopied.
     */
    @java.lang.Override
    public long getObjectsCopied() {
      return objectsCopied_;
    }
    /**
     *
     *
     * <pre>
     * Objects that are copied to the data destination.
     * </pre>
     *
     * <code>int64 objects_copied = 5;</code>
     *
     * @param value The objectsCopied to set.
     * @return This builder for chaining.
     */
    public Builder setObjectsCopied(long value) {
      objectsCopied_ = value;
      bitField0_ |= 0x00000010;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Objects that are copied to the data destination.
     * </pre>
     *
     * <code>int64 objects_copied = 5;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearObjectsCopied() {
      bitField0_ = (bitField0_ & ~0x00000010);
      objectsCopied_ = 0L;
      onChanged();
      return this;
    }
    private long bytesCopied_;
    /**
     *
     *
     * <pre>
     * Bytes that are copied to the data destination.
     * </pre>
     *
     * <code>int64 bytes_copied = 6;</code>
     *
     * @return The bytesCopied.
     */
    @java.lang.Override
    public long getBytesCopied() {
      return bytesCopied_;
    }
    /**
     *
     *
     * <pre>
     * Bytes that are copied to the data destination.
     * </pre>
     *
     * <code>int64 bytes_copied = 6;</code>
     *
     * @param value The bytesCopied to set.
     * @return This builder for chaining.
     */
    public Builder setBytesCopied(long value) {
      bytesCopied_ = value;
      bitField0_ |= 0x00000020;
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Bytes that are copied to the data destination.
* </pre>
*
* <code>int64 bytes_copied = 6;</code>
*
* @return This builder for chaining.
*/
public Builder clearBytesCopied() {
bitField0_ = (bitField0_ & ~0x00000020);
bytesCopied_ = 0L;
onChanged();
return this;
}
private long objectsFailed_;
/**
*
*
* <pre>
* Objects that failed to be written to the data destination.
* </pre>
*
* <code>int64 objects_failed = 7;</code>
*
* @return The objectsFailed.
*/
@java.lang.Override
public long getObjectsFailed() {
return objectsFailed_;
}
/**
*
*
* <pre>
* Objects that failed to be written to the data destination.
* </pre>
*
* <code>int64 objects_failed = 7;</code>
*
* @param value The objectsFailed to set.
* @return This builder for chaining.
*/
public Builder setObjectsFailed(long value) {
objectsFailed_ = value;
bitField0_ |= 0x00000040;
onChanged();
return this;
}
/**
*
*
* <pre>
* Objects that failed to be written to the data destination.
* </pre>
*
* <code>int64 objects_failed = 7;</code>
*
* @return This builder for chaining.
*/
public Builder clearObjectsFailed() {
bitField0_ = (bitField0_ & ~0x00000040);
objectsFailed_ = 0L;
onChanged();
return this;
}
private long bytesFailed_;
/**
*
*
* <pre>
* Bytes that failed to be written to the data destination.
* </pre>
*
* <code>int64 bytes_failed = 8;</code>
*
* @return The bytesFailed.
*/
@java.lang.Override
public long getBytesFailed() {
return bytesFailed_;
}
/**
*
*
* <pre>
* Bytes that failed to be written to the data destination.
* </pre>
*
* <code>int64 bytes_failed = 8;</code>
*
* @param value The bytesFailed to set.
* @return This builder for chaining.
*/
public Builder setBytesFailed(long value) {
bytesFailed_ = value;
bitField0_ |= 0x00000080;
onChanged();
return this;
}
/**
*
*
* <pre>
* Bytes that failed to be written to the data destination.
* </pre>
*
* <code>int64 bytes_failed = 8;</code>
*
* @return This builder for chaining.
*/
public Builder clearBytesFailed() {
bitField0_ = (bitField0_ & ~0x00000080);
bytesFailed_ = 0L;
onChanged();
return this;
}
    // Unknown-field handling is fully delegated to GeneratedMessageV3.Builder;
    // these overrides exist only to narrow the return type to this Builder.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.parallelstore.v1beta.TransferCounters)
}
// @@protoc_insertion_point(class_scope:google.cloud.parallelstore.v1beta.TransferCounters)
  // Shared singleton representing the default (all fields unset) message.
  private static final com.google.cloud.parallelstore.v1beta.TransferCounters DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.parallelstore.v1beta.TransferCounters();
  }
  public static com.google.cloud.parallelstore.v1beta.TransferCounters getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Stateless parser: delegates to Builder.mergeFrom and, on failure, attaches
  // whatever was parsed so far via setUnfinishedMessage() so callers can
  // recover a partial message from the thrown exception.
  private static final com.google.protobuf.Parser<TransferCounters> PARSER =
      new com.google.protobuf.AbstractParser<TransferCounters>() {
        @java.lang.Override
        public TransferCounters parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            // Convert missing-required-field failures into the protobuf exception type.
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures so callers always see InvalidProtocolBufferException.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<TransferCounters> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<TransferCounters> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.parallelstore.v1beta.TransferCounters getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// ---- End of generated file: google-cloud-parallelstore TransferCounters.java ----
// ---- Next file: googleapis/google-cloud-java
//      java-datalabeling/proto-google-cloud-datalabeling-v1beta1/
//      src/main/java/com/google/cloud/datalabeling/v1beta1/ListDatasetsResponse.java ----
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/datalabeling/v1beta1/data_labeling_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.datalabeling.v1beta1;
/**
*
*
* <pre>
* Results of listing datasets within a project.
* </pre>
*
* Protobuf type {@code google.cloud.datalabeling.v1beta1.ListDatasetsResponse}
*/
public final class ListDatasetsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.datalabeling.v1beta1.ListDatasetsResponse)
ListDatasetsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListDatasetsResponse.newBuilder() to construct.
private ListDatasetsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListDatasetsResponse() {
datasets_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListDatasetsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.datalabeling.v1beta1.DataLabelingServiceOuterClass
.internal_static_google_cloud_datalabeling_v1beta1_ListDatasetsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.datalabeling.v1beta1.DataLabelingServiceOuterClass
.internal_static_google_cloud_datalabeling_v1beta1_ListDatasetsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.datalabeling.v1beta1.ListDatasetsResponse.class,
com.google.cloud.datalabeling.v1beta1.ListDatasetsResponse.Builder.class);
}
public static final int DATASETS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.datalabeling.v1beta1.Dataset> datasets_;
/**
*
*
* <pre>
* The list of datasets to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.Dataset datasets = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.datalabeling.v1beta1.Dataset> getDatasetsList() {
return datasets_;
}
/**
*
*
* <pre>
* The list of datasets to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.Dataset datasets = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.datalabeling.v1beta1.DatasetOrBuilder>
getDatasetsOrBuilderList() {
return datasets_;
}
/**
*
*
* <pre>
* The list of datasets to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.Dataset datasets = 1;</code>
*/
@java.lang.Override
public int getDatasetsCount() {
return datasets_.size();
}
/**
*
*
* <pre>
* The list of datasets to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.Dataset datasets = 1;</code>
*/
@java.lang.Override
public com.google.cloud.datalabeling.v1beta1.Dataset getDatasets(int index) {
return datasets_.get(index);
}
/**
*
*
* <pre>
* The list of datasets to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.Dataset datasets = 1;</code>
*/
@java.lang.Override
public com.google.cloud.datalabeling.v1beta1.DatasetOrBuilder getDatasetsOrBuilder(int index) {
return datasets_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token to retrieve next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Field is still stored as wire bytes: decode UTF-8 once and cache the
      // String back into nextPageToken_ so later calls return it directly.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s;
      return s;
    }
  }
/**
*
*
* <pre>
* A token to retrieve next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      // Mirror of getNextPageToken(): encode once and cache the ByteString.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Tri-state memo for isInitialized(): -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // This message has no required fields, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Field 1: repeated Dataset messages.
    for (int i = 0; i < datasets_.size(); i++) {
      output.writeMessage(1, datasets_.get(i));
    }
    // Field 2: next_page_token — omitted when empty, per proto3 semantics.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    // Preserve any fields that were unknown at parse time.
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize caches the computed size; -1 means "not yet computed".
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < datasets_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, datasets_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    // Non-ListDatasetsResponse arguments fall back to the superclass check.
    if (!(obj instanceof com.google.cloud.datalabeling.v1beta1.ListDatasetsResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.datalabeling.v1beta1.ListDatasetsResponse other =
        (com.google.cloud.datalabeling.v1beta1.ListDatasetsResponse) obj;
    // Field-by-field comparison, including any unknown fields carried along.
    if (!getDatasetsList().equals(other.getDatasetsList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // 0 is the "not yet computed" sentinel for the memoized hash.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    // Repeated field only contributes when non-empty, matching equals().
    if (getDatasetsCount() > 0) {
      hash = (37 * hash) + DATASETS_FIELD_NUMBER;
      hash = (53 * hash) + getDatasetsList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.datalabeling.v1beta1.ListDatasetsResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datalabeling.v1beta1.ListDatasetsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datalabeling.v1beta1.ListDatasetsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datalabeling.v1beta1.ListDatasetsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datalabeling.v1beta1.ListDatasetsResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datalabeling.v1beta1.ListDatasetsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datalabeling.v1beta1.ListDatasetsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.datalabeling.v1beta1.ListDatasetsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.datalabeling.v1beta1.ListDatasetsResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.datalabeling.v1beta1.ListDatasetsResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.datalabeling.v1beta1.ListDatasetsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.datalabeling.v1beta1.ListDatasetsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.datalabeling.v1beta1.ListDatasetsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Results of listing datasets within a project.
* </pre>
*
* Protobuf type {@code google.cloud.datalabeling.v1beta1.ListDatasetsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.datalabeling.v1beta1.ListDatasetsResponse)
com.google.cloud.datalabeling.v1beta1.ListDatasetsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.datalabeling.v1beta1.DataLabelingServiceOuterClass
.internal_static_google_cloud_datalabeling_v1beta1_ListDatasetsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.datalabeling.v1beta1.DataLabelingServiceOuterClass
.internal_static_google_cloud_datalabeling_v1beta1_ListDatasetsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.datalabeling.v1beta1.ListDatasetsResponse.class,
com.google.cloud.datalabeling.v1beta1.ListDatasetsResponse.Builder.class);
}
// Construct using com.google.cloud.datalabeling.v1beta1.ListDatasetsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (datasetsBuilder_ == null) {
datasets_ = java.util.Collections.emptyList();
} else {
datasets_ = null;
datasetsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.datalabeling.v1beta1.DataLabelingServiceOuterClass
.internal_static_google_cloud_datalabeling_v1beta1_ListDatasetsResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.datalabeling.v1beta1.ListDatasetsResponse getDefaultInstanceForType() {
return com.google.cloud.datalabeling.v1beta1.ListDatasetsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.datalabeling.v1beta1.ListDatasetsResponse build() {
com.google.cloud.datalabeling.v1beta1.ListDatasetsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.datalabeling.v1beta1.ListDatasetsResponse buildPartial() {
com.google.cloud.datalabeling.v1beta1.ListDatasetsResponse result =
new com.google.cloud.datalabeling.v1beta1.ListDatasetsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.datalabeling.v1beta1.ListDatasetsResponse result) {
if (datasetsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
datasets_ = java.util.Collections.unmodifiableList(datasets_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.datasets_ = datasets_;
} else {
result.datasets_ = datasetsBuilder_.build();
}
}
private void buildPartial0(com.google.cloud.datalabeling.v1beta1.ListDatasetsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.datalabeling.v1beta1.ListDatasetsResponse) {
return mergeFrom((com.google.cloud.datalabeling.v1beta1.ListDatasetsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
    public Builder mergeFrom(com.google.cloud.datalabeling.v1beta1.ListDatasetsResponse other) {
      // Merging the default instance is a no-op.
      if (other == com.google.cloud.datalabeling.v1beta1.ListDatasetsResponse.getDefaultInstance())
        return this;
      if (datasetsBuilder_ == null) {
        // Plain-list mode: adopt the other message's (immutable) list directly
        // when ours is empty, otherwise copy-on-write and append.
        if (!other.datasets_.isEmpty()) {
          if (datasets_.isEmpty()) {
            datasets_ = other.datasets_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureDatasetsIsMutable();
            datasets_.addAll(other.datasets_);
          }
          onChanged();
        }
      } else {
        // Field-builder mode: route the merge through the RepeatedFieldBuilderV3
        // so any outstanding nested builders stay consistent.
        if (!other.datasets_.isEmpty()) {
          if (datasetsBuilder_.isEmpty()) {
            datasetsBuilder_.dispose();
            datasetsBuilder_ = null;
            datasets_ = other.datasets_;
            bitField0_ = (bitField0_ & ~0x00000001);
            datasetsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getDatasetsFieldBuilder()
                    : null;
          } else {
            datasetsBuilder_.addAllMessages(other.datasets_);
          }
        }
      }
      // Scalar merge: non-empty token on the other side wins.
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        // Wire-format dispatch loop: each tag encodes (field_number << 3) | wire_type.
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of input.
              done = true;
              break;
            case 10:
              {
                // Field 1 (datasets), length-delimited message.
                com.google.cloud.datalabeling.v1beta1.Dataset m =
                    input.readMessage(
                        com.google.cloud.datalabeling.v1beta1.Dataset.parser(), extensionRegistry);
                if (datasetsBuilder_ == null) {
                  ensureDatasetsIsMutable();
                  datasets_.add(m);
                } else {
                  datasetsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                // Field 2 (next_page_token), length-delimited UTF-8 string.
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                // Unrecognized tags are preserved as unknown fields.
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private java.util.List<com.google.cloud.datalabeling.v1beta1.Dataset> datasets_ =
java.util.Collections.emptyList();
    private void ensureDatasetsIsMutable() {
      // Copy-on-write: bit 0x01 records that datasets_ is already a private
      // mutable copy; otherwise clone it before the first mutation.
      if (!((bitField0_ & 0x00000001) != 0)) {
        datasets_ =
            new java.util.ArrayList<com.google.cloud.datalabeling.v1beta1.Dataset>(datasets_);
        bitField0_ |= 0x00000001;
      }
    }
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.datalabeling.v1beta1.Dataset,
com.google.cloud.datalabeling.v1beta1.Dataset.Builder,
com.google.cloud.datalabeling.v1beta1.DatasetOrBuilder>
datasetsBuilder_;
/**
*
*
* <pre>
* The list of datasets to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.Dataset datasets = 1;</code>
*/
public java.util.List<com.google.cloud.datalabeling.v1beta1.Dataset> getDatasetsList() {
if (datasetsBuilder_ == null) {
return java.util.Collections.unmodifiableList(datasets_);
} else {
return datasetsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The list of datasets to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.Dataset datasets = 1;</code>
*/
public int getDatasetsCount() {
if (datasetsBuilder_ == null) {
return datasets_.size();
} else {
return datasetsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The list of datasets to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.Dataset datasets = 1;</code>
*/
public com.google.cloud.datalabeling.v1beta1.Dataset getDatasets(int index) {
if (datasetsBuilder_ == null) {
return datasets_.get(index);
} else {
return datasetsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The list of datasets to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.Dataset datasets = 1;</code>
*/
public Builder setDatasets(int index, com.google.cloud.datalabeling.v1beta1.Dataset value) {
if (datasetsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDatasetsIsMutable();
datasets_.set(index, value);
onChanged();
} else {
datasetsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of datasets to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.Dataset datasets = 1;</code>
*/
public Builder setDatasets(
int index, com.google.cloud.datalabeling.v1beta1.Dataset.Builder builderForValue) {
if (datasetsBuilder_ == null) {
ensureDatasetsIsMutable();
datasets_.set(index, builderForValue.build());
onChanged();
} else {
datasetsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of datasets to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.Dataset datasets = 1;</code>
*/
public Builder addDatasets(com.google.cloud.datalabeling.v1beta1.Dataset value) {
if (datasetsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDatasetsIsMutable();
datasets_.add(value);
onChanged();
} else {
datasetsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The list of datasets to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.Dataset datasets = 1;</code>
*/
public Builder addDatasets(int index, com.google.cloud.datalabeling.v1beta1.Dataset value) {
if (datasetsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDatasetsIsMutable();
datasets_.add(index, value);
onChanged();
} else {
datasetsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of datasets to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.Dataset datasets = 1;</code>
*/
public Builder addDatasets(
com.google.cloud.datalabeling.v1beta1.Dataset.Builder builderForValue) {
if (datasetsBuilder_ == null) {
ensureDatasetsIsMutable();
datasets_.add(builderForValue.build());
onChanged();
} else {
datasetsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of datasets to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.Dataset datasets = 1;</code>
*/
public Builder addDatasets(
int index, com.google.cloud.datalabeling.v1beta1.Dataset.Builder builderForValue) {
if (datasetsBuilder_ == null) {
ensureDatasetsIsMutable();
datasets_.add(index, builderForValue.build());
onChanged();
} else {
datasetsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of datasets to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.Dataset datasets = 1;</code>
*/
public Builder addAllDatasets(
java.lang.Iterable<? extends com.google.cloud.datalabeling.v1beta1.Dataset> values) {
if (datasetsBuilder_ == null) {
ensureDatasetsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, datasets_);
onChanged();
} else {
datasetsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The list of datasets to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.Dataset datasets = 1;</code>
*/
public Builder clearDatasets() {
if (datasetsBuilder_ == null) {
datasets_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
datasetsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The list of datasets to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.Dataset datasets = 1;</code>
*/
public Builder removeDatasets(int index) {
if (datasetsBuilder_ == null) {
ensureDatasetsIsMutable();
datasets_.remove(index);
onChanged();
} else {
datasetsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The list of datasets to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.Dataset datasets = 1;</code>
*/
public com.google.cloud.datalabeling.v1beta1.Dataset.Builder getDatasetsBuilder(int index) {
return getDatasetsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The list of datasets to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.Dataset datasets = 1;</code>
*/
public com.google.cloud.datalabeling.v1beta1.DatasetOrBuilder getDatasetsOrBuilder(int index) {
if (datasetsBuilder_ == null) {
return datasets_.get(index);
} else {
return datasetsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The list of datasets to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.Dataset datasets = 1;</code>
*/
public java.util.List<? extends com.google.cloud.datalabeling.v1beta1.DatasetOrBuilder>
getDatasetsOrBuilderList() {
if (datasetsBuilder_ != null) {
return datasetsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(datasets_);
}
}
/**
*
*
* <pre>
* The list of datasets to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.Dataset datasets = 1;</code>
*/
public com.google.cloud.datalabeling.v1beta1.Dataset.Builder addDatasetsBuilder() {
return getDatasetsFieldBuilder()
.addBuilder(com.google.cloud.datalabeling.v1beta1.Dataset.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of datasets to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.Dataset datasets = 1;</code>
*/
public com.google.cloud.datalabeling.v1beta1.Dataset.Builder addDatasetsBuilder(int index) {
return getDatasetsFieldBuilder()
.addBuilder(index, com.google.cloud.datalabeling.v1beta1.Dataset.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of datasets to return.
* </pre>
*
* <code>repeated .google.cloud.datalabeling.v1beta1.Dataset datasets = 1;</code>
*/
public java.util.List<com.google.cloud.datalabeling.v1beta1.Dataset.Builder>
getDatasetsBuilderList() {
return getDatasetsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.datalabeling.v1beta1.Dataset,
com.google.cloud.datalabeling.v1beta1.Dataset.Builder,
com.google.cloud.datalabeling.v1beta1.DatasetOrBuilder>
getDatasetsFieldBuilder() {
if (datasetsBuilder_ == null) {
datasetsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.datalabeling.v1beta1.Dataset,
com.google.cloud.datalabeling.v1beta1.Dataset.Builder,
com.google.cloud.datalabeling.v1beta1.DatasetOrBuilder>(
datasets_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
datasets_ = null;
}
return datasetsBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token to retrieve next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token to retrieve next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token to retrieve next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token to retrieve next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* A token to retrieve next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
    /** Replaces the unknown-field set verbatim (generated passthrough to the superclass). */
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    /** Merges the given unknown fields into this builder (generated passthrough). */
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.datalabeling.v1beta1.ListDatasetsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.ListDatasetsResponse)
  // Shared immutable default instance; unset message-typed fields resolve to it.
  private static final com.google.cloud.datalabeling.v1beta1.ListDatasetsResponse DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.datalabeling.v1beta1.ListDatasetsResponse();
  }
  /** Returns the singleton all-fields-unset instance of {@code ListDatasetsResponse}. */
  public static com.google.cloud.datalabeling.v1beta1.ListDatasetsResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  /**
   * Parser singleton for {@code ListDatasetsResponse}: delegates to
   * {@code Builder.mergeFrom} and normalizes every failure into an
   * {@code InvalidProtocolBufferException} carrying the partially parsed message.
   */
  private static final com.google.protobuf.Parser<ListDatasetsResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListDatasetsResponse>() {
        @java.lang.Override
        public ListDatasetsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach whatever was parsed so far so callers can inspect it.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            // Missing required fields: convert to the protobuf exception type.
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Plain I/O failure: wrap it, still attaching the partial message.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  /** Returns the shared parser for this message type. */
  public static com.google.protobuf.Parser<ListDatasetsResponse> parser() {
    return PARSER;
  }
  /** Returns the shared parser (instance-level accessor required by the Message interface). */
  @java.lang.Override
  public com.google.protobuf.Parser<ListDatasetsResponse> getParserForType() {
    return PARSER;
  }
  /** Returns the singleton default instance (instance-level accessor for generic protobuf code). */
  @java.lang.Override
  public com.google.cloud.datalabeling.v1beta1.ListDatasetsResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// NOTE(review): removed trailing non-Java text ("Subsets and Splits ..."),
// which is website/table residue from the extraction pipeline, not source code.