repo_id
stringclasses
875 values
size
int64
974
38.9k
file_path
stringlengths
10
308
content
stringlengths
974
38.9k
googleapis/google-cloud-java
36,703
java-chronicle/grpc-google-cloud-chronicle-v1/src/main/java/com/google/cloud/chronicle/v1/EntityServiceGrpc.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.chronicle.v1; import static io.grpc.MethodDescriptor.generateFullMethodName; /** * * * <pre> * EntityService contains apis for finding entities. * </pre> */ @javax.annotation.Generated( value = "by gRPC proto compiler", comments = "Source: google/cloud/chronicle/v1/entity.proto") @io.grpc.stub.annotations.GrpcGenerated public final class EntityServiceGrpc { private EntityServiceGrpc() {} public static final java.lang.String SERVICE_NAME = "google.cloud.chronicle.v1.EntityService"; // Static method descriptors that strictly reflect the proto. 
private static volatile io.grpc.MethodDescriptor< com.google.cloud.chronicle.v1.GetWatchlistRequest, com.google.cloud.chronicle.v1.Watchlist> getGetWatchlistMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "GetWatchlist", requestType = com.google.cloud.chronicle.v1.GetWatchlistRequest.class, responseType = com.google.cloud.chronicle.v1.Watchlist.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.cloud.chronicle.v1.GetWatchlistRequest, com.google.cloud.chronicle.v1.Watchlist> getGetWatchlistMethod() { io.grpc.MethodDescriptor< com.google.cloud.chronicle.v1.GetWatchlistRequest, com.google.cloud.chronicle.v1.Watchlist> getGetWatchlistMethod; if ((getGetWatchlistMethod = EntityServiceGrpc.getGetWatchlistMethod) == null) { synchronized (EntityServiceGrpc.class) { if ((getGetWatchlistMethod = EntityServiceGrpc.getGetWatchlistMethod) == null) { EntityServiceGrpc.getGetWatchlistMethod = getGetWatchlistMethod = io.grpc.MethodDescriptor .<com.google.cloud.chronicle.v1.GetWatchlistRequest, com.google.cloud.chronicle.v1.Watchlist> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GetWatchlist")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.chronicle.v1.GetWatchlistRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.chronicle.v1.Watchlist.getDefaultInstance())) .setSchemaDescriptor( new EntityServiceMethodDescriptorSupplier("GetWatchlist")) .build(); } } } return getGetWatchlistMethod; } private static volatile io.grpc.MethodDescriptor< com.google.cloud.chronicle.v1.ListWatchlistsRequest, com.google.cloud.chronicle.v1.ListWatchlistsResponse> getListWatchlistsMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "ListWatchlists", requestType = 
com.google.cloud.chronicle.v1.ListWatchlistsRequest.class, responseType = com.google.cloud.chronicle.v1.ListWatchlistsResponse.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.cloud.chronicle.v1.ListWatchlistsRequest, com.google.cloud.chronicle.v1.ListWatchlistsResponse> getListWatchlistsMethod() { io.grpc.MethodDescriptor< com.google.cloud.chronicle.v1.ListWatchlistsRequest, com.google.cloud.chronicle.v1.ListWatchlistsResponse> getListWatchlistsMethod; if ((getListWatchlistsMethod = EntityServiceGrpc.getListWatchlistsMethod) == null) { synchronized (EntityServiceGrpc.class) { if ((getListWatchlistsMethod = EntityServiceGrpc.getListWatchlistsMethod) == null) { EntityServiceGrpc.getListWatchlistsMethod = getListWatchlistsMethod = io.grpc.MethodDescriptor .<com.google.cloud.chronicle.v1.ListWatchlistsRequest, com.google.cloud.chronicle.v1.ListWatchlistsResponse> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "ListWatchlists")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.chronicle.v1.ListWatchlistsRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.chronicle.v1.ListWatchlistsResponse .getDefaultInstance())) .setSchemaDescriptor( new EntityServiceMethodDescriptorSupplier("ListWatchlists")) .build(); } } } return getListWatchlistsMethod; } private static volatile io.grpc.MethodDescriptor< com.google.cloud.chronicle.v1.CreateWatchlistRequest, com.google.cloud.chronicle.v1.Watchlist> getCreateWatchlistMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "CreateWatchlist", requestType = com.google.cloud.chronicle.v1.CreateWatchlistRequest.class, responseType = com.google.cloud.chronicle.v1.Watchlist.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static 
io.grpc.MethodDescriptor< com.google.cloud.chronicle.v1.CreateWatchlistRequest, com.google.cloud.chronicle.v1.Watchlist> getCreateWatchlistMethod() { io.grpc.MethodDescriptor< com.google.cloud.chronicle.v1.CreateWatchlistRequest, com.google.cloud.chronicle.v1.Watchlist> getCreateWatchlistMethod; if ((getCreateWatchlistMethod = EntityServiceGrpc.getCreateWatchlistMethod) == null) { synchronized (EntityServiceGrpc.class) { if ((getCreateWatchlistMethod = EntityServiceGrpc.getCreateWatchlistMethod) == null) { EntityServiceGrpc.getCreateWatchlistMethod = getCreateWatchlistMethod = io.grpc.MethodDescriptor .<com.google.cloud.chronicle.v1.CreateWatchlistRequest, com.google.cloud.chronicle.v1.Watchlist> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "CreateWatchlist")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.chronicle.v1.CreateWatchlistRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.chronicle.v1.Watchlist.getDefaultInstance())) .setSchemaDescriptor( new EntityServiceMethodDescriptorSupplier("CreateWatchlist")) .build(); } } } return getCreateWatchlistMethod; } private static volatile io.grpc.MethodDescriptor< com.google.cloud.chronicle.v1.UpdateWatchlistRequest, com.google.cloud.chronicle.v1.Watchlist> getUpdateWatchlistMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "UpdateWatchlist", requestType = com.google.cloud.chronicle.v1.UpdateWatchlistRequest.class, responseType = com.google.cloud.chronicle.v1.Watchlist.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.cloud.chronicle.v1.UpdateWatchlistRequest, com.google.cloud.chronicle.v1.Watchlist> getUpdateWatchlistMethod() { io.grpc.MethodDescriptor< com.google.cloud.chronicle.v1.UpdateWatchlistRequest, 
com.google.cloud.chronicle.v1.Watchlist> getUpdateWatchlistMethod; if ((getUpdateWatchlistMethod = EntityServiceGrpc.getUpdateWatchlistMethod) == null) { synchronized (EntityServiceGrpc.class) { if ((getUpdateWatchlistMethod = EntityServiceGrpc.getUpdateWatchlistMethod) == null) { EntityServiceGrpc.getUpdateWatchlistMethod = getUpdateWatchlistMethod = io.grpc.MethodDescriptor .<com.google.cloud.chronicle.v1.UpdateWatchlistRequest, com.google.cloud.chronicle.v1.Watchlist> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "UpdateWatchlist")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.chronicle.v1.UpdateWatchlistRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.chronicle.v1.Watchlist.getDefaultInstance())) .setSchemaDescriptor( new EntityServiceMethodDescriptorSupplier("UpdateWatchlist")) .build(); } } } return getUpdateWatchlistMethod; } private static volatile io.grpc.MethodDescriptor< com.google.cloud.chronicle.v1.DeleteWatchlistRequest, com.google.protobuf.Empty> getDeleteWatchlistMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "DeleteWatchlist", requestType = com.google.cloud.chronicle.v1.DeleteWatchlistRequest.class, responseType = com.google.protobuf.Empty.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.cloud.chronicle.v1.DeleteWatchlistRequest, com.google.protobuf.Empty> getDeleteWatchlistMethod() { io.grpc.MethodDescriptor< com.google.cloud.chronicle.v1.DeleteWatchlistRequest, com.google.protobuf.Empty> getDeleteWatchlistMethod; if ((getDeleteWatchlistMethod = EntityServiceGrpc.getDeleteWatchlistMethod) == null) { synchronized (EntityServiceGrpc.class) { if ((getDeleteWatchlistMethod = EntityServiceGrpc.getDeleteWatchlistMethod) == null) { 
EntityServiceGrpc.getDeleteWatchlistMethod = getDeleteWatchlistMethod = io.grpc.MethodDescriptor .<com.google.cloud.chronicle.v1.DeleteWatchlistRequest, com.google.protobuf.Empty> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "DeleteWatchlist")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.chronicle.v1.DeleteWatchlistRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.protobuf.Empty.getDefaultInstance())) .setSchemaDescriptor( new EntityServiceMethodDescriptorSupplier("DeleteWatchlist")) .build(); } } } return getDeleteWatchlistMethod; } /** Creates a new async stub that supports all call types for the service */ public static EntityServiceStub newStub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<EntityServiceStub> factory = new io.grpc.stub.AbstractStub.StubFactory<EntityServiceStub>() { @java.lang.Override public EntityServiceStub newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new EntityServiceStub(channel, callOptions); } }; return EntityServiceStub.newStub(factory, channel); } /** Creates a new blocking-style stub that supports all types of calls on the service */ public static EntityServiceBlockingV2Stub newBlockingV2Stub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<EntityServiceBlockingV2Stub> factory = new io.grpc.stub.AbstractStub.StubFactory<EntityServiceBlockingV2Stub>() { @java.lang.Override public EntityServiceBlockingV2Stub newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new EntityServiceBlockingV2Stub(channel, callOptions); } }; return EntityServiceBlockingV2Stub.newStub(factory, channel); } /** * Creates a new blocking-style stub that supports unary and streaming output calls on the service */ public static EntityServiceBlockingStub 
newBlockingStub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<EntityServiceBlockingStub> factory = new io.grpc.stub.AbstractStub.StubFactory<EntityServiceBlockingStub>() { @java.lang.Override public EntityServiceBlockingStub newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new EntityServiceBlockingStub(channel, callOptions); } }; return EntityServiceBlockingStub.newStub(factory, channel); } /** Creates a new ListenableFuture-style stub that supports unary calls on the service */ public static EntityServiceFutureStub newFutureStub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<EntityServiceFutureStub> factory = new io.grpc.stub.AbstractStub.StubFactory<EntityServiceFutureStub>() { @java.lang.Override public EntityServiceFutureStub newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new EntityServiceFutureStub(channel, callOptions); } }; return EntityServiceFutureStub.newStub(factory, channel); } /** * * * <pre> * EntityService contains apis for finding entities. * </pre> */ public interface AsyncService { /** * * * <pre> * Gets watchlist details for the given watchlist ID. * </pre> */ default void getWatchlist( com.google.cloud.chronicle.v1.GetWatchlistRequest request, io.grpc.stub.StreamObserver<com.google.cloud.chronicle.v1.Watchlist> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getGetWatchlistMethod(), responseObserver); } /** * * * <pre> * Lists all watchlists for the given instance. * </pre> */ default void listWatchlists( com.google.cloud.chronicle.v1.ListWatchlistsRequest request, io.grpc.stub.StreamObserver<com.google.cloud.chronicle.v1.ListWatchlistsResponse> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getListWatchlistsMethod(), responseObserver); } /** * * * <pre> * Creates a watchlist for the given instance. * Note that there can be at most 200 watchlists per instance. 
* </pre> */ default void createWatchlist( com.google.cloud.chronicle.v1.CreateWatchlistRequest request, io.grpc.stub.StreamObserver<com.google.cloud.chronicle.v1.Watchlist> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getCreateWatchlistMethod(), responseObserver); } /** * * * <pre> * Updates the watchlist for the given instance. * </pre> */ default void updateWatchlist( com.google.cloud.chronicle.v1.UpdateWatchlistRequest request, io.grpc.stub.StreamObserver<com.google.cloud.chronicle.v1.Watchlist> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getUpdateWatchlistMethod(), responseObserver); } /** * * * <pre> * Deletes the watchlist for the given instance. * </pre> */ default void deleteWatchlist( com.google.cloud.chronicle.v1.DeleteWatchlistRequest request, io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getDeleteWatchlistMethod(), responseObserver); } } /** * Base class for the server implementation of the service EntityService. * * <pre> * EntityService contains apis for finding entities. * </pre> */ public abstract static class EntityServiceImplBase implements io.grpc.BindableService, AsyncService { @java.lang.Override public final io.grpc.ServerServiceDefinition bindService() { return EntityServiceGrpc.bindService(this); } } /** * A stub to allow clients to do asynchronous rpc calls to service EntityService. * * <pre> * EntityService contains apis for finding entities. 
* </pre> */ public static final class EntityServiceStub extends io.grpc.stub.AbstractAsyncStub<EntityServiceStub> { private EntityServiceStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected EntityServiceStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new EntityServiceStub(channel, callOptions); } /** * * * <pre> * Gets watchlist details for the given watchlist ID. * </pre> */ public void getWatchlist( com.google.cloud.chronicle.v1.GetWatchlistRequest request, io.grpc.stub.StreamObserver<com.google.cloud.chronicle.v1.Watchlist> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getGetWatchlistMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Lists all watchlists for the given instance. * </pre> */ public void listWatchlists( com.google.cloud.chronicle.v1.ListWatchlistsRequest request, io.grpc.stub.StreamObserver<com.google.cloud.chronicle.v1.ListWatchlistsResponse> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getListWatchlistsMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Creates a watchlist for the given instance. * Note that there can be at most 200 watchlists per instance. * </pre> */ public void createWatchlist( com.google.cloud.chronicle.v1.CreateWatchlistRequest request, io.grpc.stub.StreamObserver<com.google.cloud.chronicle.v1.Watchlist> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getCreateWatchlistMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Updates the watchlist for the given instance. 
* </pre> */ public void updateWatchlist( com.google.cloud.chronicle.v1.UpdateWatchlistRequest request, io.grpc.stub.StreamObserver<com.google.cloud.chronicle.v1.Watchlist> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getUpdateWatchlistMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Deletes the watchlist for the given instance. * </pre> */ public void deleteWatchlist( com.google.cloud.chronicle.v1.DeleteWatchlistRequest request, io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getDeleteWatchlistMethod(), getCallOptions()), request, responseObserver); } } /** * A stub to allow clients to do synchronous rpc calls to service EntityService. * * <pre> * EntityService contains apis for finding entities. * </pre> */ public static final class EntityServiceBlockingV2Stub extends io.grpc.stub.AbstractBlockingStub<EntityServiceBlockingV2Stub> { private EntityServiceBlockingV2Stub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected EntityServiceBlockingV2Stub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new EntityServiceBlockingV2Stub(channel, callOptions); } /** * * * <pre> * Gets watchlist details for the given watchlist ID. * </pre> */ public com.google.cloud.chronicle.v1.Watchlist getWatchlist( com.google.cloud.chronicle.v1.GetWatchlistRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getGetWatchlistMethod(), getCallOptions(), request); } /** * * * <pre> * Lists all watchlists for the given instance. 
* </pre> */ public com.google.cloud.chronicle.v1.ListWatchlistsResponse listWatchlists( com.google.cloud.chronicle.v1.ListWatchlistsRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getListWatchlistsMethod(), getCallOptions(), request); } /** * * * <pre> * Creates a watchlist for the given instance. * Note that there can be at most 200 watchlists per instance. * </pre> */ public com.google.cloud.chronicle.v1.Watchlist createWatchlist( com.google.cloud.chronicle.v1.CreateWatchlistRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getCreateWatchlistMethod(), getCallOptions(), request); } /** * * * <pre> * Updates the watchlist for the given instance. * </pre> */ public com.google.cloud.chronicle.v1.Watchlist updateWatchlist( com.google.cloud.chronicle.v1.UpdateWatchlistRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getUpdateWatchlistMethod(), getCallOptions(), request); } /** * * * <pre> * Deletes the watchlist for the given instance. * </pre> */ public com.google.protobuf.Empty deleteWatchlist( com.google.cloud.chronicle.v1.DeleteWatchlistRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getDeleteWatchlistMethod(), getCallOptions(), request); } } /** * A stub to allow clients to do limited synchronous rpc calls to service EntityService. * * <pre> * EntityService contains apis for finding entities. * </pre> */ public static final class EntityServiceBlockingStub extends io.grpc.stub.AbstractBlockingStub<EntityServiceBlockingStub> { private EntityServiceBlockingStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected EntityServiceBlockingStub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new EntityServiceBlockingStub(channel, callOptions); } /** * * * <pre> * Gets watchlist details for the given watchlist ID. 
* </pre> */ public com.google.cloud.chronicle.v1.Watchlist getWatchlist( com.google.cloud.chronicle.v1.GetWatchlistRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getGetWatchlistMethod(), getCallOptions(), request); } /** * * * <pre> * Lists all watchlists for the given instance. * </pre> */ public com.google.cloud.chronicle.v1.ListWatchlistsResponse listWatchlists( com.google.cloud.chronicle.v1.ListWatchlistsRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getListWatchlistsMethod(), getCallOptions(), request); } /** * * * <pre> * Creates a watchlist for the given instance. * Note that there can be at most 200 watchlists per instance. * </pre> */ public com.google.cloud.chronicle.v1.Watchlist createWatchlist( com.google.cloud.chronicle.v1.CreateWatchlistRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getCreateWatchlistMethod(), getCallOptions(), request); } /** * * * <pre> * Updates the watchlist for the given instance. * </pre> */ public com.google.cloud.chronicle.v1.Watchlist updateWatchlist( com.google.cloud.chronicle.v1.UpdateWatchlistRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getUpdateWatchlistMethod(), getCallOptions(), request); } /** * * * <pre> * Deletes the watchlist for the given instance. * </pre> */ public com.google.protobuf.Empty deleteWatchlist( com.google.cloud.chronicle.v1.DeleteWatchlistRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getDeleteWatchlistMethod(), getCallOptions(), request); } } /** * A stub to allow clients to do ListenableFuture-style rpc calls to service EntityService. * * <pre> * EntityService contains apis for finding entities. 
* </pre> */ public static final class EntityServiceFutureStub extends io.grpc.stub.AbstractFutureStub<EntityServiceFutureStub> { private EntityServiceFutureStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected EntityServiceFutureStub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new EntityServiceFutureStub(channel, callOptions); } /** * * * <pre> * Gets watchlist details for the given watchlist ID. * </pre> */ public com.google.common.util.concurrent.ListenableFuture< com.google.cloud.chronicle.v1.Watchlist> getWatchlist(com.google.cloud.chronicle.v1.GetWatchlistRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getGetWatchlistMethod(), getCallOptions()), request); } /** * * * <pre> * Lists all watchlists for the given instance. * </pre> */ public com.google.common.util.concurrent.ListenableFuture< com.google.cloud.chronicle.v1.ListWatchlistsResponse> listWatchlists(com.google.cloud.chronicle.v1.ListWatchlistsRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getListWatchlistsMethod(), getCallOptions()), request); } /** * * * <pre> * Creates a watchlist for the given instance. * Note that there can be at most 200 watchlists per instance. * </pre> */ public com.google.common.util.concurrent.ListenableFuture< com.google.cloud.chronicle.v1.Watchlist> createWatchlist(com.google.cloud.chronicle.v1.CreateWatchlistRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getCreateWatchlistMethod(), getCallOptions()), request); } /** * * * <pre> * Updates the watchlist for the given instance. 
* </pre> */ public com.google.common.util.concurrent.ListenableFuture< com.google.cloud.chronicle.v1.Watchlist> updateWatchlist(com.google.cloud.chronicle.v1.UpdateWatchlistRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getUpdateWatchlistMethod(), getCallOptions()), request); } /** * * * <pre> * Deletes the watchlist for the given instance. * </pre> */ public com.google.common.util.concurrent.ListenableFuture<com.google.protobuf.Empty> deleteWatchlist(com.google.cloud.chronicle.v1.DeleteWatchlistRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getDeleteWatchlistMethod(), getCallOptions()), request); } } private static final int METHODID_GET_WATCHLIST = 0; private static final int METHODID_LIST_WATCHLISTS = 1; private static final int METHODID_CREATE_WATCHLIST = 2; private static final int METHODID_UPDATE_WATCHLIST = 3; private static final int METHODID_DELETE_WATCHLIST = 4; private static final class MethodHandlers<Req, Resp> implements io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>, io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>, io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>, io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> { private final AsyncService serviceImpl; private final int methodId; MethodHandlers(AsyncService serviceImpl, int methodId) { this.serviceImpl = serviceImpl; this.methodId = methodId; } @java.lang.Override @java.lang.SuppressWarnings("unchecked") public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) { switch (methodId) { case METHODID_GET_WATCHLIST: serviceImpl.getWatchlist( (com.google.cloud.chronicle.v1.GetWatchlistRequest) request, (io.grpc.stub.StreamObserver<com.google.cloud.chronicle.v1.Watchlist>) responseObserver); break; case METHODID_LIST_WATCHLISTS: serviceImpl.listWatchlists( (com.google.cloud.chronicle.v1.ListWatchlistsRequest) request, 
(io.grpc.stub.StreamObserver<com.google.cloud.chronicle.v1.ListWatchlistsResponse>) responseObserver); break; case METHODID_CREATE_WATCHLIST: serviceImpl.createWatchlist( (com.google.cloud.chronicle.v1.CreateWatchlistRequest) request, (io.grpc.stub.StreamObserver<com.google.cloud.chronicle.v1.Watchlist>) responseObserver); break; case METHODID_UPDATE_WATCHLIST: serviceImpl.updateWatchlist( (com.google.cloud.chronicle.v1.UpdateWatchlistRequest) request, (io.grpc.stub.StreamObserver<com.google.cloud.chronicle.v1.Watchlist>) responseObserver); break; case METHODID_DELETE_WATCHLIST: serviceImpl.deleteWatchlist( (com.google.cloud.chronicle.v1.DeleteWatchlistRequest) request, (io.grpc.stub.StreamObserver<com.google.protobuf.Empty>) responseObserver); break; default: throw new AssertionError(); } } @java.lang.Override @java.lang.SuppressWarnings("unchecked") public io.grpc.stub.StreamObserver<Req> invoke( io.grpc.stub.StreamObserver<Resp> responseObserver) { switch (methodId) { default: throw new AssertionError(); } } } public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) { return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor()) .addMethod( getGetWatchlistMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.cloud.chronicle.v1.GetWatchlistRequest, com.google.cloud.chronicle.v1.Watchlist>(service, METHODID_GET_WATCHLIST))) .addMethod( getListWatchlistsMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.cloud.chronicle.v1.ListWatchlistsRequest, com.google.cloud.chronicle.v1.ListWatchlistsResponse>( service, METHODID_LIST_WATCHLISTS))) .addMethod( getCreateWatchlistMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.cloud.chronicle.v1.CreateWatchlistRequest, com.google.cloud.chronicle.v1.Watchlist>(service, METHODID_CREATE_WATCHLIST))) .addMethod( getUpdateWatchlistMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< 
com.google.cloud.chronicle.v1.UpdateWatchlistRequest, com.google.cloud.chronicle.v1.Watchlist>(service, METHODID_UPDATE_WATCHLIST))) .addMethod( getDeleteWatchlistMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.cloud.chronicle.v1.DeleteWatchlistRequest, com.google.protobuf.Empty>(service, METHODID_DELETE_WATCHLIST))) .build(); } private abstract static class EntityServiceBaseDescriptorSupplier implements io.grpc.protobuf.ProtoFileDescriptorSupplier, io.grpc.protobuf.ProtoServiceDescriptorSupplier { EntityServiceBaseDescriptorSupplier() {} @java.lang.Override public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() { return com.google.cloud.chronicle.v1.EntityProto.getDescriptor(); } @java.lang.Override public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() { return getFileDescriptor().findServiceByName("EntityService"); } } private static final class EntityServiceFileDescriptorSupplier extends EntityServiceBaseDescriptorSupplier { EntityServiceFileDescriptorSupplier() {} } private static final class EntityServiceMethodDescriptorSupplier extends EntityServiceBaseDescriptorSupplier implements io.grpc.protobuf.ProtoMethodDescriptorSupplier { private final java.lang.String methodName; EntityServiceMethodDescriptorSupplier(java.lang.String methodName) { this.methodName = methodName; } @java.lang.Override public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() { return getServiceDescriptor().findMethodByName(methodName); } } private static volatile io.grpc.ServiceDescriptor serviceDescriptor; public static io.grpc.ServiceDescriptor getServiceDescriptor() { io.grpc.ServiceDescriptor result = serviceDescriptor; if (result == null) { synchronized (EntityServiceGrpc.class) { result = serviceDescriptor; if (result == null) { serviceDescriptor = result = io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME) .setSchemaDescriptor(new EntityServiceFileDescriptorSupplier()) 
.addMethod(getGetWatchlistMethod()) .addMethod(getListWatchlistsMethod()) .addMethod(getCreateWatchlistMethod()) .addMethod(getUpdateWatchlistMethod()) .addMethod(getDeleteWatchlistMethod()) .build(); } } } return result; } }
apache/rya
36,222
extras/indexing/src/test/java/org/apache/rya/indexing/external/tupleSet/AccumuloIndexSetTest.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.rya.indexing.external.tupleSet;

import java.math.BigInteger;
import java.net.UnknownHostException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import org.apache.accumulo.core.client.AccumuloException;
import org.apache.accumulo.core.client.AccumuloSecurityException;
import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.TableNotFoundException;
import org.apache.accumulo.core.client.admin.TableOperations;
import org.apache.hadoop.conf.Configuration;
import org.apache.rya.accumulo.AccumuloRdfConfiguration;
import org.apache.rya.api.RdfCloudTripleStoreConfiguration;
import org.apache.rya.api.persist.RyaDAOException;
import org.apache.rya.api.resolver.RyaTypeResolverException;
import org.apache.rya.indexing.accumulo.ConfigUtils;
import org.apache.rya.indexing.external.PcjIntegrationTestingUtil;
import org.apache.rya.indexing.pcj.matching.QueryVariableNormalizer;
import org.apache.rya.indexing.pcj.storage.PcjException;
import org.apache.rya.indexing.pcj.storage.accumulo.PcjTableNameFactory;
import org.apache.rya.rdftriplestore.RyaSailRepository;
import org.apache.rya.rdftriplestore.inference.InferenceEngineException;
import org.apache.rya.sail.config.RyaSailFactory;
import org.eclipse.rdf4j.common.iteration.CloseableIteration;
import org.eclipse.rdf4j.model.IRI;
import org.eclipse.rdf4j.model.Literal;
import org.eclipse.rdf4j.model.Statement;
import org.eclipse.rdf4j.model.ValueFactory;
import org.eclipse.rdf4j.model.impl.SimpleValueFactory;
import org.eclipse.rdf4j.query.BindingSet;
import org.eclipse.rdf4j.query.MalformedQueryException;
import org.eclipse.rdf4j.query.QueryEvaluationException;
import org.eclipse.rdf4j.query.algebra.Projection;
import org.eclipse.rdf4j.query.algebra.evaluation.QueryBindingSet;
import org.eclipse.rdf4j.query.parser.ParsedQuery;
import org.eclipse.rdf4j.query.parser.sparql.SPARQLParser;
import org.eclipse.rdf4j.repository.RepositoryConnection;
import org.eclipse.rdf4j.repository.RepositoryException;
import org.eclipse.rdf4j.sail.SailException;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

import com.google.common.base.Optional;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;

/**
 * Tests of {@link AccumuloIndexSet} evaluation against a precomputed join
 * (PCJ) table backed by a mock, in-memory Accumulo instance.
 *
 * <p>Each test loads a small set of people with {@code hasAge}/{@code playsSport}
 * triples into Rya, materializes the query results into a PCJ table, and then
 * evaluates the index set with various constraint binding sets.
 */
public class AccumuloIndexSetTest {

    // NOTE(review): static but re-assigned from an instance @Before — works for
    // serial JUnit execution, would be unsafe for parallel test runs.
    protected static Connector accumuloConn = null;
    protected RyaSailRepository ryaRepo = null;
    protected RepositoryConnection ryaConn = null;
    protected Configuration conf = getConf();
    protected String prefix = "rya_";

    private static final ValueFactory VF = SimpleValueFactory.getInstance();

    /** Datatype IRI for the xsd:date literals joined into constraint binding sets. */
    private static final IRI XSD_DATE = VF.createIRI("http://www.w3.org/2001/XMLSchema#date");

    /** Query materialized into the PCJ table by most tests (age-filtered). */
    private static final String AGE_FILTER_SPARQL =
            "SELECT ?name ?age " +
              "{" +
                "FILTER(?age < 30) ." +
                "?name <http://hasAge> ?age." +
                "?name <http://playsSport> \"Soccer\" " +
              "}";

    /** Unfiltered variant used by the constant-mapping and cross-type tests. */
    private static final String NO_FILTER_SPARQL =
            "SELECT ?name ?age " +
              "{" +
                "?name <http://hasAge> ?age." +
                "?name <http://playsSport> \"Soccer\" " +
              "}";

    @Before
    public void init() throws AccumuloException, AccumuloSecurityException, RyaDAOException,
            RepositoryException, TableNotFoundException, InferenceEngineException,
            NumberFormatException, UnknownHostException, SailException {
        accumuloConn = ConfigUtils.getConnector(conf);
        final TableOperations ops = accumuloConn.tableOperations();
        // Drop any PCJ table left over from a previous run so each test starts clean.
        if (ops.exists(prefix + "INDEX_" + "testPcj")) {
            ops.delete(prefix + "INDEX_" + "testPcj");
        }
        ryaRepo = new RyaSailRepository(RyaSailFactory.getInstance(conf));
        ryaConn = ryaRepo.getConnection();
    }

    /** Adds the hasAge/playsSport statements for one person to {@code triples}. */
    private static void addPerson(final Set<Statement> triples, final String personUri, final Literal age) {
        triples.add(VF.createStatement(VF.createIRI(personUri), VF.createIRI("http://hasAge"), age));
        triples.add(VF.createStatement(VF.createIRI(personUri), VF.createIRI("http://playsSport"),
                VF.createLiteral("Soccer")));
    }

    /**
     * Loads the standard four-person data set into Rya.  Eve's age is created from
     * an {@code int} (43) rather than a {@code BigInteger}, giving it a different
     * datatype than the others, and she also fails the {@code FILTER(?age < 30)}.
     */
    private void loadStandardTriples() throws RepositoryException {
        final Set<Statement> triples = new HashSet<>();
        addPerson(triples, "http://Alice", VF.createLiteral(BigInteger.valueOf(14)));
        addPerson(triples, "http://Bob", VF.createLiteral(BigInteger.valueOf(16)));
        addPerson(triples, "http://Charlie", VF.createLiteral(BigInteger.valueOf(12)));
        addPerson(triples, "http://Eve", VF.createLiteral(43));
        loadTriples(triples);
    }

    /** Inserts every statement in {@code triples} through the Rya connection. */
    private void loadTriples(final Set<Statement> triples) throws RepositoryException {
        for (final Statement triple : triples) {
            ryaConn.add(triple);
        }
    }

    /**
     * Creates and populates the shared "testPcj" PCJ table for {@code sparql}.
     *
     * @return the generated PCJ table name.
     */
    private String createTestPcj(final String sparql) throws RepositoryException, PcjException,
            TableNotFoundException, RyaTypeResolverException, MalformedQueryException,
            SailException, QueryEvaluationException, AccumuloException, AccumuloSecurityException {
        final String pcjTableName = new PcjTableNameFactory().makeTableName(prefix, "testPcj");
        PcjIntegrationTestingUtil.createAndPopulatePcj(ryaConn, accumuloConn, pcjTableName,
                sparql, new String[]{"name", "age"}, Optional.absent());
        return pcjTableName;
    }

    /** Drains {@code results} into a set, closing the iteration when done. */
    private static Set<BindingSet> fetchResults(
            final CloseableIteration<BindingSet, QueryEvaluationException> results)
            throws QueryEvaluationException {
        final Set<BindingSet> fetched = new HashSet<>();
        try {
            while (results.hasNext()) {
                fetched.add(results.next());
            }
        } finally {
            results.close();
        }
        return fetched;
    }

    /** Builds the expected {?name, ?age} binding set for one person. */
    private static QueryBindingSet expectedNameAge(final String personUri, final int age) {
        final QueryBindingSet bs = new QueryBindingSet();
        bs.addBinding("name", VF.createIRI(personUri));
        bs.addBinding("age", VF.createLiteral(BigInteger.valueOf(age)));
        return bs;
    }

    /**
     * Evaluating with an empty binding set returns every precomputed result
     * stored in the PCJ table (Alice, Bob, Charlie — Eve is filtered out).
     */
    @Test
    public void accumuloIndexSetTestWithEmptyBindingSet() throws RepositoryException, PcjException,
            TableNotFoundException, RyaTypeResolverException, MalformedQueryException, SailException,
            QueryEvaluationException, AccumuloException, AccumuloSecurityException {
        loadStandardTriples();
        final String pcjTableName = createTestPcj(AGE_FILTER_SPARQL);

        final AccumuloIndexSet ais = new AccumuloIndexSet(conf, pcjTableName);
        final Set<BindingSet> fetchedResults = fetchResults(ais.evaluate(new QueryBindingSet()));

        final Set<BindingSet> expectedResults = Sets.newHashSet(
                expectedNameAge("http://Alice", 14),
                expectedNameAge("http://Bob", 16),
                expectedNameAge("http://Charlie", 12));
        Assert.assertEquals(expectedResults, fetchedResults);
    }

    /**
     * A constraint binding set that fixes {@code ?name} joins against the PCJ
     * rows: the single result is the constraint set extended with Alice's age.
     */
    @Test
    public void accumuloIndexSetTestWithBindingSet() throws RepositoryException, PcjException,
            TableNotFoundException, RyaTypeResolverException, MalformedQueryException, SailException,
            QueryEvaluationException, AccumuloException, AccumuloSecurityException {
        loadStandardTriples();
        final String pcjTableName = createTestPcj(AGE_FILTER_SPARQL);
        final AccumuloIndexSet ais = new AccumuloIndexSet(conf, pcjTableName);

        final QueryBindingSet bs = new QueryBindingSet();
        bs.addBinding("name", VF.createIRI("http://Alice"));
        bs.addBinding("location", VF.createIRI("http://Virginia"));

        final CloseableIteration<BindingSet, QueryEvaluationException> results = ais.evaluate(bs);
        // The expected result is the constraint set joined with Alice's age.
        bs.addBinding("age", VF.createLiteral(BigInteger.valueOf(14)));
        try {
            Assert.assertEquals(bs, results.next());
        } finally {
            results.close();
        }
    }

    /** Two constraint sets, each fixing a different {@code ?name}, yield one join result each. */
    @Test
    public void accumuloIndexSetTestWithTwoBindingSets() throws RepositoryException, PcjException,
            TableNotFoundException, RyaTypeResolverException, MalformedQueryException, SailException,
            QueryEvaluationException, AccumuloException, AccumuloSecurityException {
        loadStandardTriples();
        final String pcjTableName = createTestPcj(AGE_FILTER_SPARQL);
        final AccumuloIndexSet ais = new AccumuloIndexSet(conf, pcjTableName);

        final QueryBindingSet bs = new QueryBindingSet();
        bs.addBinding("birthDate", VF.createLiteral("1983-03-17", XSD_DATE));
        bs.addBinding("name", VF.createIRI("http://Alice"));

        final QueryBindingSet bs2 = new QueryBindingSet();
        bs2.addBinding("birthDate", VF.createLiteral("1983-04-18", XSD_DATE));
        bs2.addBinding("name", VF.createIRI("http://Bob"));

        final Set<BindingSet> bSets = Sets.newHashSet(bs, bs2);
        final Set<BindingSet> fetchedResults = fetchResults(ais.evaluate(bSets));

        final QueryBindingSet alice = expectedNameAge("http://Alice", 14);
        alice.addBinding("birthDate", VF.createLiteral("1983-03-17", XSD_DATE));
        final QueryBindingSet bob = expectedNameAge("http://Bob", 16);
        bob.addBinding("birthDate", VF.createLiteral("1983-04-18", XSD_DATE));

        Assert.assertEquals(Sets.<BindingSet>newHashSet(alice, bob), fetchedResults);
    }

    /** Evaluating with an empty collection of binding sets produces no results. */
    @Test
    public void accumuloIndexSetTestWithNoBindingSet() throws RepositoryException, PcjException,
            TableNotFoundException, RyaTypeResolverException, MalformedQueryException, SailException,
            QueryEvaluationException, AccumuloException, AccumuloSecurityException {
        loadStandardTriples();
        final String pcjTableName = createTestPcj(AGE_FILTER_SPARQL);
        final AccumuloIndexSet ais = new AccumuloIndexSet(conf, pcjTableName);

        final CloseableIteration<BindingSet, QueryEvaluationException> results =
                ais.evaluate(new HashSet<BindingSet>());
        try {
            Assert.assertEquals(false, results.hasNext());
        } finally {
            results.close();
        }
    }

    /**
     * A constraint set sharing no variables with the PCJ results forms a direct
     * (cross) product: every stored row extended with the constraint bindings.
     */
    @Test
    public void accumuloIndexSetTestWithDirectProductBindingSet() throws RepositoryException,
            PcjException, TableNotFoundException, RyaTypeResolverException, MalformedQueryException,
            SailException, QueryEvaluationException, AccumuloException, AccumuloSecurityException {
        loadStandardTriples();
        final String pcjTableName = createTestPcj(AGE_FILTER_SPARQL);
        final AccumuloIndexSet ais = new AccumuloIndexSet(conf, pcjTableName);

        // Neither "birthDate" nor "location" appears in the PCJ, so no join constraint applies.
        final QueryBindingSet bs = new QueryBindingSet();
        bs.addBinding("birthDate", VF.createLiteral("1983-03-17", XSD_DATE));
        bs.addBinding("location", VF.createIRI("http://Virginia"));

        final Set<BindingSet> fetchedResults = fetchResults(ais.evaluate(bs));

        final QueryBindingSet alice = expectedNameAge("http://Alice", 14);
        alice.addAll(bs);
        final QueryBindingSet bob = expectedNameAge("http://Bob", 16);
        bob.addAll(bs);
        final QueryBindingSet charlie = expectedNameAge("http://Charlie", 12);
        charlie.addAll(bs);

        Assert.assertEquals(3, fetchedResults.size());
        Assert.assertEquals(Sets.<BindingSet>newHashSet(alice, bob, charlie), fetchedResults);
    }

    /** Two unconstrained binding sets cross-product against all three PCJ rows: 6 results. */
    @Test
    public void accumuloIndexSetTestWithTwoDirectProductBindingSet() throws RepositoryException,
            PcjException, TableNotFoundException, RyaTypeResolverException, MalformedQueryException,
            SailException, QueryEvaluationException, AccumuloException, AccumuloSecurityException {
        loadStandardTriples();
        final String pcjTableName = createTestPcj(AGE_FILTER_SPARQL);
        final AccumuloIndexSet ais = new AccumuloIndexSet(conf, pcjTableName);

        final QueryBindingSet bs = new QueryBindingSet();
        bs.addBinding("birthDate", VF.createLiteral("1983-03-17", XSD_DATE));
        bs.addBinding("location", VF.createIRI("http://Virginia"));

        final QueryBindingSet bs2 = new QueryBindingSet();
        bs2.addBinding("birthDate", VF.createLiteral("1983-04-18", XSD_DATE));
        bs2.addBinding("location", VF.createIRI("http://Georgia"));

        final Set<BindingSet> bSets = Sets.newHashSet(bs, bs2);
        final Set<BindingSet> fetchedResults = fetchResults(ais.evaluate(bSets));

        final QueryBindingSet alice1 = expectedNameAge("http://Alice", 14);
        alice1.addAll(bs);
        final QueryBindingSet bob1 = expectedNameAge("http://Bob", 16);
        bob1.addAll(bs);
        final QueryBindingSet charlie1 = expectedNameAge("http://Charlie", 12);
        charlie1.addAll(bs);

        final QueryBindingSet alice2 = expectedNameAge("http://Alice", 14);
        alice2.addAll(bs2);
        final QueryBindingSet bob2 = expectedNameAge("http://Bob", 16);
        bob2.addAll(bs2);
        final QueryBindingSet charlie2 = expectedNameAge("http://Charlie", 12);
        charlie2.addAll(bs2);

        Assert.assertEquals(
                Sets.<BindingSet>newHashSet(alice1, bob1, charlie1, alice2, bob2, charlie2),
                fetchedResults);
    }

    /**
     * Constraint sets expressed in an equivalent query's variable names
     * ({@code ?x}/{@code ?y}) are joined via an explicit table-variable map.
     */
    @Test
    public void accumuloIndexSetTestWithTwoDirectProductBindingSetsWithMapping()
            throws RepositoryException, PcjException, TableNotFoundException,
            RyaTypeResolverException, MalformedQueryException, SailException,
            QueryEvaluationException, AccumuloException, AccumuloSecurityException {
        loadStandardTriples();
        final String pcjTableName = createTestPcj(AGE_FILTER_SPARQL);

        // Same query shape as the PCJ, but with ?x/?y in place of ?name/?age.
        final String sparql2 =
                "SELECT ?x ?y " +
                  "{" +
                    "FILTER(?y < 30) ." +
                    "?x <http://hasAge> ?y." +
                    "?x <http://playsSport> \"Soccer\" " +
                  "}";
        final SPARQLParser p = new SPARQLParser();
        final ParsedQuery pq = p.parseQuery(sparql2, null);

        final Map<String, String> map = new HashMap<>();
        map.put("x", "name");
        map.put("y", "age");

        final AccumuloIndexSet ais = new AccumuloIndexSet(conf, pcjTableName);
        ais.setProjectionExpr((Projection) pq.getTupleExpr());
        ais.setTableVarMap(map);
        ais.setSupportedVariableOrderMap(Lists.newArrayList("x;y", "y;x"));

        final QueryBindingSet bs = new QueryBindingSet();
        bs.addBinding("birthDate", VF.createLiteral("1983-03-17", XSD_DATE));
        bs.addBinding("x", VF.createIRI("http://Alice"));

        final QueryBindingSet bs2 = new QueryBindingSet();
        bs2.addBinding("birthDate", VF.createLiteral("1983-04-18", XSD_DATE));
        bs2.addBinding("x", VF.createIRI("http://Bob"));

        final Set<BindingSet> bSets = Sets.newHashSet(bs, bs2);
        final Set<BindingSet> fetchedResults = fetchResults(ais.evaluate(bSets));

        final QueryBindingSet alice = new QueryBindingSet();
        alice.addBinding("x", VF.createIRI("http://Alice"));
        alice.addBinding("y", VF.createLiteral(BigInteger.valueOf(14)));
        alice.addBinding("birthDate", VF.createLiteral("1983-03-17", XSD_DATE));

        final QueryBindingSet bob = new QueryBindingSet();
        bob.addBinding("x", VF.createIRI("http://Bob"));
        bob.addBinding("y", VF.createLiteral(BigInteger.valueOf(16)));
        bob.addBinding("birthDate", VF.createLiteral("1983-04-18", XSD_DATE));

        Assert.assertEquals(Sets.<BindingSet>newHashSet(alice, bob), fetchedResults);
    }

    /**
     * A query with a constant ({@code hasAge 16}) normalized against the PCJ
     * query should only match Bob; Alice's constraint set is dropped.
     */
    @Test
    public void accumuloIndexSetTestWithTwoDirectProductBindingSetsWithConstantMapping()
            throws Exception {
        loadStandardTriples();
        final String pcjTableName = createTestPcj(NO_FILTER_SPARQL);

        final String sparql2 =
                "SELECT ?x " +
                  "{" +
                    "?x <http://hasAge> 16 ." +
                    "?x <http://playsSport> \"Soccer\" " +
                  "}";
        final SPARQLParser p = new SPARQLParser();
        final ParsedQuery pq1 = p.parseQuery(NO_FILTER_SPARQL, null);
        final ParsedQuery pq2 = p.parseQuery(sparql2, null);

        final AccumuloIndexSet ais = new AccumuloIndexSet(conf, pcjTableName);
        ais.setProjectionExpr((Projection) QueryVariableNormalizer
                .getNormalizedIndex(pq2.getTupleExpr(), pq1.getTupleExpr()).get(0));

        final QueryBindingSet bs = new QueryBindingSet();
        bs.addBinding("birthDate", VF.createLiteral("1983-03-17", XSD_DATE));
        bs.addBinding("x", VF.createIRI("http://Alice"));

        final QueryBindingSet bs2 = new QueryBindingSet();
        bs2.addBinding("birthDate", VF.createLiteral("1983-04-18", XSD_DATE));
        bs2.addBinding("x", VF.createIRI("http://Bob"));

        final Set<BindingSet> bSets = Sets.newHashSet(bs, bs2);
        final Set<BindingSet> fetchedResults = fetchResults(ais.evaluate(bSets));

        // Only Bob has age 16, so only bs2 survives the constant constraint.
        Assert.assertEquals(Sets.<BindingSet>newHashSet(bs2), fetchedResults);
    }

    /**
     * An age bound as an xsd:string ("16") must not join against the numeric
     * ages in the PCJ table; only the correctly-typed binding (14) matches.
     */
    @Test
    public void accumuloIndexSetTestAttemptJoinAcrossTypes() throws Exception {
        final Set<Statement> triples = new HashSet<>();
        addPerson(triples, "http://Alice", VF.createLiteral(BigInteger.valueOf(14)));
        addPerson(triples, "http://Bob", VF.createLiteral(BigInteger.valueOf(16)));
        loadTriples(triples);

        final String pcjTableName = createTestPcj(NO_FILTER_SPARQL);
        final AccumuloIndexSet ais = new AccumuloIndexSet(conf, pcjTableName);

        final QueryBindingSet bs1 = new QueryBindingSet();
        bs1.addBinding("age", VF.createLiteral("16")); // string-typed: should not join
        final QueryBindingSet bs2 = new QueryBindingSet();
        bs2.addBinding("age", VF.createLiteral(BigInteger.valueOf(14)));
        final Set<BindingSet> bSets = Sets.newHashSet(bs1, bs2);

        final Set<BindingSet> fetchedResults = fetchResults(ais.evaluate(bSets));

        bs2.addBinding("name", VF.createIRI("http://Alice"));
        Assert.assertEquals(Sets.<BindingSet>newHashSet(bs2), fetchedResults);
    }

    @After
    public void close() throws RepositoryException {
        ryaConn.close();
        ryaRepo.shutDown();
    }

    /** Builds a configuration pointing at a mock, in-memory Accumulo instance. */
    private static Configuration getConf() {
        final AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
        conf.setBoolean(ConfigUtils.USE_MOCK_INSTANCE, true);
        conf.set(RdfCloudTripleStoreConfiguration.CONF_TBL_PREFIX, "rya_");
        conf.set(ConfigUtils.CLOUDBASE_USER, "root");
        conf.set(ConfigUtils.CLOUDBASE_PASSWORD, "");
        conf.set(ConfigUtils.CLOUDBASE_INSTANCE, "instance");
        conf.set(ConfigUtils.CLOUDBASE_AUTHS, "");
        return conf;
    }
}
googleads/google-ads-java
36,767
google-ads-stubs-v19/src/main/java/com/google/ads/googleads/v19/common/DisplayUploadAdInfo.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v19/common/ad_type_infos.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v19.common; /** * <pre> * A generic type of display ad. The exact ad format is controlled by the * `display_upload_product_type` field, which determines what kinds of data * need to be included with the ad. * </pre> * * Protobuf type {@code google.ads.googleads.v19.common.DisplayUploadAdInfo} */ public final class DisplayUploadAdInfo extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v19.common.DisplayUploadAdInfo) DisplayUploadAdInfoOrBuilder { private static final long serialVersionUID = 0L; // Use DisplayUploadAdInfo.newBuilder() to construct. private DisplayUploadAdInfo(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private DisplayUploadAdInfo() { displayUploadProductType_ = 0; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new DisplayUploadAdInfo(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v19.common.AdTypeInfosProto.internal_static_google_ads_googleads_v19_common_DisplayUploadAdInfo_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v19.common.AdTypeInfosProto.internal_static_google_ads_googleads_v19_common_DisplayUploadAdInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v19.common.DisplayUploadAdInfo.class, com.google.ads.googleads.v19.common.DisplayUploadAdInfo.Builder.class); } private int mediaAssetCase_ = 0; @SuppressWarnings("serial") private java.lang.Object mediaAsset_; public enum MediaAssetCase implements com.google.protobuf.Internal.EnumLite, 
com.google.protobuf.AbstractMessage.InternalOneOfEnum { MEDIA_BUNDLE(2), MEDIAASSET_NOT_SET(0); private final int value; private MediaAssetCase(int value) { this.value = value; } /** * @param value The number of the enum to look for. * @return The enum associated with the given number. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static MediaAssetCase valueOf(int value) { return forNumber(value); } public static MediaAssetCase forNumber(int value) { switch (value) { case 2: return MEDIA_BUNDLE; case 0: return MEDIAASSET_NOT_SET; default: return null; } } public int getNumber() { return this.value; } }; public MediaAssetCase getMediaAssetCase() { return MediaAssetCase.forNumber( mediaAssetCase_); } public static final int DISPLAY_UPLOAD_PRODUCT_TYPE_FIELD_NUMBER = 1; private int displayUploadProductType_ = 0; /** * <pre> * The product type of this ad. See comments on the enum for details. * </pre> * * <code>.google.ads.googleads.v19.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType display_upload_product_type = 1;</code> * @return The enum numeric value on the wire for displayUploadProductType. */ @java.lang.Override public int getDisplayUploadProductTypeValue() { return displayUploadProductType_; } /** * <pre> * The product type of this ad. See comments on the enum for details. * </pre> * * <code>.google.ads.googleads.v19.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType display_upload_product_type = 1;</code> * @return The displayUploadProductType. */ @java.lang.Override public com.google.ads.googleads.v19.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType getDisplayUploadProductType() { com.google.ads.googleads.v19.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType result = com.google.ads.googleads.v19.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType.forNumber(displayUploadProductType_); return result == null ? 
com.google.ads.googleads.v19.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType.UNRECOGNIZED : result; } public static final int MEDIA_BUNDLE_FIELD_NUMBER = 2; /** * <pre> * A media bundle asset to be used in the ad. For information about the * media bundle for HTML5_UPLOAD_AD, see * https://support.google.com/google-ads/answer/1722096 * Media bundles that are part of dynamic product types use a special format * that needs to be created through the Google Web Designer. See * https://support.google.com/webdesigner/answer/7543898 for more * information. * </pre> * * <code>.google.ads.googleads.v19.common.AdMediaBundleAsset media_bundle = 2;</code> * @return Whether the mediaBundle field is set. */ @java.lang.Override public boolean hasMediaBundle() { return mediaAssetCase_ == 2; } /** * <pre> * A media bundle asset to be used in the ad. For information about the * media bundle for HTML5_UPLOAD_AD, see * https://support.google.com/google-ads/answer/1722096 * Media bundles that are part of dynamic product types use a special format * that needs to be created through the Google Web Designer. See * https://support.google.com/webdesigner/answer/7543898 for more * information. * </pre> * * <code>.google.ads.googleads.v19.common.AdMediaBundleAsset media_bundle = 2;</code> * @return The mediaBundle. */ @java.lang.Override public com.google.ads.googleads.v19.common.AdMediaBundleAsset getMediaBundle() { if (mediaAssetCase_ == 2) { return (com.google.ads.googleads.v19.common.AdMediaBundleAsset) mediaAsset_; } return com.google.ads.googleads.v19.common.AdMediaBundleAsset.getDefaultInstance(); } /** * <pre> * A media bundle asset to be used in the ad. For information about the * media bundle for HTML5_UPLOAD_AD, see * https://support.google.com/google-ads/answer/1722096 * Media bundles that are part of dynamic product types use a special format * that needs to be created through the Google Web Designer. 
See * https://support.google.com/webdesigner/answer/7543898 for more * information. * </pre> * * <code>.google.ads.googleads.v19.common.AdMediaBundleAsset media_bundle = 2;</code> */ @java.lang.Override public com.google.ads.googleads.v19.common.AdMediaBundleAssetOrBuilder getMediaBundleOrBuilder() { if (mediaAssetCase_ == 2) { return (com.google.ads.googleads.v19.common.AdMediaBundleAsset) mediaAsset_; } return com.google.ads.googleads.v19.common.AdMediaBundleAsset.getDefaultInstance(); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (displayUploadProductType_ != com.google.ads.googleads.v19.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType.UNSPECIFIED.getNumber()) { output.writeEnum(1, displayUploadProductType_); } if (mediaAssetCase_ == 2) { output.writeMessage(2, (com.google.ads.googleads.v19.common.AdMediaBundleAsset) mediaAsset_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (displayUploadProductType_ != com.google.ads.googleads.v19.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType.UNSPECIFIED.getNumber()) { size += com.google.protobuf.CodedOutputStream .computeEnumSize(1, displayUploadProductType_); } if (mediaAssetCase_ == 2) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, (com.google.ads.googleads.v19.common.AdMediaBundleAsset) mediaAsset_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof 
com.google.ads.googleads.v19.common.DisplayUploadAdInfo)) { return super.equals(obj); } com.google.ads.googleads.v19.common.DisplayUploadAdInfo other = (com.google.ads.googleads.v19.common.DisplayUploadAdInfo) obj; if (displayUploadProductType_ != other.displayUploadProductType_) return false; if (!getMediaAssetCase().equals(other.getMediaAssetCase())) return false; switch (mediaAssetCase_) { case 2: if (!getMediaBundle() .equals(other.getMediaBundle())) return false; break; case 0: default: } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + DISPLAY_UPLOAD_PRODUCT_TYPE_FIELD_NUMBER; hash = (53 * hash) + displayUploadProductType_; switch (mediaAssetCase_) { case 2: hash = (37 * hash) + MEDIA_BUNDLE_FIELD_NUMBER; hash = (53 * hash) + getMediaBundle().hashCode(); break; case 0: default: } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v19.common.DisplayUploadAdInfo parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.common.DisplayUploadAdInfo parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v19.common.DisplayUploadAdInfo parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.common.DisplayUploadAdInfo parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v19.common.DisplayUploadAdInfo parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.common.DisplayUploadAdInfo parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v19.common.DisplayUploadAdInfo parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.common.DisplayUploadAdInfo parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v19.common.DisplayUploadAdInfo parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.common.DisplayUploadAdInfo parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v19.common.DisplayUploadAdInfo parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.common.DisplayUploadAdInfo parseFrom( 
com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v19.common.DisplayUploadAdInfo prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * A generic type of display ad. The exact ad format is controlled by the * `display_upload_product_type` field, which determines what kinds of data * need to be included with the ad. 
* </pre> * * Protobuf type {@code google.ads.googleads.v19.common.DisplayUploadAdInfo} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v19.common.DisplayUploadAdInfo) com.google.ads.googleads.v19.common.DisplayUploadAdInfoOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v19.common.AdTypeInfosProto.internal_static_google_ads_googleads_v19_common_DisplayUploadAdInfo_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v19.common.AdTypeInfosProto.internal_static_google_ads_googleads_v19_common_DisplayUploadAdInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v19.common.DisplayUploadAdInfo.class, com.google.ads.googleads.v19.common.DisplayUploadAdInfo.Builder.class); } // Construct using com.google.ads.googleads.v19.common.DisplayUploadAdInfo.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; displayUploadProductType_ = 0; if (mediaBundleBuilder_ != null) { mediaBundleBuilder_.clear(); } mediaAssetCase_ = 0; mediaAsset_ = null; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.googleads.v19.common.AdTypeInfosProto.internal_static_google_ads_googleads_v19_common_DisplayUploadAdInfo_descriptor; } @java.lang.Override public com.google.ads.googleads.v19.common.DisplayUploadAdInfo getDefaultInstanceForType() { return com.google.ads.googleads.v19.common.DisplayUploadAdInfo.getDefaultInstance(); } @java.lang.Override public com.google.ads.googleads.v19.common.DisplayUploadAdInfo build() { 
com.google.ads.googleads.v19.common.DisplayUploadAdInfo result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v19.common.DisplayUploadAdInfo buildPartial() { com.google.ads.googleads.v19.common.DisplayUploadAdInfo result = new com.google.ads.googleads.v19.common.DisplayUploadAdInfo(this); if (bitField0_ != 0) { buildPartial0(result); } buildPartialOneofs(result); onBuilt(); return result; } private void buildPartial0(com.google.ads.googleads.v19.common.DisplayUploadAdInfo result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.displayUploadProductType_ = displayUploadProductType_; } } private void buildPartialOneofs(com.google.ads.googleads.v19.common.DisplayUploadAdInfo result) { result.mediaAssetCase_ = mediaAssetCase_; result.mediaAsset_ = this.mediaAsset_; if (mediaAssetCase_ == 2 && mediaBundleBuilder_ != null) { result.mediaAsset_ = mediaBundleBuilder_.build(); } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder 
mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v19.common.DisplayUploadAdInfo) { return mergeFrom((com.google.ads.googleads.v19.common.DisplayUploadAdInfo)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v19.common.DisplayUploadAdInfo other) { if (other == com.google.ads.googleads.v19.common.DisplayUploadAdInfo.getDefaultInstance()) return this; if (other.displayUploadProductType_ != 0) { setDisplayUploadProductTypeValue(other.getDisplayUploadProductTypeValue()); } switch (other.getMediaAssetCase()) { case MEDIA_BUNDLE: { mergeMediaBundle(other.getMediaBundle()); break; } case MEDIAASSET_NOT_SET: { break; } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { displayUploadProductType_ = input.readEnum(); bitField0_ |= 0x00000001; break; } // case 8 case 18: { input.readMessage( getMediaBundleFieldBuilder().getBuilder(), extensionRegistry); mediaAssetCase_ = 2; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int mediaAssetCase_ = 0; private java.lang.Object mediaAsset_; public MediaAssetCase getMediaAssetCase() { return MediaAssetCase.forNumber( mediaAssetCase_); } public Builder 
clearMediaAsset() { mediaAssetCase_ = 0; mediaAsset_ = null; onChanged(); return this; } private int bitField0_; private int displayUploadProductType_ = 0; /** * <pre> * The product type of this ad. See comments on the enum for details. * </pre> * * <code>.google.ads.googleads.v19.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType display_upload_product_type = 1;</code> * @return The enum numeric value on the wire for displayUploadProductType. */ @java.lang.Override public int getDisplayUploadProductTypeValue() { return displayUploadProductType_; } /** * <pre> * The product type of this ad. See comments on the enum for details. * </pre> * * <code>.google.ads.googleads.v19.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType display_upload_product_type = 1;</code> * @param value The enum numeric value on the wire for displayUploadProductType to set. * @return This builder for chaining. */ public Builder setDisplayUploadProductTypeValue(int value) { displayUploadProductType_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * <pre> * The product type of this ad. See comments on the enum for details. * </pre> * * <code>.google.ads.googleads.v19.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType display_upload_product_type = 1;</code> * @return The displayUploadProductType. */ @java.lang.Override public com.google.ads.googleads.v19.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType getDisplayUploadProductType() { com.google.ads.googleads.v19.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType result = com.google.ads.googleads.v19.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType.forNumber(displayUploadProductType_); return result == null ? com.google.ads.googleads.v19.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType.UNRECOGNIZED : result; } /** * <pre> * The product type of this ad. See comments on the enum for details. 
* </pre> * * <code>.google.ads.googleads.v19.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType display_upload_product_type = 1;</code> * @param value The displayUploadProductType to set. * @return This builder for chaining. */ public Builder setDisplayUploadProductType(com.google.ads.googleads.v19.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; displayUploadProductType_ = value.getNumber(); onChanged(); return this; } /** * <pre> * The product type of this ad. See comments on the enum for details. * </pre> * * <code>.google.ads.googleads.v19.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType display_upload_product_type = 1;</code> * @return This builder for chaining. */ public Builder clearDisplayUploadProductType() { bitField0_ = (bitField0_ & ~0x00000001); displayUploadProductType_ = 0; onChanged(); return this; } private com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v19.common.AdMediaBundleAsset, com.google.ads.googleads.v19.common.AdMediaBundleAsset.Builder, com.google.ads.googleads.v19.common.AdMediaBundleAssetOrBuilder> mediaBundleBuilder_; /** * <pre> * A media bundle asset to be used in the ad. For information about the * media bundle for HTML5_UPLOAD_AD, see * https://support.google.com/google-ads/answer/1722096 * Media bundles that are part of dynamic product types use a special format * that needs to be created through the Google Web Designer. See * https://support.google.com/webdesigner/answer/7543898 for more * information. * </pre> * * <code>.google.ads.googleads.v19.common.AdMediaBundleAsset media_bundle = 2;</code> * @return Whether the mediaBundle field is set. */ @java.lang.Override public boolean hasMediaBundle() { return mediaAssetCase_ == 2; } /** * <pre> * A media bundle asset to be used in the ad. 
For information about the * media bundle for HTML5_UPLOAD_AD, see * https://support.google.com/google-ads/answer/1722096 * Media bundles that are part of dynamic product types use a special format * that needs to be created through the Google Web Designer. See * https://support.google.com/webdesigner/answer/7543898 for more * information. * </pre> * * <code>.google.ads.googleads.v19.common.AdMediaBundleAsset media_bundle = 2;</code> * @return The mediaBundle. */ @java.lang.Override public com.google.ads.googleads.v19.common.AdMediaBundleAsset getMediaBundle() { if (mediaBundleBuilder_ == null) { if (mediaAssetCase_ == 2) { return (com.google.ads.googleads.v19.common.AdMediaBundleAsset) mediaAsset_; } return com.google.ads.googleads.v19.common.AdMediaBundleAsset.getDefaultInstance(); } else { if (mediaAssetCase_ == 2) { return mediaBundleBuilder_.getMessage(); } return com.google.ads.googleads.v19.common.AdMediaBundleAsset.getDefaultInstance(); } } /** * <pre> * A media bundle asset to be used in the ad. For information about the * media bundle for HTML5_UPLOAD_AD, see * https://support.google.com/google-ads/answer/1722096 * Media bundles that are part of dynamic product types use a special format * that needs to be created through the Google Web Designer. See * https://support.google.com/webdesigner/answer/7543898 for more * information. * </pre> * * <code>.google.ads.googleads.v19.common.AdMediaBundleAsset media_bundle = 2;</code> */ public Builder setMediaBundle(com.google.ads.googleads.v19.common.AdMediaBundleAsset value) { if (mediaBundleBuilder_ == null) { if (value == null) { throw new NullPointerException(); } mediaAsset_ = value; onChanged(); } else { mediaBundleBuilder_.setMessage(value); } mediaAssetCase_ = 2; return this; } /** * <pre> * A media bundle asset to be used in the ad. 
For information about the * media bundle for HTML5_UPLOAD_AD, see * https://support.google.com/google-ads/answer/1722096 * Media bundles that are part of dynamic product types use a special format * that needs to be created through the Google Web Designer. See * https://support.google.com/webdesigner/answer/7543898 for more * information. * </pre> * * <code>.google.ads.googleads.v19.common.AdMediaBundleAsset media_bundle = 2;</code> */ public Builder setMediaBundle( com.google.ads.googleads.v19.common.AdMediaBundleAsset.Builder builderForValue) { if (mediaBundleBuilder_ == null) { mediaAsset_ = builderForValue.build(); onChanged(); } else { mediaBundleBuilder_.setMessage(builderForValue.build()); } mediaAssetCase_ = 2; return this; } /** * <pre> * A media bundle asset to be used in the ad. For information about the * media bundle for HTML5_UPLOAD_AD, see * https://support.google.com/google-ads/answer/1722096 * Media bundles that are part of dynamic product types use a special format * that needs to be created through the Google Web Designer. See * https://support.google.com/webdesigner/answer/7543898 for more * information. * </pre> * * <code>.google.ads.googleads.v19.common.AdMediaBundleAsset media_bundle = 2;</code> */ public Builder mergeMediaBundle(com.google.ads.googleads.v19.common.AdMediaBundleAsset value) { if (mediaBundleBuilder_ == null) { if (mediaAssetCase_ == 2 && mediaAsset_ != com.google.ads.googleads.v19.common.AdMediaBundleAsset.getDefaultInstance()) { mediaAsset_ = com.google.ads.googleads.v19.common.AdMediaBundleAsset.newBuilder((com.google.ads.googleads.v19.common.AdMediaBundleAsset) mediaAsset_) .mergeFrom(value).buildPartial(); } else { mediaAsset_ = value; } onChanged(); } else { if (mediaAssetCase_ == 2) { mediaBundleBuilder_.mergeFrom(value); } else { mediaBundleBuilder_.setMessage(value); } } mediaAssetCase_ = 2; return this; } /** * <pre> * A media bundle asset to be used in the ad. 
For information about the * media bundle for HTML5_UPLOAD_AD, see * https://support.google.com/google-ads/answer/1722096 * Media bundles that are part of dynamic product types use a special format * that needs to be created through the Google Web Designer. See * https://support.google.com/webdesigner/answer/7543898 for more * information. * </pre> * * <code>.google.ads.googleads.v19.common.AdMediaBundleAsset media_bundle = 2;</code> */ public Builder clearMediaBundle() { if (mediaBundleBuilder_ == null) { if (mediaAssetCase_ == 2) { mediaAssetCase_ = 0; mediaAsset_ = null; onChanged(); } } else { if (mediaAssetCase_ == 2) { mediaAssetCase_ = 0; mediaAsset_ = null; } mediaBundleBuilder_.clear(); } return this; } /** * <pre> * A media bundle asset to be used in the ad. For information about the * media bundle for HTML5_UPLOAD_AD, see * https://support.google.com/google-ads/answer/1722096 * Media bundles that are part of dynamic product types use a special format * that needs to be created through the Google Web Designer. See * https://support.google.com/webdesigner/answer/7543898 for more * information. * </pre> * * <code>.google.ads.googleads.v19.common.AdMediaBundleAsset media_bundle = 2;</code> */ public com.google.ads.googleads.v19.common.AdMediaBundleAsset.Builder getMediaBundleBuilder() { return getMediaBundleFieldBuilder().getBuilder(); } /** * <pre> * A media bundle asset to be used in the ad. For information about the * media bundle for HTML5_UPLOAD_AD, see * https://support.google.com/google-ads/answer/1722096 * Media bundles that are part of dynamic product types use a special format * that needs to be created through the Google Web Designer. See * https://support.google.com/webdesigner/answer/7543898 for more * information. 
* </pre> * * <code>.google.ads.googleads.v19.common.AdMediaBundleAsset media_bundle = 2;</code> */ @java.lang.Override public com.google.ads.googleads.v19.common.AdMediaBundleAssetOrBuilder getMediaBundleOrBuilder() { if ((mediaAssetCase_ == 2) && (mediaBundleBuilder_ != null)) { return mediaBundleBuilder_.getMessageOrBuilder(); } else { if (mediaAssetCase_ == 2) { return (com.google.ads.googleads.v19.common.AdMediaBundleAsset) mediaAsset_; } return com.google.ads.googleads.v19.common.AdMediaBundleAsset.getDefaultInstance(); } } /** * <pre> * A media bundle asset to be used in the ad. For information about the * media bundle for HTML5_UPLOAD_AD, see * https://support.google.com/google-ads/answer/1722096 * Media bundles that are part of dynamic product types use a special format * that needs to be created through the Google Web Designer. See * https://support.google.com/webdesigner/answer/7543898 for more * information. * </pre> * * <code>.google.ads.googleads.v19.common.AdMediaBundleAsset media_bundle = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v19.common.AdMediaBundleAsset, com.google.ads.googleads.v19.common.AdMediaBundleAsset.Builder, com.google.ads.googleads.v19.common.AdMediaBundleAssetOrBuilder> getMediaBundleFieldBuilder() { if (mediaBundleBuilder_ == null) { if (!(mediaAssetCase_ == 2)) { mediaAsset_ = com.google.ads.googleads.v19.common.AdMediaBundleAsset.getDefaultInstance(); } mediaBundleBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v19.common.AdMediaBundleAsset, com.google.ads.googleads.v19.common.AdMediaBundleAsset.Builder, com.google.ads.googleads.v19.common.AdMediaBundleAssetOrBuilder>( (com.google.ads.googleads.v19.common.AdMediaBundleAsset) mediaAsset_, getParentForChildren(), isClean()); mediaAsset_ = null; } mediaAssetCase_ = 2; onChanged(); return mediaBundleBuilder_; } @java.lang.Override public final Builder setUnknownFields( final 
com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v19.common.DisplayUploadAdInfo) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v19.common.DisplayUploadAdInfo) private static final com.google.ads.googleads.v19.common.DisplayUploadAdInfo DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v19.common.DisplayUploadAdInfo(); } public static com.google.ads.googleads.v19.common.DisplayUploadAdInfo getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<DisplayUploadAdInfo> PARSER = new com.google.protobuf.AbstractParser<DisplayUploadAdInfo>() { @java.lang.Override public DisplayUploadAdInfo parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<DisplayUploadAdInfo> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<DisplayUploadAdInfo> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v19.common.DisplayUploadAdInfo getDefaultInstanceForType() { 
return DEFAULT_INSTANCE; } }
googleads/google-ads-java
36,767
google-ads-stubs-v20/src/main/java/com/google/ads/googleads/v20/common/DisplayUploadAdInfo.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v20/common/ad_type_infos.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v20.common; /** * <pre> * A generic type of display ad. The exact ad format is controlled by the * `display_upload_product_type` field, which determines what kinds of data * need to be included with the ad. * </pre> * * Protobuf type {@code google.ads.googleads.v20.common.DisplayUploadAdInfo} */ public final class DisplayUploadAdInfo extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v20.common.DisplayUploadAdInfo) DisplayUploadAdInfoOrBuilder { private static final long serialVersionUID = 0L; // Use DisplayUploadAdInfo.newBuilder() to construct. private DisplayUploadAdInfo(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private DisplayUploadAdInfo() { displayUploadProductType_ = 0; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new DisplayUploadAdInfo(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v20.common.AdTypeInfosProto.internal_static_google_ads_googleads_v20_common_DisplayUploadAdInfo_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v20.common.AdTypeInfosProto.internal_static_google_ads_googleads_v20_common_DisplayUploadAdInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v20.common.DisplayUploadAdInfo.class, com.google.ads.googleads.v20.common.DisplayUploadAdInfo.Builder.class); } private int mediaAssetCase_ = 0; @SuppressWarnings("serial") private java.lang.Object mediaAsset_; public enum MediaAssetCase implements com.google.protobuf.Internal.EnumLite, 
com.google.protobuf.AbstractMessage.InternalOneOfEnum { MEDIA_BUNDLE(2), MEDIAASSET_NOT_SET(0); private final int value; private MediaAssetCase(int value) { this.value = value; } /** * @param value The number of the enum to look for. * @return The enum associated with the given number. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static MediaAssetCase valueOf(int value) { return forNumber(value); } public static MediaAssetCase forNumber(int value) { switch (value) { case 2: return MEDIA_BUNDLE; case 0: return MEDIAASSET_NOT_SET; default: return null; } } public int getNumber() { return this.value; } }; public MediaAssetCase getMediaAssetCase() { return MediaAssetCase.forNumber( mediaAssetCase_); } public static final int DISPLAY_UPLOAD_PRODUCT_TYPE_FIELD_NUMBER = 1; private int displayUploadProductType_ = 0; /** * <pre> * The product type of this ad. See comments on the enum for details. * </pre> * * <code>.google.ads.googleads.v20.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType display_upload_product_type = 1;</code> * @return The enum numeric value on the wire for displayUploadProductType. */ @java.lang.Override public int getDisplayUploadProductTypeValue() { return displayUploadProductType_; } /** * <pre> * The product type of this ad. See comments on the enum for details. * </pre> * * <code>.google.ads.googleads.v20.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType display_upload_product_type = 1;</code> * @return The displayUploadProductType. */ @java.lang.Override public com.google.ads.googleads.v20.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType getDisplayUploadProductType() { com.google.ads.googleads.v20.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType result = com.google.ads.googleads.v20.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType.forNumber(displayUploadProductType_); return result == null ? 
com.google.ads.googleads.v20.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType.UNRECOGNIZED : result; } public static final int MEDIA_BUNDLE_FIELD_NUMBER = 2; /** * <pre> * A media bundle asset to be used in the ad. For information about the * media bundle for HTML5_UPLOAD_AD, see * https://support.google.com/google-ads/answer/1722096 * Media bundles that are part of dynamic product types use a special format * that needs to be created through the Google Web Designer. See * https://support.google.com/webdesigner/answer/7543898 for more * information. * </pre> * * <code>.google.ads.googleads.v20.common.AdMediaBundleAsset media_bundle = 2;</code> * @return Whether the mediaBundle field is set. */ @java.lang.Override public boolean hasMediaBundle() { return mediaAssetCase_ == 2; } /** * <pre> * A media bundle asset to be used in the ad. For information about the * media bundle for HTML5_UPLOAD_AD, see * https://support.google.com/google-ads/answer/1722096 * Media bundles that are part of dynamic product types use a special format * that needs to be created through the Google Web Designer. See * https://support.google.com/webdesigner/answer/7543898 for more * information. * </pre> * * <code>.google.ads.googleads.v20.common.AdMediaBundleAsset media_bundle = 2;</code> * @return The mediaBundle. */ @java.lang.Override public com.google.ads.googleads.v20.common.AdMediaBundleAsset getMediaBundle() { if (mediaAssetCase_ == 2) { return (com.google.ads.googleads.v20.common.AdMediaBundleAsset) mediaAsset_; } return com.google.ads.googleads.v20.common.AdMediaBundleAsset.getDefaultInstance(); } /** * <pre> * A media bundle asset to be used in the ad. For information about the * media bundle for HTML5_UPLOAD_AD, see * https://support.google.com/google-ads/answer/1722096 * Media bundles that are part of dynamic product types use a special format * that needs to be created through the Google Web Designer. 
See * https://support.google.com/webdesigner/answer/7543898 for more * information. * </pre> * * <code>.google.ads.googleads.v20.common.AdMediaBundleAsset media_bundle = 2;</code> */ @java.lang.Override public com.google.ads.googleads.v20.common.AdMediaBundleAssetOrBuilder getMediaBundleOrBuilder() { if (mediaAssetCase_ == 2) { return (com.google.ads.googleads.v20.common.AdMediaBundleAsset) mediaAsset_; } return com.google.ads.googleads.v20.common.AdMediaBundleAsset.getDefaultInstance(); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (displayUploadProductType_ != com.google.ads.googleads.v20.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType.UNSPECIFIED.getNumber()) { output.writeEnum(1, displayUploadProductType_); } if (mediaAssetCase_ == 2) { output.writeMessage(2, (com.google.ads.googleads.v20.common.AdMediaBundleAsset) mediaAsset_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (displayUploadProductType_ != com.google.ads.googleads.v20.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType.UNSPECIFIED.getNumber()) { size += com.google.protobuf.CodedOutputStream .computeEnumSize(1, displayUploadProductType_); } if (mediaAssetCase_ == 2) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, (com.google.ads.googleads.v20.common.AdMediaBundleAsset) mediaAsset_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof 
com.google.ads.googleads.v20.common.DisplayUploadAdInfo)) { return super.equals(obj); } com.google.ads.googleads.v20.common.DisplayUploadAdInfo other = (com.google.ads.googleads.v20.common.DisplayUploadAdInfo) obj; if (displayUploadProductType_ != other.displayUploadProductType_) return false; if (!getMediaAssetCase().equals(other.getMediaAssetCase())) return false; switch (mediaAssetCase_) { case 2: if (!getMediaBundle() .equals(other.getMediaBundle())) return false; break; case 0: default: } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + DISPLAY_UPLOAD_PRODUCT_TYPE_FIELD_NUMBER; hash = (53 * hash) + displayUploadProductType_; switch (mediaAssetCase_) { case 2: hash = (37 * hash) + MEDIA_BUNDLE_FIELD_NUMBER; hash = (53 * hash) + getMediaBundle().hashCode(); break; case 0: default: } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v20.common.DisplayUploadAdInfo parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.common.DisplayUploadAdInfo parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v20.common.DisplayUploadAdInfo parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.common.DisplayUploadAdInfo parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v20.common.DisplayUploadAdInfo parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.common.DisplayUploadAdInfo parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v20.common.DisplayUploadAdInfo parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.common.DisplayUploadAdInfo parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v20.common.DisplayUploadAdInfo parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.common.DisplayUploadAdInfo parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v20.common.DisplayUploadAdInfo parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.common.DisplayUploadAdInfo parseFrom( 
com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v20.common.DisplayUploadAdInfo prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * A generic type of display ad. The exact ad format is controlled by the * `display_upload_product_type` field, which determines what kinds of data * need to be included with the ad. 
* </pre> * * Protobuf type {@code google.ads.googleads.v20.common.DisplayUploadAdInfo} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v20.common.DisplayUploadAdInfo) com.google.ads.googleads.v20.common.DisplayUploadAdInfoOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v20.common.AdTypeInfosProto.internal_static_google_ads_googleads_v20_common_DisplayUploadAdInfo_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v20.common.AdTypeInfosProto.internal_static_google_ads_googleads_v20_common_DisplayUploadAdInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v20.common.DisplayUploadAdInfo.class, com.google.ads.googleads.v20.common.DisplayUploadAdInfo.Builder.class); } // Construct using com.google.ads.googleads.v20.common.DisplayUploadAdInfo.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; displayUploadProductType_ = 0; if (mediaBundleBuilder_ != null) { mediaBundleBuilder_.clear(); } mediaAssetCase_ = 0; mediaAsset_ = null; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.googleads.v20.common.AdTypeInfosProto.internal_static_google_ads_googleads_v20_common_DisplayUploadAdInfo_descriptor; } @java.lang.Override public com.google.ads.googleads.v20.common.DisplayUploadAdInfo getDefaultInstanceForType() { return com.google.ads.googleads.v20.common.DisplayUploadAdInfo.getDefaultInstance(); } @java.lang.Override public com.google.ads.googleads.v20.common.DisplayUploadAdInfo build() { 
com.google.ads.googleads.v20.common.DisplayUploadAdInfo result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v20.common.DisplayUploadAdInfo buildPartial() { com.google.ads.googleads.v20.common.DisplayUploadAdInfo result = new com.google.ads.googleads.v20.common.DisplayUploadAdInfo(this); if (bitField0_ != 0) { buildPartial0(result); } buildPartialOneofs(result); onBuilt(); return result; } private void buildPartial0(com.google.ads.googleads.v20.common.DisplayUploadAdInfo result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.displayUploadProductType_ = displayUploadProductType_; } } private void buildPartialOneofs(com.google.ads.googleads.v20.common.DisplayUploadAdInfo result) { result.mediaAssetCase_ = mediaAssetCase_; result.mediaAsset_ = this.mediaAsset_; if (mediaAssetCase_ == 2 && mediaBundleBuilder_ != null) { result.mediaAsset_ = mediaBundleBuilder_.build(); } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder 
mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v20.common.DisplayUploadAdInfo) { return mergeFrom((com.google.ads.googleads.v20.common.DisplayUploadAdInfo)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v20.common.DisplayUploadAdInfo other) { if (other == com.google.ads.googleads.v20.common.DisplayUploadAdInfo.getDefaultInstance()) return this; if (other.displayUploadProductType_ != 0) { setDisplayUploadProductTypeValue(other.getDisplayUploadProductTypeValue()); } switch (other.getMediaAssetCase()) { case MEDIA_BUNDLE: { mergeMediaBundle(other.getMediaBundle()); break; } case MEDIAASSET_NOT_SET: { break; } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { displayUploadProductType_ = input.readEnum(); bitField0_ |= 0x00000001; break; } // case 8 case 18: { input.readMessage( getMediaBundleFieldBuilder().getBuilder(), extensionRegistry); mediaAssetCase_ = 2; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int mediaAssetCase_ = 0; private java.lang.Object mediaAsset_; public MediaAssetCase getMediaAssetCase() { return MediaAssetCase.forNumber( mediaAssetCase_); } public Builder 
clearMediaAsset() { mediaAssetCase_ = 0; mediaAsset_ = null; onChanged(); return this; } private int bitField0_; private int displayUploadProductType_ = 0; /** * <pre> * The product type of this ad. See comments on the enum for details. * </pre> * * <code>.google.ads.googleads.v20.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType display_upload_product_type = 1;</code> * @return The enum numeric value on the wire for displayUploadProductType. */ @java.lang.Override public int getDisplayUploadProductTypeValue() { return displayUploadProductType_; } /** * <pre> * The product type of this ad. See comments on the enum for details. * </pre> * * <code>.google.ads.googleads.v20.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType display_upload_product_type = 1;</code> * @param value The enum numeric value on the wire for displayUploadProductType to set. * @return This builder for chaining. */ public Builder setDisplayUploadProductTypeValue(int value) { displayUploadProductType_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * <pre> * The product type of this ad. See comments on the enum for details. * </pre> * * <code>.google.ads.googleads.v20.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType display_upload_product_type = 1;</code> * @return The displayUploadProductType. */ @java.lang.Override public com.google.ads.googleads.v20.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType getDisplayUploadProductType() { com.google.ads.googleads.v20.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType result = com.google.ads.googleads.v20.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType.forNumber(displayUploadProductType_); return result == null ? com.google.ads.googleads.v20.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType.UNRECOGNIZED : result; } /** * <pre> * The product type of this ad. See comments on the enum for details. 
* </pre> * * <code>.google.ads.googleads.v20.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType display_upload_product_type = 1;</code> * @param value The displayUploadProductType to set. * @return This builder for chaining. */ public Builder setDisplayUploadProductType(com.google.ads.googleads.v20.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; displayUploadProductType_ = value.getNumber(); onChanged(); return this; } /** * <pre> * The product type of this ad. See comments on the enum for details. * </pre> * * <code>.google.ads.googleads.v20.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType display_upload_product_type = 1;</code> * @return This builder for chaining. */ public Builder clearDisplayUploadProductType() { bitField0_ = (bitField0_ & ~0x00000001); displayUploadProductType_ = 0; onChanged(); return this; } private com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v20.common.AdMediaBundleAsset, com.google.ads.googleads.v20.common.AdMediaBundleAsset.Builder, com.google.ads.googleads.v20.common.AdMediaBundleAssetOrBuilder> mediaBundleBuilder_; /** * <pre> * A media bundle asset to be used in the ad. For information about the * media bundle for HTML5_UPLOAD_AD, see * https://support.google.com/google-ads/answer/1722096 * Media bundles that are part of dynamic product types use a special format * that needs to be created through the Google Web Designer. See * https://support.google.com/webdesigner/answer/7543898 for more * information. * </pre> * * <code>.google.ads.googleads.v20.common.AdMediaBundleAsset media_bundle = 2;</code> * @return Whether the mediaBundle field is set. */ @java.lang.Override public boolean hasMediaBundle() { return mediaAssetCase_ == 2; } /** * <pre> * A media bundle asset to be used in the ad. 
For information about the * media bundle for HTML5_UPLOAD_AD, see * https://support.google.com/google-ads/answer/1722096 * Media bundles that are part of dynamic product types use a special format * that needs to be created through the Google Web Designer. See * https://support.google.com/webdesigner/answer/7543898 for more * information. * </pre> * * <code>.google.ads.googleads.v20.common.AdMediaBundleAsset media_bundle = 2;</code> * @return The mediaBundle. */ @java.lang.Override public com.google.ads.googleads.v20.common.AdMediaBundleAsset getMediaBundle() { if (mediaBundleBuilder_ == null) { if (mediaAssetCase_ == 2) { return (com.google.ads.googleads.v20.common.AdMediaBundleAsset) mediaAsset_; } return com.google.ads.googleads.v20.common.AdMediaBundleAsset.getDefaultInstance(); } else { if (mediaAssetCase_ == 2) { return mediaBundleBuilder_.getMessage(); } return com.google.ads.googleads.v20.common.AdMediaBundleAsset.getDefaultInstance(); } } /** * <pre> * A media bundle asset to be used in the ad. For information about the * media bundle for HTML5_UPLOAD_AD, see * https://support.google.com/google-ads/answer/1722096 * Media bundles that are part of dynamic product types use a special format * that needs to be created through the Google Web Designer. See * https://support.google.com/webdesigner/answer/7543898 for more * information. * </pre> * * <code>.google.ads.googleads.v20.common.AdMediaBundleAsset media_bundle = 2;</code> */ public Builder setMediaBundle(com.google.ads.googleads.v20.common.AdMediaBundleAsset value) { if (mediaBundleBuilder_ == null) { if (value == null) { throw new NullPointerException(); } mediaAsset_ = value; onChanged(); } else { mediaBundleBuilder_.setMessage(value); } mediaAssetCase_ = 2; return this; } /** * <pre> * A media bundle asset to be used in the ad. 
For information about the * media bundle for HTML5_UPLOAD_AD, see * https://support.google.com/google-ads/answer/1722096 * Media bundles that are part of dynamic product types use a special format * that needs to be created through the Google Web Designer. See * https://support.google.com/webdesigner/answer/7543898 for more * information. * </pre> * * <code>.google.ads.googleads.v20.common.AdMediaBundleAsset media_bundle = 2;</code> */ public Builder setMediaBundle( com.google.ads.googleads.v20.common.AdMediaBundleAsset.Builder builderForValue) { if (mediaBundleBuilder_ == null) { mediaAsset_ = builderForValue.build(); onChanged(); } else { mediaBundleBuilder_.setMessage(builderForValue.build()); } mediaAssetCase_ = 2; return this; } /** * <pre> * A media bundle asset to be used in the ad. For information about the * media bundle for HTML5_UPLOAD_AD, see * https://support.google.com/google-ads/answer/1722096 * Media bundles that are part of dynamic product types use a special format * that needs to be created through the Google Web Designer. See * https://support.google.com/webdesigner/answer/7543898 for more * information. * </pre> * * <code>.google.ads.googleads.v20.common.AdMediaBundleAsset media_bundle = 2;</code> */ public Builder mergeMediaBundle(com.google.ads.googleads.v20.common.AdMediaBundleAsset value) { if (mediaBundleBuilder_ == null) { if (mediaAssetCase_ == 2 && mediaAsset_ != com.google.ads.googleads.v20.common.AdMediaBundleAsset.getDefaultInstance()) { mediaAsset_ = com.google.ads.googleads.v20.common.AdMediaBundleAsset.newBuilder((com.google.ads.googleads.v20.common.AdMediaBundleAsset) mediaAsset_) .mergeFrom(value).buildPartial(); } else { mediaAsset_ = value; } onChanged(); } else { if (mediaAssetCase_ == 2) { mediaBundleBuilder_.mergeFrom(value); } else { mediaBundleBuilder_.setMessage(value); } } mediaAssetCase_ = 2; return this; } /** * <pre> * A media bundle asset to be used in the ad. 
For information about the * media bundle for HTML5_UPLOAD_AD, see * https://support.google.com/google-ads/answer/1722096 * Media bundles that are part of dynamic product types use a special format * that needs to be created through the Google Web Designer. See * https://support.google.com/webdesigner/answer/7543898 for more * information. * </pre> * * <code>.google.ads.googleads.v20.common.AdMediaBundleAsset media_bundle = 2;</code> */ public Builder clearMediaBundle() { if (mediaBundleBuilder_ == null) { if (mediaAssetCase_ == 2) { mediaAssetCase_ = 0; mediaAsset_ = null; onChanged(); } } else { if (mediaAssetCase_ == 2) { mediaAssetCase_ = 0; mediaAsset_ = null; } mediaBundleBuilder_.clear(); } return this; } /** * <pre> * A media bundle asset to be used in the ad. For information about the * media bundle for HTML5_UPLOAD_AD, see * https://support.google.com/google-ads/answer/1722096 * Media bundles that are part of dynamic product types use a special format * that needs to be created through the Google Web Designer. See * https://support.google.com/webdesigner/answer/7543898 for more * information. * </pre> * * <code>.google.ads.googleads.v20.common.AdMediaBundleAsset media_bundle = 2;</code> */ public com.google.ads.googleads.v20.common.AdMediaBundleAsset.Builder getMediaBundleBuilder() { return getMediaBundleFieldBuilder().getBuilder(); } /** * <pre> * A media bundle asset to be used in the ad. For information about the * media bundle for HTML5_UPLOAD_AD, see * https://support.google.com/google-ads/answer/1722096 * Media bundles that are part of dynamic product types use a special format * that needs to be created through the Google Web Designer. See * https://support.google.com/webdesigner/answer/7543898 for more * information. 
* </pre> * * <code>.google.ads.googleads.v20.common.AdMediaBundleAsset media_bundle = 2;</code> */ @java.lang.Override public com.google.ads.googleads.v20.common.AdMediaBundleAssetOrBuilder getMediaBundleOrBuilder() { if ((mediaAssetCase_ == 2) && (mediaBundleBuilder_ != null)) { return mediaBundleBuilder_.getMessageOrBuilder(); } else { if (mediaAssetCase_ == 2) { return (com.google.ads.googleads.v20.common.AdMediaBundleAsset) mediaAsset_; } return com.google.ads.googleads.v20.common.AdMediaBundleAsset.getDefaultInstance(); } } /** * <pre> * A media bundle asset to be used in the ad. For information about the * media bundle for HTML5_UPLOAD_AD, see * https://support.google.com/google-ads/answer/1722096 * Media bundles that are part of dynamic product types use a special format * that needs to be created through the Google Web Designer. See * https://support.google.com/webdesigner/answer/7543898 for more * information. * </pre> * * <code>.google.ads.googleads.v20.common.AdMediaBundleAsset media_bundle = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v20.common.AdMediaBundleAsset, com.google.ads.googleads.v20.common.AdMediaBundleAsset.Builder, com.google.ads.googleads.v20.common.AdMediaBundleAssetOrBuilder> getMediaBundleFieldBuilder() { if (mediaBundleBuilder_ == null) { if (!(mediaAssetCase_ == 2)) { mediaAsset_ = com.google.ads.googleads.v20.common.AdMediaBundleAsset.getDefaultInstance(); } mediaBundleBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v20.common.AdMediaBundleAsset, com.google.ads.googleads.v20.common.AdMediaBundleAsset.Builder, com.google.ads.googleads.v20.common.AdMediaBundleAssetOrBuilder>( (com.google.ads.googleads.v20.common.AdMediaBundleAsset) mediaAsset_, getParentForChildren(), isClean()); mediaAsset_ = null; } mediaAssetCase_ = 2; onChanged(); return mediaBundleBuilder_; } @java.lang.Override public final Builder setUnknownFields( final 
com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v20.common.DisplayUploadAdInfo) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v20.common.DisplayUploadAdInfo) private static final com.google.ads.googleads.v20.common.DisplayUploadAdInfo DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v20.common.DisplayUploadAdInfo(); } public static com.google.ads.googleads.v20.common.DisplayUploadAdInfo getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<DisplayUploadAdInfo> PARSER = new com.google.protobuf.AbstractParser<DisplayUploadAdInfo>() { @java.lang.Override public DisplayUploadAdInfo parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<DisplayUploadAdInfo> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<DisplayUploadAdInfo> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v20.common.DisplayUploadAdInfo getDefaultInstanceForType() { 
return DEFAULT_INSTANCE; } }
googleads/google-ads-java
36,767
google-ads-stubs-v21/src/main/java/com/google/ads/googleads/v21/common/DisplayUploadAdInfo.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v21/common/ad_type_infos.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v21.common; /** * <pre> * A generic type of display ad. The exact ad format is controlled by the * `display_upload_product_type` field, which determines what kinds of data * need to be included with the ad. * </pre> * * Protobuf type {@code google.ads.googleads.v21.common.DisplayUploadAdInfo} */ public final class DisplayUploadAdInfo extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v21.common.DisplayUploadAdInfo) DisplayUploadAdInfoOrBuilder { private static final long serialVersionUID = 0L; // Use DisplayUploadAdInfo.newBuilder() to construct. private DisplayUploadAdInfo(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private DisplayUploadAdInfo() { displayUploadProductType_ = 0; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new DisplayUploadAdInfo(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v21.common.AdTypeInfosProto.internal_static_google_ads_googleads_v21_common_DisplayUploadAdInfo_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v21.common.AdTypeInfosProto.internal_static_google_ads_googleads_v21_common_DisplayUploadAdInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v21.common.DisplayUploadAdInfo.class, com.google.ads.googleads.v21.common.DisplayUploadAdInfo.Builder.class); } private int mediaAssetCase_ = 0; @SuppressWarnings("serial") private java.lang.Object mediaAsset_; public enum MediaAssetCase implements com.google.protobuf.Internal.EnumLite, 
com.google.protobuf.AbstractMessage.InternalOneOfEnum { MEDIA_BUNDLE(2), MEDIAASSET_NOT_SET(0); private final int value; private MediaAssetCase(int value) { this.value = value; } /** * @param value The number of the enum to look for. * @return The enum associated with the given number. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static MediaAssetCase valueOf(int value) { return forNumber(value); } public static MediaAssetCase forNumber(int value) { switch (value) { case 2: return MEDIA_BUNDLE; case 0: return MEDIAASSET_NOT_SET; default: return null; } } public int getNumber() { return this.value; } }; public MediaAssetCase getMediaAssetCase() { return MediaAssetCase.forNumber( mediaAssetCase_); } public static final int DISPLAY_UPLOAD_PRODUCT_TYPE_FIELD_NUMBER = 1; private int displayUploadProductType_ = 0; /** * <pre> * The product type of this ad. See comments on the enum for details. * </pre> * * <code>.google.ads.googleads.v21.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType display_upload_product_type = 1;</code> * @return The enum numeric value on the wire for displayUploadProductType. */ @java.lang.Override public int getDisplayUploadProductTypeValue() { return displayUploadProductType_; } /** * <pre> * The product type of this ad. See comments on the enum for details. * </pre> * * <code>.google.ads.googleads.v21.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType display_upload_product_type = 1;</code> * @return The displayUploadProductType. */ @java.lang.Override public com.google.ads.googleads.v21.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType getDisplayUploadProductType() { com.google.ads.googleads.v21.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType result = com.google.ads.googleads.v21.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType.forNumber(displayUploadProductType_); return result == null ? 
com.google.ads.googleads.v21.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType.UNRECOGNIZED : result; } public static final int MEDIA_BUNDLE_FIELD_NUMBER = 2; /** * <pre> * A media bundle asset to be used in the ad. For information about the * media bundle for HTML5_UPLOAD_AD, see * https://support.google.com/google-ads/answer/1722096 * Media bundles that are part of dynamic product types use a special format * that needs to be created through the Google Web Designer. See * https://support.google.com/webdesigner/answer/7543898 for more * information. * </pre> * * <code>.google.ads.googleads.v21.common.AdMediaBundleAsset media_bundle = 2;</code> * @return Whether the mediaBundle field is set. */ @java.lang.Override public boolean hasMediaBundle() { return mediaAssetCase_ == 2; } /** * <pre> * A media bundle asset to be used in the ad. For information about the * media bundle for HTML5_UPLOAD_AD, see * https://support.google.com/google-ads/answer/1722096 * Media bundles that are part of dynamic product types use a special format * that needs to be created through the Google Web Designer. See * https://support.google.com/webdesigner/answer/7543898 for more * information. * </pre> * * <code>.google.ads.googleads.v21.common.AdMediaBundleAsset media_bundle = 2;</code> * @return The mediaBundle. */ @java.lang.Override public com.google.ads.googleads.v21.common.AdMediaBundleAsset getMediaBundle() { if (mediaAssetCase_ == 2) { return (com.google.ads.googleads.v21.common.AdMediaBundleAsset) mediaAsset_; } return com.google.ads.googleads.v21.common.AdMediaBundleAsset.getDefaultInstance(); } /** * <pre> * A media bundle asset to be used in the ad. For information about the * media bundle for HTML5_UPLOAD_AD, see * https://support.google.com/google-ads/answer/1722096 * Media bundles that are part of dynamic product types use a special format * that needs to be created through the Google Web Designer. 
See * https://support.google.com/webdesigner/answer/7543898 for more * information. * </pre> * * <code>.google.ads.googleads.v21.common.AdMediaBundleAsset media_bundle = 2;</code> */ @java.lang.Override public com.google.ads.googleads.v21.common.AdMediaBundleAssetOrBuilder getMediaBundleOrBuilder() { if (mediaAssetCase_ == 2) { return (com.google.ads.googleads.v21.common.AdMediaBundleAsset) mediaAsset_; } return com.google.ads.googleads.v21.common.AdMediaBundleAsset.getDefaultInstance(); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (displayUploadProductType_ != com.google.ads.googleads.v21.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType.UNSPECIFIED.getNumber()) { output.writeEnum(1, displayUploadProductType_); } if (mediaAssetCase_ == 2) { output.writeMessage(2, (com.google.ads.googleads.v21.common.AdMediaBundleAsset) mediaAsset_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (displayUploadProductType_ != com.google.ads.googleads.v21.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType.UNSPECIFIED.getNumber()) { size += com.google.protobuf.CodedOutputStream .computeEnumSize(1, displayUploadProductType_); } if (mediaAssetCase_ == 2) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, (com.google.ads.googleads.v21.common.AdMediaBundleAsset) mediaAsset_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof 
com.google.ads.googleads.v21.common.DisplayUploadAdInfo)) { return super.equals(obj); } com.google.ads.googleads.v21.common.DisplayUploadAdInfo other = (com.google.ads.googleads.v21.common.DisplayUploadAdInfo) obj; if (displayUploadProductType_ != other.displayUploadProductType_) return false; if (!getMediaAssetCase().equals(other.getMediaAssetCase())) return false; switch (mediaAssetCase_) { case 2: if (!getMediaBundle() .equals(other.getMediaBundle())) return false; break; case 0: default: } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + DISPLAY_UPLOAD_PRODUCT_TYPE_FIELD_NUMBER; hash = (53 * hash) + displayUploadProductType_; switch (mediaAssetCase_) { case 2: hash = (37 * hash) + MEDIA_BUNDLE_FIELD_NUMBER; hash = (53 * hash) + getMediaBundle().hashCode(); break; case 0: default: } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v21.common.DisplayUploadAdInfo parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v21.common.DisplayUploadAdInfo parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v21.common.DisplayUploadAdInfo parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v21.common.DisplayUploadAdInfo parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v21.common.DisplayUploadAdInfo parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v21.common.DisplayUploadAdInfo parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v21.common.DisplayUploadAdInfo parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v21.common.DisplayUploadAdInfo parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v21.common.DisplayUploadAdInfo parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v21.common.DisplayUploadAdInfo parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v21.common.DisplayUploadAdInfo parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v21.common.DisplayUploadAdInfo parseFrom( 
com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v21.common.DisplayUploadAdInfo prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * A generic type of display ad. The exact ad format is controlled by the * `display_upload_product_type` field, which determines what kinds of data * need to be included with the ad. 
* </pre> * * Protobuf type {@code google.ads.googleads.v21.common.DisplayUploadAdInfo} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v21.common.DisplayUploadAdInfo) com.google.ads.googleads.v21.common.DisplayUploadAdInfoOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v21.common.AdTypeInfosProto.internal_static_google_ads_googleads_v21_common_DisplayUploadAdInfo_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v21.common.AdTypeInfosProto.internal_static_google_ads_googleads_v21_common_DisplayUploadAdInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v21.common.DisplayUploadAdInfo.class, com.google.ads.googleads.v21.common.DisplayUploadAdInfo.Builder.class); } // Construct using com.google.ads.googleads.v21.common.DisplayUploadAdInfo.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; displayUploadProductType_ = 0; if (mediaBundleBuilder_ != null) { mediaBundleBuilder_.clear(); } mediaAssetCase_ = 0; mediaAsset_ = null; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.googleads.v21.common.AdTypeInfosProto.internal_static_google_ads_googleads_v21_common_DisplayUploadAdInfo_descriptor; } @java.lang.Override public com.google.ads.googleads.v21.common.DisplayUploadAdInfo getDefaultInstanceForType() { return com.google.ads.googleads.v21.common.DisplayUploadAdInfo.getDefaultInstance(); } @java.lang.Override public com.google.ads.googleads.v21.common.DisplayUploadAdInfo build() { 
com.google.ads.googleads.v21.common.DisplayUploadAdInfo result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v21.common.DisplayUploadAdInfo buildPartial() { com.google.ads.googleads.v21.common.DisplayUploadAdInfo result = new com.google.ads.googleads.v21.common.DisplayUploadAdInfo(this); if (bitField0_ != 0) { buildPartial0(result); } buildPartialOneofs(result); onBuilt(); return result; } private void buildPartial0(com.google.ads.googleads.v21.common.DisplayUploadAdInfo result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.displayUploadProductType_ = displayUploadProductType_; } } private void buildPartialOneofs(com.google.ads.googleads.v21.common.DisplayUploadAdInfo result) { result.mediaAssetCase_ = mediaAssetCase_; result.mediaAsset_ = this.mediaAsset_; if (mediaAssetCase_ == 2 && mediaBundleBuilder_ != null) { result.mediaAsset_ = mediaBundleBuilder_.build(); } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder 
mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v21.common.DisplayUploadAdInfo) { return mergeFrom((com.google.ads.googleads.v21.common.DisplayUploadAdInfo)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v21.common.DisplayUploadAdInfo other) { if (other == com.google.ads.googleads.v21.common.DisplayUploadAdInfo.getDefaultInstance()) return this; if (other.displayUploadProductType_ != 0) { setDisplayUploadProductTypeValue(other.getDisplayUploadProductTypeValue()); } switch (other.getMediaAssetCase()) { case MEDIA_BUNDLE: { mergeMediaBundle(other.getMediaBundle()); break; } case MEDIAASSET_NOT_SET: { break; } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { displayUploadProductType_ = input.readEnum(); bitField0_ |= 0x00000001; break; } // case 8 case 18: { input.readMessage( getMediaBundleFieldBuilder().getBuilder(), extensionRegistry); mediaAssetCase_ = 2; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int mediaAssetCase_ = 0; private java.lang.Object mediaAsset_; public MediaAssetCase getMediaAssetCase() { return MediaAssetCase.forNumber( mediaAssetCase_); } public Builder 
clearMediaAsset() { mediaAssetCase_ = 0; mediaAsset_ = null; onChanged(); return this; } private int bitField0_; private int displayUploadProductType_ = 0; /** * <pre> * The product type of this ad. See comments on the enum for details. * </pre> * * <code>.google.ads.googleads.v21.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType display_upload_product_type = 1;</code> * @return The enum numeric value on the wire for displayUploadProductType. */ @java.lang.Override public int getDisplayUploadProductTypeValue() { return displayUploadProductType_; } /** * <pre> * The product type of this ad. See comments on the enum for details. * </pre> * * <code>.google.ads.googleads.v21.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType display_upload_product_type = 1;</code> * @param value The enum numeric value on the wire for displayUploadProductType to set. * @return This builder for chaining. */ public Builder setDisplayUploadProductTypeValue(int value) { displayUploadProductType_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * <pre> * The product type of this ad. See comments on the enum for details. * </pre> * * <code>.google.ads.googleads.v21.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType display_upload_product_type = 1;</code> * @return The displayUploadProductType. */ @java.lang.Override public com.google.ads.googleads.v21.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType getDisplayUploadProductType() { com.google.ads.googleads.v21.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType result = com.google.ads.googleads.v21.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType.forNumber(displayUploadProductType_); return result == null ? com.google.ads.googleads.v21.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType.UNRECOGNIZED : result; } /** * <pre> * The product type of this ad. See comments on the enum for details. 
* </pre> * * <code>.google.ads.googleads.v21.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType display_upload_product_type = 1;</code> * @param value The displayUploadProductType to set. * @return This builder for chaining. */ public Builder setDisplayUploadProductType(com.google.ads.googleads.v21.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; displayUploadProductType_ = value.getNumber(); onChanged(); return this; } /** * <pre> * The product type of this ad. See comments on the enum for details. * </pre> * * <code>.google.ads.googleads.v21.enums.DisplayUploadProductTypeEnum.DisplayUploadProductType display_upload_product_type = 1;</code> * @return This builder for chaining. */ public Builder clearDisplayUploadProductType() { bitField0_ = (bitField0_ & ~0x00000001); displayUploadProductType_ = 0; onChanged(); return this; } private com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v21.common.AdMediaBundleAsset, com.google.ads.googleads.v21.common.AdMediaBundleAsset.Builder, com.google.ads.googleads.v21.common.AdMediaBundleAssetOrBuilder> mediaBundleBuilder_; /** * <pre> * A media bundle asset to be used in the ad. For information about the * media bundle for HTML5_UPLOAD_AD, see * https://support.google.com/google-ads/answer/1722096 * Media bundles that are part of dynamic product types use a special format * that needs to be created through the Google Web Designer. See * https://support.google.com/webdesigner/answer/7543898 for more * information. * </pre> * * <code>.google.ads.googleads.v21.common.AdMediaBundleAsset media_bundle = 2;</code> * @return Whether the mediaBundle field is set. */ @java.lang.Override public boolean hasMediaBundle() { return mediaAssetCase_ == 2; } /** * <pre> * A media bundle asset to be used in the ad. 
For information about the * media bundle for HTML5_UPLOAD_AD, see * https://support.google.com/google-ads/answer/1722096 * Media bundles that are part of dynamic product types use a special format * that needs to be created through the Google Web Designer. See * https://support.google.com/webdesigner/answer/7543898 for more * information. * </pre> * * <code>.google.ads.googleads.v21.common.AdMediaBundleAsset media_bundle = 2;</code> * @return The mediaBundle. */ @java.lang.Override public com.google.ads.googleads.v21.common.AdMediaBundleAsset getMediaBundle() { if (mediaBundleBuilder_ == null) { if (mediaAssetCase_ == 2) { return (com.google.ads.googleads.v21.common.AdMediaBundleAsset) mediaAsset_; } return com.google.ads.googleads.v21.common.AdMediaBundleAsset.getDefaultInstance(); } else { if (mediaAssetCase_ == 2) { return mediaBundleBuilder_.getMessage(); } return com.google.ads.googleads.v21.common.AdMediaBundleAsset.getDefaultInstance(); } } /** * <pre> * A media bundle asset to be used in the ad. For information about the * media bundle for HTML5_UPLOAD_AD, see * https://support.google.com/google-ads/answer/1722096 * Media bundles that are part of dynamic product types use a special format * that needs to be created through the Google Web Designer. See * https://support.google.com/webdesigner/answer/7543898 for more * information. * </pre> * * <code>.google.ads.googleads.v21.common.AdMediaBundleAsset media_bundle = 2;</code> */ public Builder setMediaBundle(com.google.ads.googleads.v21.common.AdMediaBundleAsset value) { if (mediaBundleBuilder_ == null) { if (value == null) { throw new NullPointerException(); } mediaAsset_ = value; onChanged(); } else { mediaBundleBuilder_.setMessage(value); } mediaAssetCase_ = 2; return this; } /** * <pre> * A media bundle asset to be used in the ad. 
For information about the * media bundle for HTML5_UPLOAD_AD, see * https://support.google.com/google-ads/answer/1722096 * Media bundles that are part of dynamic product types use a special format * that needs to be created through the Google Web Designer. See * https://support.google.com/webdesigner/answer/7543898 for more * information. * </pre> * * <code>.google.ads.googleads.v21.common.AdMediaBundleAsset media_bundle = 2;</code> */ public Builder setMediaBundle( com.google.ads.googleads.v21.common.AdMediaBundleAsset.Builder builderForValue) { if (mediaBundleBuilder_ == null) { mediaAsset_ = builderForValue.build(); onChanged(); } else { mediaBundleBuilder_.setMessage(builderForValue.build()); } mediaAssetCase_ = 2; return this; } /** * <pre> * A media bundle asset to be used in the ad. For information about the * media bundle for HTML5_UPLOAD_AD, see * https://support.google.com/google-ads/answer/1722096 * Media bundles that are part of dynamic product types use a special format * that needs to be created through the Google Web Designer. See * https://support.google.com/webdesigner/answer/7543898 for more * information. * </pre> * * <code>.google.ads.googleads.v21.common.AdMediaBundleAsset media_bundle = 2;</code> */ public Builder mergeMediaBundle(com.google.ads.googleads.v21.common.AdMediaBundleAsset value) { if (mediaBundleBuilder_ == null) { if (mediaAssetCase_ == 2 && mediaAsset_ != com.google.ads.googleads.v21.common.AdMediaBundleAsset.getDefaultInstance()) { mediaAsset_ = com.google.ads.googleads.v21.common.AdMediaBundleAsset.newBuilder((com.google.ads.googleads.v21.common.AdMediaBundleAsset) mediaAsset_) .mergeFrom(value).buildPartial(); } else { mediaAsset_ = value; } onChanged(); } else { if (mediaAssetCase_ == 2) { mediaBundleBuilder_.mergeFrom(value); } else { mediaBundleBuilder_.setMessage(value); } } mediaAssetCase_ = 2; return this; } /** * <pre> * A media bundle asset to be used in the ad. 
For information about the * media bundle for HTML5_UPLOAD_AD, see * https://support.google.com/google-ads/answer/1722096 * Media bundles that are part of dynamic product types use a special format * that needs to be created through the Google Web Designer. See * https://support.google.com/webdesigner/answer/7543898 for more * information. * </pre> * * <code>.google.ads.googleads.v21.common.AdMediaBundleAsset media_bundle = 2;</code> */ public Builder clearMediaBundle() { if (mediaBundleBuilder_ == null) { if (mediaAssetCase_ == 2) { mediaAssetCase_ = 0; mediaAsset_ = null; onChanged(); } } else { if (mediaAssetCase_ == 2) { mediaAssetCase_ = 0; mediaAsset_ = null; } mediaBundleBuilder_.clear(); } return this; } /** * <pre> * A media bundle asset to be used in the ad. For information about the * media bundle for HTML5_UPLOAD_AD, see * https://support.google.com/google-ads/answer/1722096 * Media bundles that are part of dynamic product types use a special format * that needs to be created through the Google Web Designer. See * https://support.google.com/webdesigner/answer/7543898 for more * information. * </pre> * * <code>.google.ads.googleads.v21.common.AdMediaBundleAsset media_bundle = 2;</code> */ public com.google.ads.googleads.v21.common.AdMediaBundleAsset.Builder getMediaBundleBuilder() { return getMediaBundleFieldBuilder().getBuilder(); } /** * <pre> * A media bundle asset to be used in the ad. For information about the * media bundle for HTML5_UPLOAD_AD, see * https://support.google.com/google-ads/answer/1722096 * Media bundles that are part of dynamic product types use a special format * that needs to be created through the Google Web Designer. See * https://support.google.com/webdesigner/answer/7543898 for more * information. 
* </pre> * * <code>.google.ads.googleads.v21.common.AdMediaBundleAsset media_bundle = 2;</code> */ @java.lang.Override public com.google.ads.googleads.v21.common.AdMediaBundleAssetOrBuilder getMediaBundleOrBuilder() { if ((mediaAssetCase_ == 2) && (mediaBundleBuilder_ != null)) { return mediaBundleBuilder_.getMessageOrBuilder(); } else { if (mediaAssetCase_ == 2) { return (com.google.ads.googleads.v21.common.AdMediaBundleAsset) mediaAsset_; } return com.google.ads.googleads.v21.common.AdMediaBundleAsset.getDefaultInstance(); } } /** * <pre> * A media bundle asset to be used in the ad. For information about the * media bundle for HTML5_UPLOAD_AD, see * https://support.google.com/google-ads/answer/1722096 * Media bundles that are part of dynamic product types use a special format * that needs to be created through the Google Web Designer. See * https://support.google.com/webdesigner/answer/7543898 for more * information. * </pre> * * <code>.google.ads.googleads.v21.common.AdMediaBundleAsset media_bundle = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v21.common.AdMediaBundleAsset, com.google.ads.googleads.v21.common.AdMediaBundleAsset.Builder, com.google.ads.googleads.v21.common.AdMediaBundleAssetOrBuilder> getMediaBundleFieldBuilder() { if (mediaBundleBuilder_ == null) { if (!(mediaAssetCase_ == 2)) { mediaAsset_ = com.google.ads.googleads.v21.common.AdMediaBundleAsset.getDefaultInstance(); } mediaBundleBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v21.common.AdMediaBundleAsset, com.google.ads.googleads.v21.common.AdMediaBundleAsset.Builder, com.google.ads.googleads.v21.common.AdMediaBundleAssetOrBuilder>( (com.google.ads.googleads.v21.common.AdMediaBundleAsset) mediaAsset_, getParentForChildren(), isClean()); mediaAsset_ = null; } mediaAssetCase_ = 2; onChanged(); return mediaBundleBuilder_; } @java.lang.Override public final Builder setUnknownFields( final 
com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v21.common.DisplayUploadAdInfo) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v21.common.DisplayUploadAdInfo) private static final com.google.ads.googleads.v21.common.DisplayUploadAdInfo DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v21.common.DisplayUploadAdInfo(); } public static com.google.ads.googleads.v21.common.DisplayUploadAdInfo getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<DisplayUploadAdInfo> PARSER = new com.google.protobuf.AbstractParser<DisplayUploadAdInfo>() { @java.lang.Override public DisplayUploadAdInfo parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<DisplayUploadAdInfo> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<DisplayUploadAdInfo> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v21.common.DisplayUploadAdInfo getDefaultInstanceForType() { 
return DEFAULT_INSTANCE; } }
oracle/graal
36,972
wasm/src/org.graalvm.wasm/src/org/graalvm/wasm/constants/Bytecode.java
/* * Copyright (c) 2022, 2025, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * The Universal Permissive License (UPL), Version 1.0 * * Subject to the condition set forth below, permission is hereby granted to any * person obtaining a copy of this software, associated documentation and/or * data (collectively the "Software"), free of charge and under any and all * copyright rights in the Software, and any and all patent rights owned or * freely licensable by each licensor hereunder covering either (i) the * unmodified Software as contributed to or provided by such licensor, or (ii) * the Larger Works (as defined below), to deal in both * * (a) the Software, and * * (b) any piece of software and/or hardware listed in the lrgrwrks.txt file if * one is included with the Software each a "Larger Work" to which the Software * is contributed by such licensors), * * without restriction, including without limitation the rights to copy, create * derivative works of, display, perform, and distribute the Software and make, * use, sell, offer for sale, import, export, have made, and have sold the * Software and the Larger Work(s), and to sublicense the foregoing rights on * either these or other terms. * * This license is subject to the following condition: * * The above copyright notice and either this complete permission notice or at a * minimum a reference to the UPL must be included in all copies or substantial * portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package org.graalvm.wasm.constants; import java.lang.reflect.Field; import java.lang.reflect.Modifier; public class Bytecode { public static final int UNREACHABLE = 0x00; public static final int NOP = 0x01; public static final int SKIP_LABEL_U8 = 0x02; public static final int SKIP_LABEL_U16 = 0x03; public static final int RETURN = 0x04; public static final int LABEL_U8 = 0x05; public static final int LABEL_U16 = 0x06; public static final int LABEL_I32 = 0x07; public static final int LOOP = 0x08; public static final int IF = 0x09; public static final int SKIP_LABEL_I32 = 0x0A; public static final int BR_U8 = 0x0B; public static final int BR_I32 = 0x0C; public static final int BR_IF_U8 = 0x0D; public static final int BR_IF_I32 = 0x0E; public static final int BR_TABLE_U8 = 0x0F; public static final int BR_TABLE_I32 = 0x10; public static final int CALL_U8 = 0x11; public static final int CALL_I32 = 0x12; public static final int CALL_INDIRECT_U8 = 0x13; public static final int CALL_INDIRECT_I32 = 0x14; public static final int DROP = 0x15; public static final int DROP_OBJ = 0x16; public static final int SELECT = 0x17; public static final int SELECT_OBJ = 0x18; public static final int LOCAL_GET_U8 = 0x19; public static final int LOCAL_GET_I32 = 0x1A; public static final int LOCAL_GET_OBJ_U8 = 0x1B; public static final int LOCAL_GET_OBJ_I32 = 0x1C; public static final int LOCAL_SET_U8 = 0x1D; public static final int LOCAL_SET_I32 = 0x1E; public static final int LOCAL_SET_OBJ_U8 = 0x1F; public static final int LOCAL_SET_OBJ_I32 = 0x20; public static final int LOCAL_TEE_U8 = 0x21; public static final int LOCAL_TEE_I32 = 0x22; public static final int LOCAL_TEE_OBJ_U8 = 0x23; public static final int 
LOCAL_TEE_OBJ_I32 = 0x24; public static final int GLOBAL_GET_U8 = 0x25; public static final int GLOBAL_GET_I32 = 0x26; public static final int GLOBAL_SET_U8 = 0x27; public static final int GLOBAL_SET_I32 = 0x28; public static final int I32_LOAD = 0x29; public static final int I32_LOAD_U8 = 0x2A; public static final int I32_LOAD_I32 = 0x2B; public static final int I64_LOAD = 0x2C; public static final int I64_LOAD_U8 = 0x2D; public static final int I64_LOAD_I32 = 0x2E; public static final int F32_LOAD = 0x2F; public static final int F32_LOAD_U8 = 0x30; public static final int F32_LOAD_I32 = 0x31; public static final int F64_LOAD = 0x32; public static final int F64_LOAD_U8 = 0x33; public static final int F64_LOAD_I32 = 0x34; public static final int I32_LOAD8_S = 0x35; public static final int I32_LOAD8_S_U8 = 0x36; public static final int I32_LOAD8_S_I32 = 0x37; public static final int I32_LOAD8_U = 0x38; public static final int I32_LOAD8_U_U8 = 0x39; public static final int I32_LOAD8_U_I32 = 0x3A; public static final int I32_LOAD16_S = 0x3B; public static final int I32_LOAD16_S_U8 = 0x3C; public static final int I32_LOAD16_S_I32 = 0x3D; public static final int I32_LOAD16_U = 0x3E; public static final int I32_LOAD16_U_U8 = 0x3F; public static final int I32_LOAD16_U_I32 = 0x40; public static final int I64_LOAD8_S = 0x41; public static final int I64_LOAD8_S_U8 = 0x42; public static final int I64_LOAD8_S_I32 = 0x43; public static final int I64_LOAD8_U = 0x44; public static final int I64_LOAD8_U_U8 = 0x45; public static final int I64_LOAD8_U_I32 = 0x46; public static final int I64_LOAD16_S = 0x47; public static final int I64_LOAD16_S_U8 = 0x48; public static final int I64_LOAD16_S_I32 = 0x49; public static final int I64_LOAD16_U = 0x4A; public static final int I64_LOAD16_U_U8 = 0x4B; public static final int I64_LOAD16_U_I32 = 0x4C; public static final int I64_LOAD32_S = 0x4D; public static final int I64_LOAD32_S_U8 = 0x4E; public static final int I64_LOAD32_S_I32 = 0x4F; 
public static final int I64_LOAD32_U = 0x50; public static final int I64_LOAD32_U_U8 = 0x51; public static final int I64_LOAD32_U_I32 = 0x52; public static final int I32_STORE = 0x53; public static final int I32_STORE_U8 = 0x54; public static final int I32_STORE_I32 = 0x55; public static final int I64_STORE = 0x56; public static final int I64_STORE_U8 = 0x57; public static final int I64_STORE_I32 = 0x58; public static final int F32_STORE = 0x59; public static final int F32_STORE_U8 = 0x5A; public static final int F32_STORE_I32 = 0x5B; public static final int F64_STORE = 0x5C; public static final int F64_STORE_U8 = 0x5D; public static final int F64_STORE_I32 = 0x5E; public static final int I32_STORE_8 = 0x5F; public static final int I32_STORE_8_U8 = 0x60; public static final int I32_STORE_8_I32 = 0x61; public static final int I32_STORE_16 = 0x62; public static final int I32_STORE_16_U8 = 0x63; public static final int I32_STORE_16_I32 = 0x64; public static final int I64_STORE_8 = 0x65; public static final int I64_STORE_8_U8 = 0x66; public static final int I64_STORE_8_I32 = 0x67; public static final int I64_STORE_16 = 0x68; public static final int I64_STORE_16_U8 = 0x69; public static final int I64_STORE_16_I32 = 0x6A; public static final int I64_STORE_32 = 0x6B; public static final int I64_STORE_32_U8 = 0x6C; public static final int I64_STORE_32_I32 = 0x6D; public static final int MEMORY_SIZE = 0x6E; public static final int MEMORY_GROW = 0x6F; public static final int I32_CONST_I8 = 0x70; public static final int I32_CONST_I32 = 0x71; public static final int I64_CONST_I8 = 0x72; public static final int I64_CONST_I64 = 0x73; public static final int F32_CONST = 0x74; public static final int F64_CONST = 0x75; public static final int I32_EQZ = 0x76; public static final int I32_EQ = 0x77; public static final int I32_NE = 0x78; public static final int I32_LT_S = 0x79; public static final int I32_LT_U = 0x7A; public static final int I32_GT_S = 0x7B; public static final int 
I32_GT_U = 0x7C; public static final int I32_LE_S = 0x7D; public static final int I32_LE_U = 0x7E; public static final int I32_GE_S = 0x7F; public static final int I32_GE_U = 0x80; public static final int I64_EQZ = 0x81; public static final int I64_EQ = 0x82; public static final int I64_NE = 0x83; public static final int I64_LT_S = 0x84; public static final int I64_LT_U = 0x85; public static final int I64_GT_S = 0x86; public static final int I64_GT_U = 0x87; public static final int I64_LE_S = 0x88; public static final int I64_LE_U = 0x89; public static final int I64_GE_S = 0x8A; public static final int I64_GE_U = 0x8B; public static final int F32_EQ = 0x8C; public static final int F32_NE = 0x8D; public static final int F32_LT = 0x8E; public static final int F32_GT = 0x8F; public static final int F32_LE = 0x90; public static final int F32_GE = 0x91; public static final int F64_EQ = 0x92; public static final int F64_NE = 0x93; public static final int F64_LT = 0x94; public static final int F64_GT = 0x95; public static final int F64_LE = 0x96; public static final int F64_GE = 0x97; public static final int I32_CLZ = 0x98; public static final int I32_CTZ = 0x99; public static final int I32_POPCNT = 0x9A; public static final int I32_ADD = 0x9B; public static final int I32_SUB = 0x9C; public static final int I32_MUL = 0x9D; public static final int I32_DIV_S = 0x9E; public static final int I32_DIV_U = 0x9F; public static final int I32_REM_S = 0xA0; public static final int I32_REM_U = 0xA1; public static final int I32_AND = 0xA2; public static final int I32_OR = 0xA3; public static final int I32_XOR = 0xA4; public static final int I32_SHL = 0xA5; public static final int I32_SHR_S = 0xA6; public static final int I32_SHR_U = 0xA7; public static final int I32_ROTL = 0xA8; public static final int I32_ROTR = 0xA9; public static final int I64_CLZ = 0xAA; public static final int I64_CTZ = 0xAB; public static final int I64_POPCNT = 0xAC; public static final int I64_ADD = 0xAD; 
public static final int I64_SUB = 0xAE; public static final int I64_MUL = 0xAF; public static final int I64_DIV_S = 0xB0; public static final int I64_DIV_U = 0xB1; public static final int I64_REM_S = 0xB2; public static final int I64_REM_U = 0xB3; public static final int I64_AND = 0xB4; public static final int I64_OR = 0xB5; public static final int I64_XOR = 0xB6; public static final int I64_SHL = 0xB7; public static final int I64_SHR_S = 0xB8; public static final int I64_SHR_U = 0xB9; public static final int I64_ROTL = 0xBA; public static final int I64_ROTR = 0xBB; public static final int F32_ABS = 0xBC; public static final int F32_NEG = 0xBD; public static final int F32_CEIL = 0xBE; public static final int F32_FLOOR = 0xBF; public static final int F32_TRUNC = 0xC0; public static final int F32_NEAREST = 0xC1; public static final int F32_SQRT = 0xC2; public static final int F32_ADD = 0xC3; public static final int F32_SUB = 0xC4; public static final int F32_MUL = 0xC5; public static final int F32_DIV = 0xC6; public static final int F32_MIN = 0xC7; public static final int F32_MAX = 0xC8; public static final int F32_COPYSIGN = 0xC9; public static final int F64_ABS = 0xCA; public static final int F64_NEG = 0xCB; public static final int F64_CEIL = 0xCC; public static final int F64_FLOOR = 0xCD; public static final int F64_TRUNC = 0xCE; public static final int F64_NEAREST = 0xCF; public static final int F64_SQRT = 0xD0; public static final int F64_ADD = 0xD1; public static final int F64_SUB = 0xD2; public static final int F64_MUL = 0xD3; public static final int F64_DIV = 0xD4; public static final int F64_MIN = 0xD5; public static final int F64_MAX = 0xD6; public static final int F64_COPYSIGN = 0xD7; public static final int I32_WRAP_I64 = 0xD8; public static final int I32_TRUNC_F32_S = 0xD9; public static final int I32_TRUNC_F32_U = 0xDA; public static final int I32_TRUNC_F64_S = 0xDB; public static final int I32_TRUNC_F64_U = 0xDC; public static final int 
I64_EXTEND_I32_S = 0xDD; public static final int I64_EXTEND_I32_U = 0xDE; public static final int I64_TRUNC_F32_S = 0xDF; public static final int I64_TRUNC_F32_U = 0xE0; public static final int I64_TRUNC_F64_S = 0xE1; public static final int I64_TRUNC_F64_U = 0xE2; public static final int F32_CONVERT_I32_S = 0xE3; public static final int F32_CONVERT_I32_U = 0xE4; public static final int F32_CONVERT_I64_S = 0xE5; public static final int F32_CONVERT_I64_U = 0xE6; public static final int F32_DEMOTE_F64 = 0xE7; public static final int F64_CONVERT_I32_S = 0xE8; public static final int F64_CONVERT_I32_U = 0xE9; public static final int F64_CONVERT_I64_S = 0xEA; public static final int F64_CONVERT_I64_U = 0xEB; public static final int F64_PROMOTE_F32 = 0xEC; public static final int I32_REINTERPRET_F32 = 0xED; public static final int I64_REINTERPRET_F64 = 0xEE; public static final int F32_REINTERPRET_I32 = 0xEF; public static final int F64_REINTERPRET_I64 = 0xF0; public static final int I32_EXTEND8_S = 0xF1; public static final int I32_EXTEND16_S = 0xF2; public static final int I64_EXTEND8_S = 0xF3; public static final int I64_EXTEND16_S = 0xF4; public static final int I64_EXTEND32_S = 0xF5; public static final int REF_NULL = 0xF6; public static final int REF_IS_NULL = 0xF7; public static final int REF_FUNC = 0xF8; public static final int TABLE_GET = 0xF9; public static final int TABLE_SET = 0xFA; public static final int MISC = 0xFB; public static final int ATOMIC = 0xFC; public static final int VECTOR = 0xFD; public static final int NOTIFY = 0xFE; // Misc opcodes public static final int I32_TRUNC_SAT_F32_S = 0x00; public static final int I32_TRUNC_SAT_F32_U = 0x01; public static final int I32_TRUNC_SAT_F64_S = 0x02; public static final int I32_TRUNC_SAT_F64_U = 0x03; public static final int I64_TRUNC_SAT_F32_S = 0x04; public static final int I64_TRUNC_SAT_F32_U = 0x05; public static final int I64_TRUNC_SAT_F64_S = 0x06; public static final int I64_TRUNC_SAT_F64_U = 0x07; 
public static final int MEMORY_INIT = 0x08; public static final int MEMORY64_INIT = 0x0A; public static final int DATA_DROP = 0x0C; public static final int DATA_DROP_UNSAFE = 0x0D; public static final int MEMORY_COPY = 0x0E; public static final int MEMORY64_COPY_D32_S64 = 0x0F; public static final int MEMORY64_COPY_D64_S32 = 0x10; public static final int MEMORY64_COPY_D64_S64 = 0x11; public static final int MEMORY_FILL = 0x12; public static final int MEMORY64_FILL = 0x13; public static final int MEMORY64_SIZE = 0x14; public static final int MEMORY64_GROW = 0x15; public static final int TABLE_INIT = 0x16; public static final int ELEM_DROP = 0x17; public static final int TABLE_COPY = 0x18; public static final int TABLE_GROW = 0x19; public static final int TABLE_SIZE = 0x1A; public static final int TABLE_FILL = 0x1B; // Exception opcodes public static final int THROW = 0x1C; public static final int THROW_REF = 0x1D; // Atomic opcodes public static final int ATOMIC_I32_LOAD = 0x00; public static final int ATOMIC_I64_LOAD = 0x01; public static final int ATOMIC_I32_LOAD8_U = 0x02; public static final int ATOMIC_I32_LOAD16_U = 0x03; public static final int ATOMIC_I64_LOAD8_U = 0x04; public static final int ATOMIC_I64_LOAD16_U = 0x05; public static final int ATOMIC_I64_LOAD32_U = 0x06; public static final int ATOMIC_I32_STORE = 0x07; public static final int ATOMIC_I64_STORE = 0x08; public static final int ATOMIC_I32_STORE8 = 0x09; public static final int ATOMIC_I32_STORE16 = 0x0A; public static final int ATOMIC_I64_STORE8 = 0x0B; public static final int ATOMIC_I64_STORE16 = 0x0C; public static final int ATOMIC_I64_STORE32 = 0x0D; public static final int ATOMIC_I32_RMW_ADD = 0x0E; public static final int ATOMIC_I64_RMW_ADD = 0x0F; public static final int ATOMIC_I32_RMW8_U_ADD = 0x10; public static final int ATOMIC_I32_RMW16_U_ADD = 0x11; public static final int ATOMIC_I64_RMW8_U_ADD = 0x12; public static final int ATOMIC_I64_RMW16_U_ADD = 0x13; public static final int 
ATOMIC_I64_RMW32_U_ADD = 0x14; public static final int ATOMIC_I32_RMW_SUB = 0x15; public static final int ATOMIC_I64_RMW_SUB = 0x16; public static final int ATOMIC_I32_RMW8_U_SUB = 0x17; public static final int ATOMIC_I32_RMW16_U_SUB = 0x18; public static final int ATOMIC_I64_RMW8_U_SUB = 0x19; public static final int ATOMIC_I64_RMW16_U_SUB = 0x1A; public static final int ATOMIC_I64_RMW32_U_SUB = 0x1B; public static final int ATOMIC_I32_RMW_AND = 0x1C; public static final int ATOMIC_I64_RMW_AND = 0x1D; public static final int ATOMIC_I32_RMW8_U_AND = 0x1E; public static final int ATOMIC_I32_RMW16_U_AND = 0x1F; public static final int ATOMIC_I64_RMW8_U_AND = 0x20; public static final int ATOMIC_I64_RMW16_U_AND = 0x21; public static final int ATOMIC_I64_RMW32_U_AND = 0x22; public static final int ATOMIC_I32_RMW_OR = 0x23; public static final int ATOMIC_I64_RMW_OR = 0x24; public static final int ATOMIC_I32_RMW8_U_OR = 0x25; public static final int ATOMIC_I32_RMW16_U_OR = 0x26; public static final int ATOMIC_I64_RMW8_U_OR = 0x27; public static final int ATOMIC_I64_RMW16_U_OR = 0x28; public static final int ATOMIC_I64_RMW32_U_OR = 0x29; public static final int ATOMIC_I32_RMW_XOR = 0x2A; public static final int ATOMIC_I64_RMW_XOR = 0x2B; public static final int ATOMIC_I32_RMW8_U_XOR = 0x2C; public static final int ATOMIC_I32_RMW16_U_XOR = 0x2D; public static final int ATOMIC_I64_RMW8_U_XOR = 0x2E; public static final int ATOMIC_I64_RMW16_U_XOR = 0x2F; public static final int ATOMIC_I64_RMW32_U_XOR = 0x30; public static final int ATOMIC_I32_RMW_XCHG = 0x31; public static final int ATOMIC_I64_RMW_XCHG = 0x32; public static final int ATOMIC_I32_RMW8_U_XCHG = 0x33; public static final int ATOMIC_I32_RMW16_U_XCHG = 0x34; public static final int ATOMIC_I64_RMW8_U_XCHG = 0x35; public static final int ATOMIC_I64_RMW16_U_XCHG = 0x36; public static final int ATOMIC_I64_RMW32_U_XCHG = 0x37; public static final int ATOMIC_I32_RMW_CMPXCHG = 0x38; public static final int 
ATOMIC_I64_RMW_CMPXCHG = 0x39; public static final int ATOMIC_I32_RMW8_U_CMPXCHG = 0x3A; public static final int ATOMIC_I32_RMW16_U_CMPXCHG = 0x3B; public static final int ATOMIC_I64_RMW8_U_CMPXCHG = 0x3C; public static final int ATOMIC_I64_RMW16_U_CMPXCHG = 0x3D; public static final int ATOMIC_I64_RMW32_U_CMPXCHG = 0x3E; public static final int ATOMIC_FENCE = 0x3F; public static final int ATOMIC_NOTIFY = 0x40; public static final int ATOMIC_WAIT32 = 0x41; public static final int ATOMIC_WAIT64 = 0x42; // Vector opcodes public static final int VECTOR_V128_LOAD = 0x00; public static final int VECTOR_V128_LOAD8X8_S = 0x01; public static final int VECTOR_V128_LOAD8X8_U = 0x02; public static final int VECTOR_V128_LOAD16X4_S = 0x03; public static final int VECTOR_V128_LOAD16X4_U = 0x04; public static final int VECTOR_V128_LOAD32X2_S = 0x05; public static final int VECTOR_V128_LOAD32X2_U = 0x06; public static final int VECTOR_V128_LOAD8_SPLAT = 0x07; public static final int VECTOR_V128_LOAD16_SPLAT = 0x08; public static final int VECTOR_V128_LOAD32_SPLAT = 0x09; public static final int VECTOR_V128_LOAD64_SPLAT = 0x0A; public static final int VECTOR_V128_LOAD32_ZERO = 0x5C; public static final int VECTOR_V128_LOAD64_ZERO = 0x5D; public static final int VECTOR_V128_STORE = 0x0B; public static final int VECTOR_V128_LOAD8_LANE = 0x54; public static final int VECTOR_V128_LOAD16_LANE = 0x55; public static final int VECTOR_V128_LOAD32_LANE = 0x56; public static final int VECTOR_V128_LOAD64_LANE = 0x57; public static final int VECTOR_V128_STORE8_LANE = 0x58; public static final int VECTOR_V128_STORE16_LANE = 0x59; public static final int VECTOR_V128_STORE32_LANE = 0x5A; public static final int VECTOR_V128_STORE64_LANE = 0x5B; public static final int VECTOR_V128_CONST = 0x0C; public static final int VECTOR_I8X16_SHUFFLE = 0x0D; public static final int VECTOR_I8X16_EXTRACT_LANE_S = 0x15; public static final int VECTOR_I8X16_EXTRACT_LANE_U = 0x16; public static final int 
VECTOR_I8X16_REPLACE_LANE = 0x17; public static final int VECTOR_I16X8_EXTRACT_LANE_S = 0x18; public static final int VECTOR_I16X8_EXTRACT_LANE_U = 0x19; public static final int VECTOR_I16X8_REPLACE_LANE = 0x1A; public static final int VECTOR_I32X4_EXTRACT_LANE = 0x1B; public static final int VECTOR_I32X4_REPLACE_LANE = 0x1C; public static final int VECTOR_I64X2_EXTRACT_LANE = 0x1D; public static final int VECTOR_I64X2_REPLACE_LANE = 0x1E; public static final int VECTOR_F32X4_EXTRACT_LANE = 0x1F; public static final int VECTOR_F32X4_REPLACE_LANE = 0x20; public static final int VECTOR_F64X2_EXTRACT_LANE = 0x21; public static final int VECTOR_F64X2_REPLACE_LANE = 0x22; public static final int VECTOR_I8X16_SWIZZLE = 0x0E; public static final int VECTOR_I8X16_SPLAT = 0x0F; public static final int VECTOR_I16X8_SPLAT = 0x10; public static final int VECTOR_I32X4_SPLAT = 0x11; public static final int VECTOR_I64X2_SPLAT = 0x12; public static final int VECTOR_F32X4_SPLAT = 0x13; public static final int VECTOR_F64X2_SPLAT = 0x14; public static final int VECTOR_I8X16_EQ = 0x23; public static final int VECTOR_I8X16_NE = 0x24; public static final int VECTOR_I8X16_LT_S = 0x25; public static final int VECTOR_I8X16_LT_U = 0x26; public static final int VECTOR_I8X16_GT_S = 0x27; public static final int VECTOR_I8X16_GT_U = 0x28; public static final int VECTOR_I8X16_LE_S = 0x29; public static final int VECTOR_I8X16_LE_U = 0x2A; public static final int VECTOR_I8X16_GE_S = 0x2B; public static final int VECTOR_I8X16_GE_U = 0x2C; public static final int VECTOR_I16X8_EQ = 0x2D; public static final int VECTOR_I16X8_NE = 0x2E; public static final int VECTOR_I16X8_LT_S = 0x2F; public static final int VECTOR_I16X8_LT_U = 0x30; public static final int VECTOR_I16X8_GT_S = 0x31; public static final int VECTOR_I16X8_GT_U = 0x32; public static final int VECTOR_I16X8_LE_S = 0x33; public static final int VECTOR_I16X8_LE_U = 0x34; public static final int VECTOR_I16X8_GE_S = 0x35; public static final 
int VECTOR_I16X8_GE_U = 0x36;

    // --- i32x4 lane-wise comparisons ---
    public static final int VECTOR_I32X4_EQ = 0x37;
    public static final int VECTOR_I32X4_NE = 0x38;
    public static final int VECTOR_I32X4_LT_S = 0x39;
    public static final int VECTOR_I32X4_LT_U = 0x3A;
    public static final int VECTOR_I32X4_GT_S = 0x3B;
    public static final int VECTOR_I32X4_GT_U = 0x3C;
    public static final int VECTOR_I32X4_LE_S = 0x3D;
    public static final int VECTOR_I32X4_LE_U = 0x3E;
    public static final int VECTOR_I32X4_GE_S = 0x3F;
    public static final int VECTOR_I32X4_GE_U = 0x40;

    // --- i64x2 lane-wise comparisons (signed forms only, per the spec) ---
    public static final int VECTOR_I64X2_EQ = 0xD6;
    public static final int VECTOR_I64X2_NE = 0xD7;
    public static final int VECTOR_I64X2_LT_S = 0xD8;
    public static final int VECTOR_I64X2_GT_S = 0xD9;
    public static final int VECTOR_I64X2_LE_S = 0xDA;
    public static final int VECTOR_I64X2_GE_S = 0xDB;

    // --- f32x4 lane-wise comparisons ---
    public static final int VECTOR_F32X4_EQ = 0x41;
    public static final int VECTOR_F32X4_NE = 0x42;
    public static final int VECTOR_F32X4_LT = 0x43;
    public static final int VECTOR_F32X4_GT = 0x44;
    public static final int VECTOR_F32X4_LE = 0x45;
    public static final int VECTOR_F32X4_GE = 0x46;

    // --- f64x2 lane-wise comparisons ---
    public static final int VECTOR_F64X2_EQ = 0x47;
    public static final int VECTOR_F64X2_NE = 0x48;
    public static final int VECTOR_F64X2_LT = 0x49;
    public static final int VECTOR_F64X2_GT = 0x4A;
    public static final int VECTOR_F64X2_LE = 0x4B;
    public static final int VECTOR_F64X2_GE = 0x4C;

    // --- v128 bitwise operations ---
    public static final int VECTOR_V128_NOT = 0x4D;
    public static final int VECTOR_V128_AND = 0x4E;
    public static final int VECTOR_V128_ANDNOT = 0x4F;
    public static final int VECTOR_V128_OR = 0x50;
    public static final int VECTOR_V128_XOR = 0x51;
    public static final int VECTOR_V128_BITSELECT = 0x52;
    public static final int VECTOR_V128_ANY_TRUE = 0x53;

    // --- i8x16 integer operations ---
    public static final int VECTOR_I8X16_ABS = 0x60;
    public static final int VECTOR_I8X16_NEG = 0x61;
    public static final int VECTOR_I8X16_POPCNT = 0x62;
    public static final int VECTOR_I8X16_ALL_TRUE = 0x63;
    public static final int VECTOR_I8X16_BITMASK = 0x64;
    public static final int VECTOR_I8X16_NARROW_I16X8_S = 0x65;
    public static final int VECTOR_I8X16_NARROW_I16X8_U = 0x66;
    public static final int VECTOR_I8X16_SHL = 0x6B;
    public static final int VECTOR_I8X16_SHR_S = 0x6C;
    public static final int VECTOR_I8X16_SHR_U = 0x6D;
    public static final int VECTOR_I8X16_ADD = 0x6E;
    public static final int VECTOR_I8X16_ADD_SAT_S = 0x6F;
    public static final int VECTOR_I8X16_ADD_SAT_U = 0x70;
    public static final int VECTOR_I8X16_SUB = 0x71;
    public static final int VECTOR_I8X16_SUB_SAT_S = 0x72;
    public static final int VECTOR_I8X16_SUB_SAT_U = 0x73;
    public static final int VECTOR_I8X16_MIN_S = 0x76;
    public static final int VECTOR_I8X16_MIN_U = 0x77;
    public static final int VECTOR_I8X16_MAX_S = 0x78;
    public static final int VECTOR_I8X16_MAX_U = 0x79;
    public static final int VECTOR_I8X16_AVGR_U = 0x7B;

    // --- i16x8 integer operations ---
    public static final int VECTOR_I16X8_EXTADD_PAIRWISE_I8X16_S = 0x7C;
    public static final int VECTOR_I16X8_EXTADD_PAIRWISE_I8X16_U = 0x7D;
    public static final int VECTOR_I16X8_ABS = 0x80;
    public static final int VECTOR_I16X8_NEG = 0x81;
    public static final int VECTOR_I16X8_Q15MULR_SAT_S = 0x82;
    public static final int VECTOR_I16X8_ALL_TRUE = 0x83;
    public static final int VECTOR_I16X8_BITMASK = 0x84;
    public static final int VECTOR_I16X8_NARROW_I32X4_S = 0x85;
    public static final int VECTOR_I16X8_NARROW_I32X4_U = 0x86;
    public static final int VECTOR_I16X8_EXTEND_LOW_I8X16_S = 0x87;
    public static final int VECTOR_I16X8_EXTEND_HIGH_I8X16_S = 0x88;
    public static final int VECTOR_I16X8_EXTEND_LOW_I8X16_U = 0x89;
    public static final int VECTOR_I16X8_EXTEND_HIGH_I8X16_U = 0x8A;
    public static final int VECTOR_I16X8_SHL = 0x8B;
    public static final int VECTOR_I16X8_SHR_S = 0x8C;
    public static final int VECTOR_I16X8_SHR_U = 0x8D;
    public static final int VECTOR_I16X8_ADD = 0x8E;
    public static final int VECTOR_I16X8_ADD_SAT_S = 0x8F;
    public static final int VECTOR_I16X8_ADD_SAT_U = 0x90;
    public static final int VECTOR_I16X8_SUB = 0x91;
    public static final int VECTOR_I16X8_SUB_SAT_S = 0x92;
    public static final int VECTOR_I16X8_SUB_SAT_U = 0x93;
    public static final int VECTOR_I16X8_MUL = 0x95;
    public static final int VECTOR_I16X8_MIN_S = 0x96;
    public static final int VECTOR_I16X8_MIN_U = 0x97;
    public static final int VECTOR_I16X8_MAX_S = 0x98;
    public static final int VECTOR_I16X8_MAX_U = 0x99;
    public static final int VECTOR_I16X8_AVGR_U = 0x9B;
    public static final int VECTOR_I16X8_EXTMUL_LOW_I8X16_S = 0x9C;
    public static final int VECTOR_I16X8_EXTMUL_HIGH_I8X16_S = 0x9D;
    public static final int VECTOR_I16X8_EXTMUL_LOW_I8X16_U = 0x9E;
    public static final int VECTOR_I16X8_EXTMUL_HIGH_I8X16_U = 0x9F;

    // --- i32x4 integer operations ---
    public static final int VECTOR_I32X4_EXTADD_PAIRWISE_I16X8_S = 0x7E;
    public static final int VECTOR_I32X4_EXTADD_PAIRWISE_I16X8_U = 0x7F;
    public static final int VECTOR_I32X4_ABS = 0xA0;
    public static final int VECTOR_I32X4_NEG = 0xA1;
    public static final int VECTOR_I32X4_ALL_TRUE = 0xA3;
    public static final int VECTOR_I32X4_BITMASK = 0xA4;
    public static final int VECTOR_I32X4_EXTEND_LOW_I16X8_S = 0xA7;
    public static final int VECTOR_I32X4_EXTEND_HIGH_I16X8_S = 0xA8;
    public static final int VECTOR_I32X4_EXTEND_LOW_I16X8_U = 0xA9;
    public static final int VECTOR_I32X4_EXTEND_HIGH_I16X8_U = 0xAA;
    public static final int VECTOR_I32X4_SHL = 0xAB;
    public static final int VECTOR_I32X4_SHR_S = 0xAC;
    public static final int VECTOR_I32X4_SHR_U = 0xAD;
    public static final int VECTOR_I32X4_ADD = 0xAE;
    public static final int VECTOR_I32X4_SUB = 0xB1;
    public static final int VECTOR_I32X4_MUL = 0xB5;
    public static final int VECTOR_I32X4_MIN_S = 0xB6;
    public static final int VECTOR_I32X4_MIN_U = 0xB7;
    public static final int VECTOR_I32X4_MAX_S = 0xB8;
    public static final int VECTOR_I32X4_MAX_U = 0xB9;
    public static final int VECTOR_I32X4_DOT_I16X8_S = 0xBA;
    public static final int VECTOR_I32X4_EXTMUL_LOW_I16X8_S = 0xBC;
    public static final int VECTOR_I32X4_EXTMUL_HIGH_I16X8_S = 0xBD;
    public static final int VECTOR_I32X4_EXTMUL_LOW_I16X8_U = 0xBE;
    public static final int VECTOR_I32X4_EXTMUL_HIGH_I16X8_U = 0xBF;

    // --- i64x2 integer operations ---
    public static final int VECTOR_I64X2_ABS = 0xC0;
    public static final int VECTOR_I64X2_NEG = 0xC1;
    public static final int VECTOR_I64X2_ALL_TRUE = 0xC3;
    public static final int VECTOR_I64X2_BITMASK = 0xC4;
    public static final int VECTOR_I64X2_EXTEND_LOW_I32X4_S = 0xC7;
    public static final int VECTOR_I64X2_EXTEND_HIGH_I32X4_S = 0xC8;
    public static final int VECTOR_I64X2_EXTEND_LOW_I32X4_U = 0xC9;
    public static final int VECTOR_I64X2_EXTEND_HIGH_I32X4_U = 0xCA;
    public static final int VECTOR_I64X2_SHL = 0xCB;
    public static final int VECTOR_I64X2_SHR_S = 0xCC;
    public static final int VECTOR_I64X2_SHR_U = 0xCD;
    public static final int VECTOR_I64X2_ADD = 0xCE;
    public static final int VECTOR_I64X2_SUB = 0xD1;
    public static final int VECTOR_I64X2_MUL = 0xD5;
    public static final int VECTOR_I64X2_EXTMUL_LOW_I32X4_S = 0xDC;
    public static final int VECTOR_I64X2_EXTMUL_HIGH_I32X4_S = 0xDD;
    public static final int VECTOR_I64X2_EXTMUL_LOW_I32X4_U = 0xDE;
    public static final int VECTOR_I64X2_EXTMUL_HIGH_I32X4_U = 0xDF;

    // --- f32x4 floating-point operations ---
    public static final int VECTOR_F32X4_CEIL = 0x67;
    public static final int VECTOR_F32X4_FLOOR = 0x68;
    public static final int VECTOR_F32X4_TRUNC = 0x69;
    public static final int VECTOR_F32X4_NEAREST = 0x6A;
    public static final int VECTOR_F32X4_ABS = 0xE0;
    public static final int VECTOR_F32X4_NEG = 0xE1;
    public static final int VECTOR_F32X4_SQRT = 0xE3;
    public static final int VECTOR_F32X4_ADD = 0xE4;
    public static final int VECTOR_F32X4_SUB = 0xE5;
    public static final int VECTOR_F32X4_MUL = 0xE6;
    public static final int VECTOR_F32X4_DIV = 0xE7;
    public static final int VECTOR_F32X4_MIN = 0xE8;
    public static final int VECTOR_F32X4_MAX = 0xE9;
    public static final int VECTOR_F32X4_PMIN = 0xEA;
    public static final int VECTOR_F32X4_PMAX = 0xEB;

    // --- f64x2 floating-point operations ---
    public static final int VECTOR_F64X2_CEIL = 0x74;
    public static final int VECTOR_F64X2_FLOOR = 0x75;
    public static final int VECTOR_F64X2_TRUNC = 0x7A;
    public static final int VECTOR_F64X2_NEAREST = 0x94;
    public static final int VECTOR_F64X2_ABS = 0xEC;
    public static final int VECTOR_F64X2_NEG = 0xED;
    public static final int VECTOR_F64X2_SQRT = 0xEF;
    public static final int VECTOR_F64X2_ADD = 0xF0;
    public static final int VECTOR_F64X2_SUB = 0xF1;
    public static final int VECTOR_F64X2_MUL = 0xF2;
    public static final int VECTOR_F64X2_DIV = 0xF3;
    public static final int VECTOR_F64X2_MIN = 0xF4;
    public static final int VECTOR_F64X2_MAX = 0xF5;
    public static final int VECTOR_F64X2_PMIN = 0xF6;
    public static final int VECTOR_F64X2_PMAX = 0xF7;

    // --- conversions between integer and floating-point lane shapes ---
    public static final int VECTOR_I32X4_TRUNC_SAT_F32X4_S = 0xF8;
    public static final int VECTOR_I32X4_TRUNC_SAT_F32X4_U = 0xF9;
    public static final int VECTOR_F32X4_CONVERT_I32X4_S = 0xFA;
    public static final int VECTOR_F32X4_CONVERT_I32X4_U = 0xFB;
    public static final int VECTOR_I32X4_TRUNC_SAT_F64X2_S_ZERO = 0xFC;
    public static final int VECTOR_I32X4_TRUNC_SAT_F64X2_U_ZERO = 0xFD;
    public static final int VECTOR_F64X2_CONVERT_LOW_I32X4_S = 0xFE;
    public static final int VECTOR_F64X2_CONVERT_LOW_I32X4_U = 0xFF;
    public static final int VECTOR_F32X4_DEMOTE_F64X2_ZERO = 0x5E;
    public static final int VECTOR_F64X2_PROMOTE_LOW_F32X4 = 0x5F;

    // Relaxed SIMD
    // The binary encoding of the Relaxed SIMD instructions uses opcodes higher than 0xFF. To avoid
    // using multi-byte encoding for vector bytecodes, we make use of the unused opcodes in the
    // vector opcode space. There happen to be the exact same number of Relaxed SIMD instructions
    // as there are unused opcodes in the 0x00-0xFF range. A similar approach was used when
    // prototyping the Relaxed SIMD proposal. We use the same single-byte bytecodes as those given
    // by the "prototype opcode" column of
    // https://github.com/WebAssembly/relaxed-simd/blob/c3f9359af2cd607cc46b0a3274f90ea52543a2f2/proposals/relaxed-simd/Overview.md#binary-format
    // except for the last three instructions for which we use the last 3 unused opcodes in the
    // 0x00-0xFF range (0x9A, 0xBB, 0xC2).
    public static final int VECTOR_I8X16_RELAXED_SWIZZLE = 0xA2;
    public static final int VECTOR_I32X4_RELAXED_TRUNC_F32X4_S = 0xA5;
    public static final int VECTOR_I32X4_RELAXED_TRUNC_F32X4_U = 0xA6;
    public static final int VECTOR_I32X4_RELAXED_TRUNC_F64X2_S_ZERO = 0xC5;
    public static final int VECTOR_I32X4_RELAXED_TRUNC_F64X2_U_ZERO = 0xC6;
    public static final int VECTOR_F32X4_RELAXED_MADD = 0xAF;
    public static final int VECTOR_F32X4_RELAXED_NMADD = 0xB0;
    public static final int VECTOR_F64X2_RELAXED_MADD = 0xCF;
    public static final int VECTOR_F64X2_RELAXED_NMADD = 0xD0;
    public static final int VECTOR_I8X16_RELAXED_LANESELECT = 0xB2;
    public static final int VECTOR_I16X8_RELAXED_LANESELECT = 0xB3;
    public static final int VECTOR_I32X4_RELAXED_LANESELECT = 0xD2;
    public static final int VECTOR_I64X2_RELAXED_LANESELECT = 0xD3;
    public static final int VECTOR_F32X4_RELAXED_MIN = 0xB4;
    public static final int VECTOR_F32X4_RELAXED_MAX = 0xE2;
    public static final int VECTOR_F64X2_RELAXED_MIN = 0xD4;
    public static final int VECTOR_F64X2_RELAXED_MAX = 0xEE;
    public static final int VECTOR_I16X8_RELAXED_Q15MULR_S = 0x9A;
    public static final int VECTOR_I16X8_RELAXED_DOT_I8X16_I7X16_S = 0xBB;
    public static final int VECTOR_I32X4_RELAXED_DOT_I8X16_I7X16_ADD_S = 0xC2;

    // Shared zero-length array, avoids reallocating an empty byte[].
    public static final byte[] EMPTY_BYTES = {};

    // Constant offset between the Bytecode and Instructions encodings of the opcodes the two
    // classes share, anchored at I32_EQ (both encodings lay those opcodes out contiguously, so a
    // single offset translates between them).
    public static final int COMMON_BYTECODE_OFFSET = Bytecode.I32_EQ - Instructions.I32_EQ;

    // Lookup table: index = VECTOR_* opcode (as declared in this class), value = the single-byte
    // Bytecode constant of the same name. Populated reflectively below so the two classes cannot
    // drift apart silently: adding a VECTOR_* constant here without a matching Bytecode constant
    // fails class initialization with NoSuchFieldException.
    private static final byte[] VECTOR_OPCODE_TO_BYTECODE;

    static {
        try {
            // First pass: find the largest VECTOR_* opcode to size the table.
            int maxOpcode = 0;
            for (Field f : Instructions.class.getDeclaredFields()) {
                if (Modifier.isStatic(f.getModifiers()) && f.getType() == int.class && f.getName().startsWith("VECTOR_")) {
                    int opcode = f.getInt(null);
                    maxOpcode = Math.max(maxOpcode, opcode);
                }
            }
            VECTOR_OPCODE_TO_BYTECODE = new byte[maxOpcode + 1];
            // Second pass: for each VECTOR_* constant, store the same-named Bytecode value,
            // narrowed to a byte (asserted to fit in 0x00-0xFF first).
            for (Field f : Instructions.class.getDeclaredFields()) {
                if (Modifier.isStatic(f.getModifiers()) && f.getType() == int.class && f.getName().startsWith("VECTOR_")) {
                    int opcode = f.getInt(null);
                    int bytecode = Bytecode.class.getDeclaredField(f.getName()).getInt(null);
                    assert bytecode >= 0 && bytecode <= 0xFF;
                    VECTOR_OPCODE_TO_BYTECODE[opcode] = (byte) bytecode;
                }
            }
        } catch (IllegalAccessException | NoSuchFieldException e) {
            // Reflection over our own public constants should never fail; if it does, the two
            // opcode tables are out of sync and the class must not load.
            throw new RuntimeException(e);
        }
    }

    /**
     * Translates a vector instruction opcode (a {@code VECTOR_*} constant of this class) into the
     * corresponding single-byte {@code Bytecode} value.
     *
     * @param opcode a {@code VECTOR_*} opcode; values outside the declared set read a zero entry
     *            or throw {@link ArrayIndexOutOfBoundsException}
     * @return the matching bytecode, widened as an unsigned byte (0-255)
     */
    public static final int vectorOpcodeToBytecode(int opcode) {
        return Byte.toUnsignedInt(VECTOR_OPCODE_TO_BYTECODE[opcode]);
    }
}
googleapis/google-cloud-java
36,516
java-redis/proto-google-cloud-redis-v1/src/main/java/com/google/cloud/redis/v1/WeeklyMaintenanceWindow.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/redis/v1/cloud_redis.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.redis.v1; /** * * * <pre> * Time window in which disruptive maintenance updates occur. Non-disruptive * updates can occur inside or outside this window. * </pre> * * Protobuf type {@code google.cloud.redis.v1.WeeklyMaintenanceWindow} */ public final class WeeklyMaintenanceWindow extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.redis.v1.WeeklyMaintenanceWindow) WeeklyMaintenanceWindowOrBuilder { private static final long serialVersionUID = 0L; // Use WeeklyMaintenanceWindow.newBuilder() to construct. 
private WeeklyMaintenanceWindow(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private WeeklyMaintenanceWindow() { day_ = 0; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new WeeklyMaintenanceWindow(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.redis.v1.CloudRedisServiceV1Proto .internal_static_google_cloud_redis_v1_WeeklyMaintenanceWindow_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.redis.v1.CloudRedisServiceV1Proto .internal_static_google_cloud_redis_v1_WeeklyMaintenanceWindow_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.redis.v1.WeeklyMaintenanceWindow.class, com.google.cloud.redis.v1.WeeklyMaintenanceWindow.Builder.class); } private int bitField0_; public static final int DAY_FIELD_NUMBER = 1; private int day_ = 0; /** * * * <pre> * Required. The day of week that maintenance updates occur. * </pre> * * <code>.google.type.DayOfWeek day = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The enum numeric value on the wire for day. */ @java.lang.Override public int getDayValue() { return day_; } /** * * * <pre> * Required. The day of week that maintenance updates occur. * </pre> * * <code>.google.type.DayOfWeek day = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The day. */ @java.lang.Override public com.google.type.DayOfWeek getDay() { com.google.type.DayOfWeek result = com.google.type.DayOfWeek.forNumber(day_); return result == null ? com.google.type.DayOfWeek.UNRECOGNIZED : result; } public static final int START_TIME_FIELD_NUMBER = 2; private com.google.type.TimeOfDay startTime_; /** * * * <pre> * Required. Start time of the window in UTC time. 
* </pre> * * <code>.google.type.TimeOfDay start_time = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return Whether the startTime field is set. */ @java.lang.Override public boolean hasStartTime() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. Start time of the window in UTC time. * </pre> * * <code>.google.type.TimeOfDay start_time = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The startTime. */ @java.lang.Override public com.google.type.TimeOfDay getStartTime() { return startTime_ == null ? com.google.type.TimeOfDay.getDefaultInstance() : startTime_; } /** * * * <pre> * Required. Start time of the window in UTC time. * </pre> * * <code>.google.type.TimeOfDay start_time = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ @java.lang.Override public com.google.type.TimeOfDayOrBuilder getStartTimeOrBuilder() { return startTime_ == null ? com.google.type.TimeOfDay.getDefaultInstance() : startTime_; } public static final int DURATION_FIELD_NUMBER = 3; private com.google.protobuf.Duration duration_; /** * * * <pre> * Output only. Duration of the maintenance window. The current window is * fixed at 1 hour. * </pre> * * <code>.google.protobuf.Duration duration = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return Whether the duration field is set. */ @java.lang.Override public boolean hasDuration() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Output only. Duration of the maintenance window. The current window is * fixed at 1 hour. * </pre> * * <code>.google.protobuf.Duration duration = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The duration. */ @java.lang.Override public com.google.protobuf.Duration getDuration() { return duration_ == null ? com.google.protobuf.Duration.getDefaultInstance() : duration_; } /** * * * <pre> * Output only. Duration of the maintenance window. The current window is * fixed at 1 hour. 
* </pre> * * <code>.google.protobuf.Duration duration = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ @java.lang.Override public com.google.protobuf.DurationOrBuilder getDurationOrBuilder() { return duration_ == null ? com.google.protobuf.Duration.getDefaultInstance() : duration_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (day_ != com.google.type.DayOfWeek.DAY_OF_WEEK_UNSPECIFIED.getNumber()) { output.writeEnum(1, day_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getStartTime()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(3, getDuration()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (day_ != com.google.type.DayOfWeek.DAY_OF_WEEK_UNSPECIFIED.getNumber()) { size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, day_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getStartTime()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getDuration()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.redis.v1.WeeklyMaintenanceWindow)) { return super.equals(obj); } com.google.cloud.redis.v1.WeeklyMaintenanceWindow other = (com.google.cloud.redis.v1.WeeklyMaintenanceWindow) obj; if (day_ != other.day_) return false; if (hasStartTime() != other.hasStartTime()) return 
false; if (hasStartTime()) { if (!getStartTime().equals(other.getStartTime())) return false; } if (hasDuration() != other.hasDuration()) return false; if (hasDuration()) { if (!getDuration().equals(other.getDuration())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + DAY_FIELD_NUMBER; hash = (53 * hash) + day_; if (hasStartTime()) { hash = (37 * hash) + START_TIME_FIELD_NUMBER; hash = (53 * hash) + getStartTime().hashCode(); } if (hasDuration()) { hash = (37 * hash) + DURATION_FIELD_NUMBER; hash = (53 * hash) + getDuration().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.redis.v1.WeeklyMaintenanceWindow parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.redis.v1.WeeklyMaintenanceWindow parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.redis.v1.WeeklyMaintenanceWindow parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.redis.v1.WeeklyMaintenanceWindow parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.redis.v1.WeeklyMaintenanceWindow parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data); } public static com.google.cloud.redis.v1.WeeklyMaintenanceWindow parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.redis.v1.WeeklyMaintenanceWindow parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.redis.v1.WeeklyMaintenanceWindow parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.redis.v1.WeeklyMaintenanceWindow parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.redis.v1.WeeklyMaintenanceWindow parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.redis.v1.WeeklyMaintenanceWindow parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.redis.v1.WeeklyMaintenanceWindow parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder 
newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.redis.v1.WeeklyMaintenanceWindow prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Time window in which disruptive maintenance updates occur. Non-disruptive * updates can occur inside or outside this window. * </pre> * * Protobuf type {@code google.cloud.redis.v1.WeeklyMaintenanceWindow} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.redis.v1.WeeklyMaintenanceWindow) com.google.cloud.redis.v1.WeeklyMaintenanceWindowOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.redis.v1.CloudRedisServiceV1Proto .internal_static_google_cloud_redis_v1_WeeklyMaintenanceWindow_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.redis.v1.CloudRedisServiceV1Proto .internal_static_google_cloud_redis_v1_WeeklyMaintenanceWindow_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.redis.v1.WeeklyMaintenanceWindow.class, com.google.cloud.redis.v1.WeeklyMaintenanceWindow.Builder.class); } // Construct using com.google.cloud.redis.v1.WeeklyMaintenanceWindow.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if 
(com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getStartTimeFieldBuilder(); getDurationFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; day_ = 0; startTime_ = null; if (startTimeBuilder_ != null) { startTimeBuilder_.dispose(); startTimeBuilder_ = null; } duration_ = null; if (durationBuilder_ != null) { durationBuilder_.dispose(); durationBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.redis.v1.CloudRedisServiceV1Proto .internal_static_google_cloud_redis_v1_WeeklyMaintenanceWindow_descriptor; } @java.lang.Override public com.google.cloud.redis.v1.WeeklyMaintenanceWindow getDefaultInstanceForType() { return com.google.cloud.redis.v1.WeeklyMaintenanceWindow.getDefaultInstance(); } @java.lang.Override public com.google.cloud.redis.v1.WeeklyMaintenanceWindow build() { com.google.cloud.redis.v1.WeeklyMaintenanceWindow result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.redis.v1.WeeklyMaintenanceWindow buildPartial() { com.google.cloud.redis.v1.WeeklyMaintenanceWindow result = new com.google.cloud.redis.v1.WeeklyMaintenanceWindow(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.redis.v1.WeeklyMaintenanceWindow result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.day_ = day_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.startTime_ = startTimeBuilder_ == null ? startTime_ : startTimeBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000004) != 0)) { result.duration_ = durationBuilder_ == null ? 
duration_ : durationBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.redis.v1.WeeklyMaintenanceWindow) { return mergeFrom((com.google.cloud.redis.v1.WeeklyMaintenanceWindow) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.redis.v1.WeeklyMaintenanceWindow other) { if (other == com.google.cloud.redis.v1.WeeklyMaintenanceWindow.getDefaultInstance()) return this; if (other.day_ != 0) { setDayValue(other.getDayValue()); } if (other.hasStartTime()) { mergeStartTime(other.getStartTime()); } if (other.hasDuration()) { mergeDuration(other.getDuration()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if 
(extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { day_ = input.readEnum(); bitField0_ |= 0x00000001; break; } // case 8 case 18: { input.readMessage(getStartTimeFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 case 26: { input.readMessage(getDurationFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private int day_ = 0; /** * * * <pre> * Required. The day of week that maintenance updates occur. * </pre> * * <code>.google.type.DayOfWeek day = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The enum numeric value on the wire for day. */ @java.lang.Override public int getDayValue() { return day_; } /** * * * <pre> * Required. The day of week that maintenance updates occur. * </pre> * * <code>.google.type.DayOfWeek day = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The enum numeric value on the wire for day to set. * @return This builder for chaining. */ public Builder setDayValue(int value) { day_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The day of week that maintenance updates occur. * </pre> * * <code>.google.type.DayOfWeek day = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The day. */ @java.lang.Override public com.google.type.DayOfWeek getDay() { com.google.type.DayOfWeek result = com.google.type.DayOfWeek.forNumber(day_); return result == null ? 
com.google.type.DayOfWeek.UNRECOGNIZED : result; } /** * * * <pre> * Required. The day of week that maintenance updates occur. * </pre> * * <code>.google.type.DayOfWeek day = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The day to set. * @return This builder for chaining. */ public Builder setDay(com.google.type.DayOfWeek value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; day_ = value.getNumber(); onChanged(); return this; } /** * * * <pre> * Required. The day of week that maintenance updates occur. * </pre> * * <code>.google.type.DayOfWeek day = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. */ public Builder clearDay() { bitField0_ = (bitField0_ & ~0x00000001); day_ = 0; onChanged(); return this; } private com.google.type.TimeOfDay startTime_; private com.google.protobuf.SingleFieldBuilderV3< com.google.type.TimeOfDay, com.google.type.TimeOfDay.Builder, com.google.type.TimeOfDayOrBuilder> startTimeBuilder_; /** * * * <pre> * Required. Start time of the window in UTC time. * </pre> * * <code>.google.type.TimeOfDay start_time = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return Whether the startTime field is set. */ public boolean hasStartTime() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. Start time of the window in UTC time. * </pre> * * <code>.google.type.TimeOfDay start_time = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The startTime. */ public com.google.type.TimeOfDay getStartTime() { if (startTimeBuilder_ == null) { return startTime_ == null ? com.google.type.TimeOfDay.getDefaultInstance() : startTime_; } else { return startTimeBuilder_.getMessage(); } } /** * * * <pre> * Required. Start time of the window in UTC time. 
* </pre> * * <code>.google.type.TimeOfDay start_time = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public Builder setStartTime(com.google.type.TimeOfDay value) { if (startTimeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } startTime_ = value; } else { startTimeBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. Start time of the window in UTC time. * </pre> * * <code>.google.type.TimeOfDay start_time = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public Builder setStartTime(com.google.type.TimeOfDay.Builder builderForValue) { if (startTimeBuilder_ == null) { startTime_ = builderForValue.build(); } else { startTimeBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. Start time of the window in UTC time. * </pre> * * <code>.google.type.TimeOfDay start_time = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public Builder mergeStartTime(com.google.type.TimeOfDay value) { if (startTimeBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && startTime_ != null && startTime_ != com.google.type.TimeOfDay.getDefaultInstance()) { getStartTimeBuilder().mergeFrom(value); } else { startTime_ = value; } } else { startTimeBuilder_.mergeFrom(value); } if (startTime_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. Start time of the window in UTC time. * </pre> * * <code>.google.type.TimeOfDay start_time = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public Builder clearStartTime() { bitField0_ = (bitField0_ & ~0x00000002); startTime_ = null; if (startTimeBuilder_ != null) { startTimeBuilder_.dispose(); startTimeBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. Start time of the window in UTC time. 
* </pre> * * <code>.google.type.TimeOfDay start_time = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public com.google.type.TimeOfDay.Builder getStartTimeBuilder() { bitField0_ |= 0x00000002; onChanged(); return getStartTimeFieldBuilder().getBuilder(); } /** * * * <pre> * Required. Start time of the window in UTC time. * </pre> * * <code>.google.type.TimeOfDay start_time = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public com.google.type.TimeOfDayOrBuilder getStartTimeOrBuilder() { if (startTimeBuilder_ != null) { return startTimeBuilder_.getMessageOrBuilder(); } else { return startTime_ == null ? com.google.type.TimeOfDay.getDefaultInstance() : startTime_; } } /** * * * <pre> * Required. Start time of the window in UTC time. * </pre> * * <code>.google.type.TimeOfDay start_time = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.type.TimeOfDay, com.google.type.TimeOfDay.Builder, com.google.type.TimeOfDayOrBuilder> getStartTimeFieldBuilder() { if (startTimeBuilder_ == null) { startTimeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.type.TimeOfDay, com.google.type.TimeOfDay.Builder, com.google.type.TimeOfDayOrBuilder>( getStartTime(), getParentForChildren(), isClean()); startTime_ = null; } return startTimeBuilder_; } private com.google.protobuf.Duration duration_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Duration, com.google.protobuf.Duration.Builder, com.google.protobuf.DurationOrBuilder> durationBuilder_; /** * * * <pre> * Output only. Duration of the maintenance window. The current window is * fixed at 1 hour. * </pre> * * <code>.google.protobuf.Duration duration = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return Whether the duration field is set. */ public boolean hasDuration() { return ((bitField0_ & 0x00000004) != 0); } /** * * * <pre> * Output only. Duration of the maintenance window. 
The current window is * fixed at 1 hour. * </pre> * * <code>.google.protobuf.Duration duration = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The duration. */ public com.google.protobuf.Duration getDuration() { if (durationBuilder_ == null) { return duration_ == null ? com.google.protobuf.Duration.getDefaultInstance() : duration_; } else { return durationBuilder_.getMessage(); } } /** * * * <pre> * Output only. Duration of the maintenance window. The current window is * fixed at 1 hour. * </pre> * * <code>.google.protobuf.Duration duration = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder setDuration(com.google.protobuf.Duration value) { if (durationBuilder_ == null) { if (value == null) { throw new NullPointerException(); } duration_ = value; } else { durationBuilder_.setMessage(value); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Output only. Duration of the maintenance window. The current window is * fixed at 1 hour. * </pre> * * <code>.google.protobuf.Duration duration = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder setDuration(com.google.protobuf.Duration.Builder builderForValue) { if (durationBuilder_ == null) { duration_ = builderForValue.build(); } else { durationBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Output only. Duration of the maintenance window. The current window is * fixed at 1 hour. 
* </pre> * * <code>.google.protobuf.Duration duration = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder mergeDuration(com.google.protobuf.Duration value) { if (durationBuilder_ == null) { if (((bitField0_ & 0x00000004) != 0) && duration_ != null && duration_ != com.google.protobuf.Duration.getDefaultInstance()) { getDurationBuilder().mergeFrom(value); } else { duration_ = value; } } else { durationBuilder_.mergeFrom(value); } if (duration_ != null) { bitField0_ |= 0x00000004; onChanged(); } return this; } /** * * * <pre> * Output only. Duration of the maintenance window. The current window is * fixed at 1 hour. * </pre> * * <code>.google.protobuf.Duration duration = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder clearDuration() { bitField0_ = (bitField0_ & ~0x00000004); duration_ = null; if (durationBuilder_ != null) { durationBuilder_.dispose(); durationBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Output only. Duration of the maintenance window. The current window is * fixed at 1 hour. * </pre> * * <code>.google.protobuf.Duration duration = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public com.google.protobuf.Duration.Builder getDurationBuilder() { bitField0_ |= 0x00000004; onChanged(); return getDurationFieldBuilder().getBuilder(); } /** * * * <pre> * Output only. Duration of the maintenance window. The current window is * fixed at 1 hour. * </pre> * * <code>.google.protobuf.Duration duration = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public com.google.protobuf.DurationOrBuilder getDurationOrBuilder() { if (durationBuilder_ != null) { return durationBuilder_.getMessageOrBuilder(); } else { return duration_ == null ? com.google.protobuf.Duration.getDefaultInstance() : duration_; } } /** * * * <pre> * Output only. Duration of the maintenance window. The current window is * fixed at 1 hour. 
* </pre> * * <code>.google.protobuf.Duration duration = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Duration, com.google.protobuf.Duration.Builder, com.google.protobuf.DurationOrBuilder> getDurationFieldBuilder() { if (durationBuilder_ == null) { durationBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Duration, com.google.protobuf.Duration.Builder, com.google.protobuf.DurationOrBuilder>( getDuration(), getParentForChildren(), isClean()); duration_ = null; } return durationBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.redis.v1.WeeklyMaintenanceWindow) } // @@protoc_insertion_point(class_scope:google.cloud.redis.v1.WeeklyMaintenanceWindow) private static final com.google.cloud.redis.v1.WeeklyMaintenanceWindow DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.redis.v1.WeeklyMaintenanceWindow(); } public static com.google.cloud.redis.v1.WeeklyMaintenanceWindow getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<WeeklyMaintenanceWindow> PARSER = new com.google.protobuf.AbstractParser<WeeklyMaintenanceWindow>() { @java.lang.Override public WeeklyMaintenanceWindow parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } 
catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<WeeklyMaintenanceWindow> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<WeeklyMaintenanceWindow> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.redis.v1.WeeklyMaintenanceWindow getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
36,795
java-compute/google-cloud-compute/src/main/java/com/google/cloud/compute/v1/stub/RegionTargetHttpsProxiesStubSettings.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.compute.v1.stub; import static com.google.cloud.compute.v1.RegionTargetHttpsProxiesClient.ListPagedResponse; import com.google.api.core.ApiFunction; import com.google.api.core.ApiFuture; import com.google.api.core.ObsoleteApi; import com.google.api.gax.core.GaxProperties; import com.google.api.gax.core.GoogleCredentialsProvider; import com.google.api.gax.core.InstantiatingExecutorProvider; import com.google.api.gax.httpjson.GaxHttpJsonProperties; import com.google.api.gax.httpjson.HttpJsonTransportChannel; import com.google.api.gax.httpjson.InstantiatingHttpJsonChannelProvider; import com.google.api.gax.httpjson.ProtoOperationTransformers; import com.google.api.gax.longrunning.OperationSnapshot; import com.google.api.gax.longrunning.OperationTimedPollAlgorithm; import com.google.api.gax.retrying.RetrySettings; import com.google.api.gax.rpc.ApiCallContext; import com.google.api.gax.rpc.ApiClientHeaderProvider; import com.google.api.gax.rpc.ClientContext; import com.google.api.gax.rpc.OperationCallSettings; import com.google.api.gax.rpc.PageContext; import com.google.api.gax.rpc.PagedCallSettings; import com.google.api.gax.rpc.PagedListDescriptor; import com.google.api.gax.rpc.PagedListResponseFactory; import com.google.api.gax.rpc.StatusCode; import com.google.api.gax.rpc.StubSettings; import com.google.api.gax.rpc.TransportChannelProvider; import 
com.google.api.gax.rpc.UnaryCallSettings; import com.google.api.gax.rpc.UnaryCallable; import com.google.cloud.compute.v1.DeleteRegionTargetHttpsProxyRequest; import com.google.cloud.compute.v1.GetRegionTargetHttpsProxyRequest; import com.google.cloud.compute.v1.InsertRegionTargetHttpsProxyRequest; import com.google.cloud.compute.v1.ListRegionTargetHttpsProxiesRequest; import com.google.cloud.compute.v1.Operation; import com.google.cloud.compute.v1.PatchRegionTargetHttpsProxyRequest; import com.google.cloud.compute.v1.SetSslCertificatesRegionTargetHttpsProxyRequest; import com.google.cloud.compute.v1.SetUrlMapRegionTargetHttpsProxyRequest; import com.google.cloud.compute.v1.TargetHttpsProxy; import com.google.cloud.compute.v1.TargetHttpsProxyList; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import java.io.IOException; import java.time.Duration; import java.util.List; import javax.annotation.Generated; // AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link RegionTargetHttpsProxiesStub}. * * <p>The default instance has everything set to sensible defaults: * * <ul> * <li>The default service address (compute.googleapis.com) and default port (443) are used. * <li>Credentials are acquired automatically through Application Default Credentials. * <li>Retries are configured for idempotent methods but not for non-idempotent methods. * </ul> * * <p>The builder of this class is recursive, so contained classes are themselves builders. When * build() is called, the tree of builders is called to create the complete settings object. * * <p>For example, to set the * [RetrySettings](https://cloud.google.com/java/docs/reference/gax/latest/com.google.api.gax.retrying.RetrySettings) * of get: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. 
* // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * RegionTargetHttpsProxiesStubSettings.Builder regionTargetHttpsProxiesSettingsBuilder = * RegionTargetHttpsProxiesStubSettings.newBuilder(); * regionTargetHttpsProxiesSettingsBuilder * .getSettings() * .setRetrySettings( * regionTargetHttpsProxiesSettingsBuilder * .getSettings() * .getRetrySettings() * .toBuilder() * .setInitialRetryDelayDuration(Duration.ofSeconds(1)) * .setInitialRpcTimeoutDuration(Duration.ofSeconds(5)) * .setMaxAttempts(5) * .setMaxRetryDelayDuration(Duration.ofSeconds(30)) * .setMaxRpcTimeoutDuration(Duration.ofSeconds(60)) * .setRetryDelayMultiplier(1.3) * .setRpcTimeoutMultiplier(1.5) * .setTotalTimeoutDuration(Duration.ofSeconds(300)) * .build()); * RegionTargetHttpsProxiesStubSettings regionTargetHttpsProxiesSettings = * regionTargetHttpsProxiesSettingsBuilder.build(); * }</pre> * * Please refer to the [Client Side Retry * Guide](https://github.com/googleapis/google-cloud-java/blob/main/docs/client_retries.md) for * additional support in setting retries. * * <p>To configure the RetrySettings of a Long Running Operation method, create an * OperationTimedPollAlgorithm object and update the RPC's polling algorithm. For example, to * configure the RetrySettings for delete: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * RegionTargetHttpsProxiesStubSettings.Builder regionTargetHttpsProxiesSettingsBuilder = * RegionTargetHttpsProxiesStubSettings.newBuilder(); * TimedRetryAlgorithm timedRetryAlgorithm = * OperationalTimedPollAlgorithm.create( * RetrySettings.newBuilder() * .setInitialRetryDelayDuration(Duration.ofMillis(500)) * .setRetryDelayMultiplier(1.5) * .setMaxRetryDelayDuration(Duration.ofMillis(5000)) * .setTotalTimeoutDuration(Duration.ofHours(24)) * .build()); * regionTargetHttpsProxiesSettingsBuilder * .createClusterOperationSettings() * .setPollingAlgorithm(timedRetryAlgorithm) * .build(); * }</pre> */ @Generated("by gapic-generator-java") public class RegionTargetHttpsProxiesStubSettings extends StubSettings<RegionTargetHttpsProxiesStubSettings> { /** The default scopes of the service. */ private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES = ImmutableList.<String>builder() .add("https://www.googleapis.com/auth/compute") .add("https://www.googleapis.com/auth/cloud-platform") .build(); private final UnaryCallSettings<DeleteRegionTargetHttpsProxyRequest, Operation> deleteSettings; private final OperationCallSettings<DeleteRegionTargetHttpsProxyRequest, Operation, Operation> deleteOperationSettings; private final UnaryCallSettings<GetRegionTargetHttpsProxyRequest, TargetHttpsProxy> getSettings; private final UnaryCallSettings<InsertRegionTargetHttpsProxyRequest, Operation> insertSettings; private final OperationCallSettings<InsertRegionTargetHttpsProxyRequest, Operation, Operation> insertOperationSettings; private final PagedCallSettings< ListRegionTargetHttpsProxiesRequest, TargetHttpsProxyList, ListPagedResponse> listSettings; private final UnaryCallSettings<PatchRegionTargetHttpsProxyRequest, Operation> patchSettings; private final 
OperationCallSettings<PatchRegionTargetHttpsProxyRequest, Operation, Operation> patchOperationSettings; private final UnaryCallSettings<SetSslCertificatesRegionTargetHttpsProxyRequest, Operation> setSslCertificatesSettings; private final OperationCallSettings< SetSslCertificatesRegionTargetHttpsProxyRequest, Operation, Operation> setSslCertificatesOperationSettings; private final UnaryCallSettings<SetUrlMapRegionTargetHttpsProxyRequest, Operation> setUrlMapSettings; private final OperationCallSettings<SetUrlMapRegionTargetHttpsProxyRequest, Operation, Operation> setUrlMapOperationSettings; private static final PagedListDescriptor< ListRegionTargetHttpsProxiesRequest, TargetHttpsProxyList, TargetHttpsProxy> LIST_PAGE_STR_DESC = new PagedListDescriptor< ListRegionTargetHttpsProxiesRequest, TargetHttpsProxyList, TargetHttpsProxy>() { @Override public String emptyToken() { return ""; } @Override public ListRegionTargetHttpsProxiesRequest injectToken( ListRegionTargetHttpsProxiesRequest payload, String token) { return ListRegionTargetHttpsProxiesRequest.newBuilder(payload) .setPageToken(token) .build(); } @Override public ListRegionTargetHttpsProxiesRequest injectPageSize( ListRegionTargetHttpsProxiesRequest payload, int pageSize) { return ListRegionTargetHttpsProxiesRequest.newBuilder(payload) .setMaxResults(pageSize) .build(); } @Override public Integer extractPageSize(ListRegionTargetHttpsProxiesRequest payload) { return payload.getMaxResults(); } @Override public String extractNextToken(TargetHttpsProxyList payload) { return payload.getNextPageToken(); } @Override public Iterable<TargetHttpsProxy> extractResources(TargetHttpsProxyList payload) { return payload.getItemsList(); } }; private static final PagedListResponseFactory< ListRegionTargetHttpsProxiesRequest, TargetHttpsProxyList, ListPagedResponse> LIST_PAGE_STR_FACT = new PagedListResponseFactory< ListRegionTargetHttpsProxiesRequest, TargetHttpsProxyList, ListPagedResponse>() { @Override public 
ApiFuture<ListPagedResponse> getFuturePagedResponse( UnaryCallable<ListRegionTargetHttpsProxiesRequest, TargetHttpsProxyList> callable, ListRegionTargetHttpsProxiesRequest request, ApiCallContext context, ApiFuture<TargetHttpsProxyList> futureResponse) { PageContext< ListRegionTargetHttpsProxiesRequest, TargetHttpsProxyList, TargetHttpsProxy> pageContext = PageContext.create(callable, LIST_PAGE_STR_DESC, request, context); return ListPagedResponse.createAsync(pageContext, futureResponse); } }; /** Returns the object with the settings used for calls to delete. */ public UnaryCallSettings<DeleteRegionTargetHttpsProxyRequest, Operation> deleteSettings() { return deleteSettings; } /** Returns the object with the settings used for calls to delete. */ public OperationCallSettings<DeleteRegionTargetHttpsProxyRequest, Operation, Operation> deleteOperationSettings() { return deleteOperationSettings; } /** Returns the object with the settings used for calls to get. */ public UnaryCallSettings<GetRegionTargetHttpsProxyRequest, TargetHttpsProxy> getSettings() { return getSettings; } /** Returns the object with the settings used for calls to insert. */ public UnaryCallSettings<InsertRegionTargetHttpsProxyRequest, Operation> insertSettings() { return insertSettings; } /** Returns the object with the settings used for calls to insert. */ public OperationCallSettings<InsertRegionTargetHttpsProxyRequest, Operation, Operation> insertOperationSettings() { return insertOperationSettings; } /** Returns the object with the settings used for calls to list. */ public PagedCallSettings< ListRegionTargetHttpsProxiesRequest, TargetHttpsProxyList, ListPagedResponse> listSettings() { return listSettings; } /** Returns the object with the settings used for calls to patch. */ public UnaryCallSettings<PatchRegionTargetHttpsProxyRequest, Operation> patchSettings() { return patchSettings; } /** Returns the object with the settings used for calls to patch. 
*/ public OperationCallSettings<PatchRegionTargetHttpsProxyRequest, Operation, Operation> patchOperationSettings() { return patchOperationSettings; } /** Returns the object with the settings used for calls to setSslCertificates. */ public UnaryCallSettings<SetSslCertificatesRegionTargetHttpsProxyRequest, Operation> setSslCertificatesSettings() { return setSslCertificatesSettings; } /** Returns the object with the settings used for calls to setSslCertificates. */ public OperationCallSettings< SetSslCertificatesRegionTargetHttpsProxyRequest, Operation, Operation> setSslCertificatesOperationSettings() { return setSslCertificatesOperationSettings; } /** Returns the object with the settings used for calls to setUrlMap. */ public UnaryCallSettings<SetUrlMapRegionTargetHttpsProxyRequest, Operation> setUrlMapSettings() { return setUrlMapSettings; } /** Returns the object with the settings used for calls to setUrlMap. */ public OperationCallSettings<SetUrlMapRegionTargetHttpsProxyRequest, Operation, Operation> setUrlMapOperationSettings() { return setUrlMapOperationSettings; } public RegionTargetHttpsProxiesStub createStub() throws IOException { if (getTransportChannelProvider() .getTransportName() .equals(HttpJsonTransportChannel.getHttpJsonTransportName())) { return HttpJsonRegionTargetHttpsProxiesStub.create(this); } throw new UnsupportedOperationException( String.format( "Transport not supported: %s", getTransportChannelProvider().getTransportName())); } /** Returns the default service name. */ @Override public String getServiceName() { return "compute"; } /** Returns a builder for the default ExecutorProvider for this service. */ public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() { return InstantiatingExecutorProvider.newBuilder(); } /** Returns the default service endpoint. 
*/ @ObsoleteApi("Use getEndpoint() instead") public static String getDefaultEndpoint() { return "compute.googleapis.com:443"; } /** Returns the default mTLS service endpoint. */ public static String getDefaultMtlsEndpoint() { return "compute.mtls.googleapis.com:443"; } /** Returns the default service scopes. */ public static List<String> getDefaultServiceScopes() { return DEFAULT_SERVICE_SCOPES; } /** Returns a builder for the default credentials for this service. */ public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() { return GoogleCredentialsProvider.newBuilder() .setScopesToApply(DEFAULT_SERVICE_SCOPES) .setUseJwtAccessWithScope(true); } /** Returns a builder for the default ChannelProvider for this service. */ public static InstantiatingHttpJsonChannelProvider.Builder defaultHttpJsonTransportProviderBuilder() { return InstantiatingHttpJsonChannelProvider.newBuilder(); } public static TransportChannelProvider defaultTransportChannelProvider() { return defaultHttpJsonTransportProviderBuilder().build(); } public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() { return ApiClientHeaderProvider.newBuilder() .setGeneratedLibToken( "gapic", GaxProperties.getLibraryVersion(RegionTargetHttpsProxiesStubSettings.class)) .setTransportToken( GaxHttpJsonProperties.getHttpJsonTokenName(), GaxHttpJsonProperties.getHttpJsonVersion()); } /** Returns a new builder for this class. */ public static Builder newBuilder() { return Builder.createDefault(); } /** Returns a new builder for this class. */ public static Builder newBuilder(ClientContext clientContext) { return new Builder(clientContext); } /** Returns a builder containing all the values of this settings class. 
*/ public Builder toBuilder() { return new Builder(this); } protected RegionTargetHttpsProxiesStubSettings(Builder settingsBuilder) throws IOException { super(settingsBuilder); deleteSettings = settingsBuilder.deleteSettings().build(); deleteOperationSettings = settingsBuilder.deleteOperationSettings().build(); getSettings = settingsBuilder.getSettings().build(); insertSettings = settingsBuilder.insertSettings().build(); insertOperationSettings = settingsBuilder.insertOperationSettings().build(); listSettings = settingsBuilder.listSettings().build(); patchSettings = settingsBuilder.patchSettings().build(); patchOperationSettings = settingsBuilder.patchOperationSettings().build(); setSslCertificatesSettings = settingsBuilder.setSslCertificatesSettings().build(); setSslCertificatesOperationSettings = settingsBuilder.setSslCertificatesOperationSettings().build(); setUrlMapSettings = settingsBuilder.setUrlMapSettings().build(); setUrlMapOperationSettings = settingsBuilder.setUrlMapOperationSettings().build(); } /** Builder for RegionTargetHttpsProxiesStubSettings. 
*/ public static class Builder extends StubSettings.Builder<RegionTargetHttpsProxiesStubSettings, Builder> { private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders; private final UnaryCallSettings.Builder<DeleteRegionTargetHttpsProxyRequest, Operation> deleteSettings; private final OperationCallSettings.Builder< DeleteRegionTargetHttpsProxyRequest, Operation, Operation> deleteOperationSettings; private final UnaryCallSettings.Builder<GetRegionTargetHttpsProxyRequest, TargetHttpsProxy> getSettings; private final UnaryCallSettings.Builder<InsertRegionTargetHttpsProxyRequest, Operation> insertSettings; private final OperationCallSettings.Builder< InsertRegionTargetHttpsProxyRequest, Operation, Operation> insertOperationSettings; private final PagedCallSettings.Builder< ListRegionTargetHttpsProxiesRequest, TargetHttpsProxyList, ListPagedResponse> listSettings; private final UnaryCallSettings.Builder<PatchRegionTargetHttpsProxyRequest, Operation> patchSettings; private final OperationCallSettings.Builder< PatchRegionTargetHttpsProxyRequest, Operation, Operation> patchOperationSettings; private final UnaryCallSettings.Builder< SetSslCertificatesRegionTargetHttpsProxyRequest, Operation> setSslCertificatesSettings; private final OperationCallSettings.Builder< SetSslCertificatesRegionTargetHttpsProxyRequest, Operation, Operation> setSslCertificatesOperationSettings; private final UnaryCallSettings.Builder<SetUrlMapRegionTargetHttpsProxyRequest, Operation> setUrlMapSettings; private final OperationCallSettings.Builder< SetUrlMapRegionTargetHttpsProxyRequest, Operation, Operation> setUrlMapOperationSettings; private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>> RETRYABLE_CODE_DEFINITIONS; static { ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions = ImmutableMap.builder(); definitions.put( "no_retry_1_codes", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList())); definitions.put( 
"retry_policy_0_codes", ImmutableSet.copyOf( Lists.<StatusCode.Code>newArrayList( StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE))); RETRYABLE_CODE_DEFINITIONS = definitions.build(); } private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS; static { ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder(); RetrySettings settings = null; settings = RetrySettings.newBuilder() .setInitialRpcTimeoutDuration(Duration.ofMillis(600000L)) .setRpcTimeoutMultiplier(1.0) .setMaxRpcTimeoutDuration(Duration.ofMillis(600000L)) .setTotalTimeoutDuration(Duration.ofMillis(600000L)) .build(); definitions.put("no_retry_1_params", settings); settings = RetrySettings.newBuilder() .setInitialRetryDelayDuration(Duration.ofMillis(100L)) .setRetryDelayMultiplier(1.3) .setMaxRetryDelayDuration(Duration.ofMillis(60000L)) .setInitialRpcTimeoutDuration(Duration.ofMillis(600000L)) .setRpcTimeoutMultiplier(1.0) .setMaxRpcTimeoutDuration(Duration.ofMillis(600000L)) .setTotalTimeoutDuration(Duration.ofMillis(600000L)) .build(); definitions.put("retry_policy_0_params", settings); RETRY_PARAM_DEFINITIONS = definitions.build(); } protected Builder() { this(((ClientContext) null)); } protected Builder(ClientContext clientContext) { super(clientContext); deleteSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); deleteOperationSettings = OperationCallSettings.newBuilder(); getSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); insertSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); insertOperationSettings = OperationCallSettings.newBuilder(); listSettings = PagedCallSettings.newBuilder(LIST_PAGE_STR_FACT); patchSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); patchOperationSettings = OperationCallSettings.newBuilder(); setSslCertificatesSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); setSslCertificatesOperationSettings = OperationCallSettings.newBuilder(); setUrlMapSettings = 
UnaryCallSettings.newUnaryCallSettingsBuilder(); setUrlMapOperationSettings = OperationCallSettings.newBuilder(); unaryMethodSettingsBuilders = ImmutableList.<UnaryCallSettings.Builder<?, ?>>of( deleteSettings, getSettings, insertSettings, listSettings, patchSettings, setSslCertificatesSettings, setUrlMapSettings); initDefaults(this); } protected Builder(RegionTargetHttpsProxiesStubSettings settings) { super(settings); deleteSettings = settings.deleteSettings.toBuilder(); deleteOperationSettings = settings.deleteOperationSettings.toBuilder(); getSettings = settings.getSettings.toBuilder(); insertSettings = settings.insertSettings.toBuilder(); insertOperationSettings = settings.insertOperationSettings.toBuilder(); listSettings = settings.listSettings.toBuilder(); patchSettings = settings.patchSettings.toBuilder(); patchOperationSettings = settings.patchOperationSettings.toBuilder(); setSslCertificatesSettings = settings.setSslCertificatesSettings.toBuilder(); setSslCertificatesOperationSettings = settings.setSslCertificatesOperationSettings.toBuilder(); setUrlMapSettings = settings.setUrlMapSettings.toBuilder(); setUrlMapOperationSettings = settings.setUrlMapOperationSettings.toBuilder(); unaryMethodSettingsBuilders = ImmutableList.<UnaryCallSettings.Builder<?, ?>>of( deleteSettings, getSettings, insertSettings, listSettings, patchSettings, setSslCertificatesSettings, setUrlMapSettings); } private static Builder createDefault() { Builder builder = new Builder(((ClientContext) null)); builder.setTransportChannelProvider(defaultTransportChannelProvider()); builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build()); builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build()); builder.setMtlsEndpoint(getDefaultMtlsEndpoint()); builder.setSwitchToMtlsEndpointAllowed(true); return initDefaults(builder); } private static Builder initDefaults(Builder builder) { builder .deleteSettings() 
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params")); builder .getSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); builder .insertSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params")); builder .listSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); builder .patchSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params")); builder .setSslCertificatesSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params")); builder .setUrlMapSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params")); builder .deleteOperationSettings() .setInitialCallSettings( UnaryCallSettings .<DeleteRegionTargetHttpsProxyRequest, OperationSnapshot> newUnaryCallSettingsBuilder() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params")) .build()) .setResponseTransformer( ProtoOperationTransformers.ResponseTransformer.create(Operation.class)) .setMetadataTransformer( ProtoOperationTransformers.MetadataTransformer.create(Operation.class)) .setPollingAlgorithm( OperationTimedPollAlgorithm.create( RetrySettings.newBuilder() .setInitialRetryDelayDuration(Duration.ofMillis(500L)) .setRetryDelayMultiplier(1.5) .setMaxRetryDelayDuration(Duration.ofMillis(20000L)) .setInitialRpcTimeoutDuration(Duration.ZERO) .setRpcTimeoutMultiplier(1.0) 
.setMaxRpcTimeoutDuration(Duration.ZERO) .setTotalTimeoutDuration(Duration.ofMillis(600000L)) .build())); builder .insertOperationSettings() .setInitialCallSettings( UnaryCallSettings .<InsertRegionTargetHttpsProxyRequest, OperationSnapshot> newUnaryCallSettingsBuilder() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params")) .build()) .setResponseTransformer( ProtoOperationTransformers.ResponseTransformer.create(Operation.class)) .setMetadataTransformer( ProtoOperationTransformers.MetadataTransformer.create(Operation.class)) .setPollingAlgorithm( OperationTimedPollAlgorithm.create( RetrySettings.newBuilder() .setInitialRetryDelayDuration(Duration.ofMillis(500L)) .setRetryDelayMultiplier(1.5) .setMaxRetryDelayDuration(Duration.ofMillis(20000L)) .setInitialRpcTimeoutDuration(Duration.ZERO) .setRpcTimeoutMultiplier(1.0) .setMaxRpcTimeoutDuration(Duration.ZERO) .setTotalTimeoutDuration(Duration.ofMillis(600000L)) .build())); builder .patchOperationSettings() .setInitialCallSettings( UnaryCallSettings .<PatchRegionTargetHttpsProxyRequest, OperationSnapshot> newUnaryCallSettingsBuilder() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params")) .build()) .setResponseTransformer( ProtoOperationTransformers.ResponseTransformer.create(Operation.class)) .setMetadataTransformer( ProtoOperationTransformers.MetadataTransformer.create(Operation.class)) .setPollingAlgorithm( OperationTimedPollAlgorithm.create( RetrySettings.newBuilder() .setInitialRetryDelayDuration(Duration.ofMillis(500L)) .setRetryDelayMultiplier(1.5) .setMaxRetryDelayDuration(Duration.ofMillis(20000L)) .setInitialRpcTimeoutDuration(Duration.ZERO) .setRpcTimeoutMultiplier(1.0) .setMaxRpcTimeoutDuration(Duration.ZERO) .setTotalTimeoutDuration(Duration.ofMillis(600000L)) .build())); builder .setSslCertificatesOperationSettings() 
.setInitialCallSettings( UnaryCallSettings .<SetSslCertificatesRegionTargetHttpsProxyRequest, OperationSnapshot> newUnaryCallSettingsBuilder() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params")) .build()) .setResponseTransformer( ProtoOperationTransformers.ResponseTransformer.create(Operation.class)) .setMetadataTransformer( ProtoOperationTransformers.MetadataTransformer.create(Operation.class)) .setPollingAlgorithm( OperationTimedPollAlgorithm.create( RetrySettings.newBuilder() .setInitialRetryDelayDuration(Duration.ofMillis(500L)) .setRetryDelayMultiplier(1.5) .setMaxRetryDelayDuration(Duration.ofMillis(20000L)) .setInitialRpcTimeoutDuration(Duration.ZERO) .setRpcTimeoutMultiplier(1.0) .setMaxRpcTimeoutDuration(Duration.ZERO) .setTotalTimeoutDuration(Duration.ofMillis(600000L)) .build())); builder .setUrlMapOperationSettings() .setInitialCallSettings( UnaryCallSettings .<SetUrlMapRegionTargetHttpsProxyRequest, OperationSnapshot> newUnaryCallSettingsBuilder() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params")) .build()) .setResponseTransformer( ProtoOperationTransformers.ResponseTransformer.create(Operation.class)) .setMetadataTransformer( ProtoOperationTransformers.MetadataTransformer.create(Operation.class)) .setPollingAlgorithm( OperationTimedPollAlgorithm.create( RetrySettings.newBuilder() .setInitialRetryDelayDuration(Duration.ofMillis(500L)) .setRetryDelayMultiplier(1.5) .setMaxRetryDelayDuration(Duration.ofMillis(20000L)) .setInitialRpcTimeoutDuration(Duration.ZERO) .setRpcTimeoutMultiplier(1.0) .setMaxRpcTimeoutDuration(Duration.ZERO) .setTotalTimeoutDuration(Duration.ofMillis(600000L)) .build())); return builder; } /** * Applies the given settings updater function to all of the unary API methods in this service. 
* * <p>Note: This method does not support applying settings to streaming methods. */ public Builder applyToAllUnaryMethods( ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) { super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater); return this; } public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() { return unaryMethodSettingsBuilders; } /** Returns the builder for the settings used for calls to delete. */ public UnaryCallSettings.Builder<DeleteRegionTargetHttpsProxyRequest, Operation> deleteSettings() { return deleteSettings; } /** Returns the builder for the settings used for calls to delete. */ public OperationCallSettings.Builder<DeleteRegionTargetHttpsProxyRequest, Operation, Operation> deleteOperationSettings() { return deleteOperationSettings; } /** Returns the builder for the settings used for calls to get. */ public UnaryCallSettings.Builder<GetRegionTargetHttpsProxyRequest, TargetHttpsProxy> getSettings() { return getSettings; } /** Returns the builder for the settings used for calls to insert. */ public UnaryCallSettings.Builder<InsertRegionTargetHttpsProxyRequest, Operation> insertSettings() { return insertSettings; } /** Returns the builder for the settings used for calls to insert. */ public OperationCallSettings.Builder<InsertRegionTargetHttpsProxyRequest, Operation, Operation> insertOperationSettings() { return insertOperationSettings; } /** Returns the builder for the settings used for calls to list. */ public PagedCallSettings.Builder< ListRegionTargetHttpsProxiesRequest, TargetHttpsProxyList, ListPagedResponse> listSettings() { return listSettings; } /** Returns the builder for the settings used for calls to patch. */ public UnaryCallSettings.Builder<PatchRegionTargetHttpsProxyRequest, Operation> patchSettings() { return patchSettings; } /** Returns the builder for the settings used for calls to patch. 
*/ public OperationCallSettings.Builder<PatchRegionTargetHttpsProxyRequest, Operation, Operation> patchOperationSettings() { return patchOperationSettings; } /** Returns the builder for the settings used for calls to setSslCertificates. */ public UnaryCallSettings.Builder<SetSslCertificatesRegionTargetHttpsProxyRequest, Operation> setSslCertificatesSettings() { return setSslCertificatesSettings; } /** Returns the builder for the settings used for calls to setSslCertificates. */ public OperationCallSettings.Builder< SetSslCertificatesRegionTargetHttpsProxyRequest, Operation, Operation> setSslCertificatesOperationSettings() { return setSslCertificatesOperationSettings; } /** Returns the builder for the settings used for calls to setUrlMap. */ public UnaryCallSettings.Builder<SetUrlMapRegionTargetHttpsProxyRequest, Operation> setUrlMapSettings() { return setUrlMapSettings; } /** Returns the builder for the settings used for calls to setUrlMap. */ public OperationCallSettings.Builder< SetUrlMapRegionTargetHttpsProxyRequest, Operation, Operation> setUrlMapOperationSettings() { return setUrlMapOperationSettings; } @Override public RegionTargetHttpsProxiesStubSettings build() throws IOException { return new RegionTargetHttpsProxiesStubSettings(this); } } }
google/closure-compiler
35,583
test/com/google/javascript/jscomp/MakeDeclaredNamesUniqueTest.java
/* * Copyright 2009 The Closure Compiler Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.javascript.jscomp; import static com.google.common.truth.Truth.assertThat; import static org.junit.Assert.assertThrows; import com.google.javascript.jscomp.MakeDeclaredNamesUnique.InlineRenamer; import com.google.javascript.rhino.Node; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** * @author johnlenz@google.com (John Lenz) */ @RunWith(JUnit4.class) public final class MakeDeclaredNamesUniqueTest extends CompilerTestCase { // this.useDefaultRenamer = true; invokes the ContextualRenamer // this.useDefaultRenamer = false; invokes the InlineRenamer private boolean useDefaultRenamer = false; // invert = true; treats JavaScript input as normalized code and inverts the renaming // invert = false; conducts renaming private boolean invert = false; // removeConst = true; removes const-ness of a name (e.g. If the variable name is CONST) private boolean removeConst = false; // whether to throw an exception on any names newly made unique. 
private boolean assertOnChange; private static final String LOCAL_NAME_PREFIX = "unique_"; @Override protected CompilerPass getProcessor(final Compiler compiler) { if (!invert) { return new CompilerPass() { @Override public void process(Node externs, Node root) { MakeDeclaredNamesUnique.Builder renamer = MakeDeclaredNamesUnique.builder().withAssertOnChange(assertOnChange); if (!useDefaultRenamer) { renamer = renamer.withRenamer( new InlineRenamer( compiler.getCodingConvention(), compiler.getUniqueNameIdSupplier(), LOCAL_NAME_PREFIX, removeConst, true, null)); } NodeTraversal.traverseRoots(compiler, renamer.build(), externs, root); } }; } else { return MakeDeclaredNamesUnique.getContextualRenameInverter(compiler); } } @Override public CompilerOptions getOptions() { CompilerOptions options = super.getOptions(); options.setWarningLevel(DiagnosticGroups.MODULE_LOAD, CheckLevel.OFF); return options; } @Override protected int getNumRepetitions() { // The normalize pass is only run once. return 1; } @Before public void customSetUp() throws Exception { removeConst = false; invert = false; useDefaultRenamer = false; assertOnChange = false; } private void testWithInversion(String original, String expected) { invert = false; test(original, expected); invert = true; test(expected, original); invert = false; } private void testWithInversion(String[] original, String[] expected) { invert = false; test(srcs(original), expected(expected)); invert = true; test(srcs(expected), expected(original)); invert = false; } private void testSameWithInversion(String externs, String original) { invert = false; testSame(externs(externs), srcs(original)); invert = true; testSame(externs(externs), srcs(original)); invert = false; } private void testSameWithInversion(String original) { testSameWithInversion("", original); } private static String wrapInFunction(String s) { return "function f(){" + s + "}"; } private void testInFunctionWithInversion(String original, String expected) { 
testWithInversion(wrapInFunction(original), wrapInFunction(expected)); } @Test public void testMakeDeclaredNamesUniqueNullishCoalesce() { this.useDefaultRenamer = true; test( "var foo; var x = function foo(){var foo = false ?? {};}", "var foo; var x = function foo$jscomp$1(){var foo$jscomp$2 = false ?? {}}"); testSameWithInversion("var a = b ?? c;"); } @Test public void testShadowedBleedingName() { this.useDefaultRenamer = true; test( "var foo; var x = function foo(){var foo;}", "var foo; var x = function foo$jscomp$1(){var foo$jscomp$2}"); } @Test public void testMakeLocalNamesUniqueWithContext1() { this.useDefaultRenamer = true; invert = true; test( "var a;function foo(){var a$jscomp$inline_1; a = 1}", "var a;function foo(){var a$jscomp$0 ; a = 1}"); test( "var a;function foo(){var a$jscomp$inline_1;}", // "var a;function foo(){var a ;}"); test( "let a;function foo(){let a$jscomp$inline_1; a = 1}", "let a;function foo(){let a$jscomp$0 ; a = 1}"); test( "const a = 1;function foo(){let a$jscomp$inline_1;}", // "const a = 1;function foo(){let a ;}"); test( "class A {} function foo(){class A$jscomp$inline_1 {}}", "class A {} function foo(){class A {}}"); } @Test public void testMakeLocalNamesUniqueWithContext2() { // Set the test type this.useDefaultRenamer = true; // Verify global names are untouched. testSameWithInversion("var a;"); testSameWithInversion("let a;"); testSameWithInversion("const a = 0;"); // Verify global names are untouched. testSameWithInversion("a;"); // Local names are made unique. 
testWithInversion( "var a;function foo(a){var b;a}", "var a;function foo(a$jscomp$1){var b;a$jscomp$1}"); testWithInversion( "var a;function foo(){var b;a}function boo(){var b;a}", "var a;function foo(){var b;a}function boo(){var b$jscomp$1;a}"); testWithInversion( """ function foo(a){var b} function boo(a){var b} """, """ function foo(a){var b} function boo(a$jscomp$1){var b$jscomp$1} """); // variable b is left untouched because it is only declared once testWithInversion( "let a;function foo(a){let b;a}", "let a;function foo(a$jscomp$1){let b;a$jscomp$1}"); testWithInversion( "let a;function foo(){let b;a}function boo(){let b;a}", "let a;function foo(){let b;a}function boo(){let b$jscomp$1;a}"); testWithInversion( """ function foo(a){let b} function boo(a){let b} """, """ function foo(a){let b} function boo(a$jscomp$1){let b$jscomp$1} """); // Verify functions expressions are renamed. testWithInversion( "var a = function foo(){foo()};var b = function foo(){foo()};", "var a = function foo(){foo()};var b = function foo$jscomp$1(){foo$jscomp$1()};"); testWithInversion( "let a = function foo(){foo()};let b = function foo(){foo()};", "let a = function foo(){foo()};let b = function foo$jscomp$1(){foo$jscomp$1()};"); // Verify catch exceptions names are made unique testSameWithInversion("try { } catch(e) {e;}"); // Inversion does not handle exceptions correctly. test( "try { } catch(e) {e;}; try { } catch(e) {e;}", "try { } catch(e) {e;}; try { } catch(e$jscomp$1) {e$jscomp$1;}"); test( "try { } catch(e) {e; try { } catch(e) {e;}};", "try { } catch(e) {e; try { } catch(e$jscomp$1) {e$jscomp$1;} }; "); } @Test public void testMakeLocalNamesUniqueWithContext3() { // Set the test type this.useDefaultRenamer = true; String externs = "var extern1 = {};"; // Verify global names are untouched. testSameWithInversion(externs, "var extern1 = extern1 || {};"); // Verify global names are untouched. 
testSame(externs(externs), srcs("var extern1 = extern1 || {};")); } @Test public void testMakeLocalNamesUniqueWithContext4() { // Set the test type this.useDefaultRenamer = true; testInFunctionWithInversion( "var e; try { } catch(e) {e;}; try { } catch(e) {e;}", "var e; try { } catch(e$jscomp$1) {e$jscomp$1;}; try { } catch(e$jscomp$2) {e$jscomp$2;}"); testInFunctionWithInversion( "var e; try { } catch(e ) { e; try { } catch(e ) {e; } };", "var e; try { } catch(e$jscomp$1) {e$jscomp$1; try { } catch(e$jscomp$2) {e$jscomp$2;} };"); testInFunctionWithInversion( "try { } catch(e ) {e ;}; try { } catch(e ) {e ;}; var e;", "try { } catch(e$jscomp$1) {e$jscomp$1;}; try { } catch(e$jscomp$2) {e$jscomp$2;}; var e;"); testInFunctionWithInversion( "try { } catch(e ) {e ; try { } catch(e ) {e ;} }; var e;", "try { } catch(e$jscomp$1) {e$jscomp$1; try { } catch(e$jscomp$2) {e$jscomp$2;} }; var e;"); } @Test public void testMakeLocalNamesUniqueWithContext5() { this.useDefaultRenamer = true; testWithInversion("function f(){var f; f = 1}", "function f(){var f$jscomp$1; f$jscomp$1 = 1}"); } @Test public void testMakeLocalNamesUniqueWithContext6() { this.useDefaultRenamer = true; testWithInversion("function f(f){f = 1}", "function f(f$jscomp$1){f$jscomp$1 = 1}"); } @Test public void testMakeLocalNamesUniqueWithContext7() { this.useDefaultRenamer = true; testWithInversion( "function f(f){var f; f = 1}", "function f(f$jscomp$1){var f$jscomp$1; f$jscomp$1 = 1}"); } @Test public void testMakeLocalNamesUniqueWithContext8() { this.useDefaultRenamer = true; test( "var fn = function f(){var f; f = 1}", "var fn = function f(){var f$jscomp$1; f$jscomp$1 = 1}"); } @Test public void testMakeLocalNamesUniqueWithContext9() { this.useDefaultRenamer = true; testSame("var fn = function f(f){f = 1}"); } @Test public void testMakeLocalNamesUniqueWithContext10() { this.useDefaultRenamer = true; testSame("var fn = function f(f){var f; f = 1}"); } @Test public void 
testMakeLocalNamesUniqueWithContext11() { this.useDefaultRenamer = true; // Changes the parameter name if it's the same as loop object name test( """ var loopObjName = {}; for(;;) { var fn = (function f(loopObjName){ return function() {}; })(loopObjName); } """, """ var loopObjName = {}; for(;;) { var fn = (function f(loopObjName$jscomp$1){ return function() {}; })(loopObjName); } """); // parameter name is already unique; no change testSame( """ var foo = {}; for(;;) { var fn = (function f(bar){ return function() {}; })(foo);} """); } @Test public void testMakeFunctionsUniqueWithContext() { this.useDefaultRenamer = true; testSame("function f(){} function f(){}"); testSame("var x = function() {function f(){} function f(){}};"); } @Test public void testMakeFunctionsUniqueWithContext1() { this.useDefaultRenamer = true; test( "if (1) { function f(){} } else { function f(){} }", "if (1) { function f(){} } else { function f$jscomp$1(){} }"); } @Test public void testMakeFunctionsUniqueWithContext2() { this.useDefaultRenamer = true; testSame("if (1) { function f(){} function f(){} }"); } @Test public void testMakeFunctionsUniqueWithContext3() { this.useDefaultRenamer = true; test( "function f() {} if (1) { function f(){} function f(){} }", "function f() {} if (1) { function f$jscomp$1(){} function f$jscomp$1(){} }"); } @Test public void testArguments() { // Set the test type this.useDefaultRenamer = true; // Don't distinguish between "arguments", it can't be made unique. testSameWithInversion("function foo(){var arguments;function bar(){var arguments;}}"); invert = true; // Don't introduce new references to arguments, it is special. // Still try to rename it to a name that depends on the shape of the AST rather than // the process we happened to follow to reach that shape. 
test( "function foo(){var arguments$jscomp$1;}", // "function foo(){var arguments$jscomp$0;}"); } @Test public void testClassInForLoop() { useDefaultRenamer = true; testSame("for (class a {};;) { break; }"); } @Test public void testFunctionInForLoop() { useDefaultRenamer = true; testSame("for (function a() {};;) { break; }"); } @Test public void testLetsInSeparateBlocks() { useDefaultRenamer = true; test( """ if (x) { let e; alert(e); } if (y) { let e; alert(e); } """, """ if (x) { let e; alert(e); } if (y) { let e$jscomp$1; alert(e$jscomp$1); } """); } @Test public void testConstInGlobalHoistScope() { useDefaultRenamer = true; testSame( """ if (true) { const x = 1; alert(x); } """); test( """ if (true) { const x = 1; alert(x); } else { const x = 1; alert(x); } """, """ if (true) { const x = 1; alert(x); } else { const x$jscomp$1 = 1; alert(x$jscomp$1); } """); } @Test public void testMakeLocalNamesUniqueWithoutContext() { this.useDefaultRenamer = false; test("var a;", "var a$jscomp$unique_0"); test("let a;", "let a$jscomp$unique_0"); // Verify undeclared names are untouched. testSame("a;"); // Local names are made unique. 
test( """ var a; function foo(a){var b;a} """, """ var a$jscomp$unique_0; function foo$jscomp$unique_1(a$jscomp$unique_2){ var b$jscomp$unique_3;a$jscomp$unique_2} """); test( """ var a; function foo(){var b;a} function boo(){var b;a} """, """ var a$jscomp$unique_0; function foo$jscomp$unique_1(){var b$jscomp$unique_3;a$jscomp$unique_0} function boo$jscomp$unique_2(){var b$jscomp$unique_4;a$jscomp$unique_0} """); test( "let a; function foo(a) {let b; a; }", """ let a$jscomp$unique_0; function foo$jscomp$unique_1(a$jscomp$unique_2) { let b$jscomp$unique_3; a$jscomp$unique_2; } """); test( """ let a; function foo() { let b; a; } function boo() { let b; a; } """, """ let a$jscomp$unique_0; function foo$jscomp$unique_1() { let b$jscomp$unique_3; a$jscomp$unique_0; } function boo$jscomp$unique_2() { let b$jscomp$unique_4; a$jscomp$unique_0; } """); // Verify function expressions are renamed. test( "var a = function foo(){foo()};", "var a$jscomp$unique_0 = function foo$jscomp$unique_1(){foo$jscomp$unique_1()};"); test( "const a = function foo(){foo()};", "const a$jscomp$unique_0 = function foo$jscomp$unique_1(){foo$jscomp$unique_1()};"); // Verify catch exceptions names are made unique test("try { } catch(e) {e;}", "try { } catch(e$jscomp$unique_0) {e$jscomp$unique_0;}"); test( """ try { } catch(e) {e;}; try { } catch(e) {e;} """, """ try { } catch(e$jscomp$unique_0) {e$jscomp$unique_0;}; try { } catch(e$jscomp$unique_1) {e$jscomp$unique_1;} """); test( """ try { } catch(e) {e; try { } catch(e) {e;}}; """, """ try { } catch(e$jscomp$unique_0) {e$jscomp$unique_0; try { } catch(e$jscomp$unique_1) {e$jscomp$unique_1;} };\s """); } @Test public void testMakeLocalNamesUniqueWithoutContext2() { // Set the test type this.useDefaultRenamer = false; test("var _a;", "var JSCompiler__a$jscomp$unique_0"); test( "var _a = function _b(_c) { var _d; };", """ var JSCompiler__a$jscomp$unique_0 = function JSCompiler__b$jscomp$unique_2( JSCompiler__c$jscomp$unique_1) { var 
JSCompiler__d$jscomp$unique_3; }; """); test("let _a;", "let JSCompiler__a$jscomp$unique_0"); test( "const _a = function _b(_c) { let _d; };", """ const JSCompiler__a$jscomp$unique_0 = function JSCompiler__b$jscomp$unique_2( JSCompiler__c$jscomp$unique_1) { let JSCompiler__d$jscomp$unique_3; }; """); } @Test public void testOnlyInversion() { invert = true; test( "function f(a, a$jscomp$1) {}", // "function f(a, a$jscomp$0) {}"); test( "function f(a$jscomp$1, b$jscomp$2) {}", // "function f(a , b ) {}"); test( "function f(a$jscomp$1, a$jscomp$2) {}", // "function f(a , a$jscomp$0) {}"); test( "try { } catch(e) {e; try { } catch(e$jscomp$1) {e$jscomp$1;} }; ", "try { } catch(e) {e; try { } catch(e) {e;} }; "); testSame("var a$jscomp$1;"); testSame("const a$jscomp$1 = 1;"); testSame("function f() { var $jscomp$; }"); testSame("var CONST = 3; var b = CONST;"); test( "function f() {var CONST = 3; var ACONST$jscomp$1 = 2;}", "function f() {var CONST = 3; var ACONST = 2;}"); test( "function f() {const CONST = 3; const ACONST$jscomp$1 = 2;}", "function f() {const CONST = 3; const ACONST = 2;}"); } @Test public void testOnlyInversion2() { invert = true; test( "function f() {try { } catch(e) {e;}; try { } catch(e$jscomp$0) {e$jscomp$0;}}", "function f() {try { } catch(e) {e;}; try { } catch(e) {e;}}"); } @Test public void testOnlyInversion3() { invert = true; test( """ function x1() { var a$jscomp$1; function x2() { var a$jscomp$2; } function x3() { var a$jscomp$3; } } """, """ function x1() { var a; function x2() { var a; } function x3() { var a; } } """); } @Test public void testOnlyInversion4() { invert = true; testSame( """ function x1() { // The attempt to rename will re-generate this same exact name. // The purpose of this test is to make sure we don't accidentally report // this "renaming" to the same name as a change. 
var a$jscomp$0; function x2() { var a;a$jscomp$0++ } } """); } @Test public void testOnlyInversion5() { invert = true; test( """ function x1() { const a$jscomp$1 = 0; function x2() { const b$jscomp$1 = 0; } } """, """ function x1() { const a = 0; function x2() { const b = 0; } } """); } @Test public void testConstRemovingRename1() { removeConst = true; test( "(function () {var CONST = 3; var ACONST$jscomp$1 = 2;})", "(function () {var CONST$jscomp$unique_0 = 3; var ACONST$jscomp$unique_1 = 2;})"); } @Test public void testConstRemovingRename2() { removeConst = true; test( "var CONST = 3; var b = CONST;", "var CONST$jscomp$unique_0 = 3; var b$jscomp$unique_1 = CONST$jscomp$unique_0;"); } @Test public void testConstRemovingRenameAlsoRemovesAnnotation() { removeConst = true; test( "/** @const */ var c = 3; var b = c;", "/** blank */ var c$jscomp$unique_0 = 3; var b$jscomp$unique_1 = c$jscomp$unique_0;"); } @Test public void testRestParamWithoutContext() { test( "function f(...x) { x; }", "function f$jscomp$unique_0(...x$jscomp$unique_1) { x$jscomp$unique_1; }"); } @Test public void testRestParamWithContextWithInversion() { this.useDefaultRenamer = true; testWithInversion( """ let x = 0; function foo(...x) { return x[0]; } """, """ let x = 0; function foo(...x$jscomp$1) { return x$jscomp$1[0] } """); } @Test public void testVarParamSameName0() { test( """ function f(x) { if (!x) var x = 6; } """, """ function f$jscomp$unique_0(x$jscomp$unique_1) { if (!x$jscomp$unique_1) var x$jscomp$unique_1 = 6; } """); } @Test public void testVarParamSameName1() { test( """ function f(x) { if (!x) x = 6; } """, """ function f$jscomp$unique_0(x$jscomp$unique_1) { if (!x$jscomp$unique_1) x$jscomp$unique_1 = 6; } """); } @Test public void testVarParamSameAsLet0() { test( """ function f(x) { if (!x) { let x = 6; } } """, """ function f$jscomp$unique_0(x$jscomp$unique_1) { if (!x$jscomp$unique_1) { let x$jscomp$unique_2 = 6; } } """); } @Test public void testObjectProperties() { test("var 
a = {x : 'a'};", "var a$jscomp$unique_0 = {x : 'a'};"); test("let a = {x : 'a'};", "let a$jscomp$unique_0 = {x : 'a'};"); test("const a = {x : 'a'};", "const a$jscomp$unique_0 = {x : 'a'};"); test("var a = {x : 'a'}; a.x", "var a$jscomp$unique_0 = {x : 'a'}; a$jscomp$unique_0.x"); } @Test public void testClassesWithContextWithInversion() { this.useDefaultRenamer = true; testWithInversion( """ var a; class Foo { constructor(a) { this.a = a; } f() { var x = 1; return a + x; } } """, """ var a; class Foo { constructor(a$jscomp$1) { this.a = a$jscomp$1; } f() { var x = 1; return a + x; } } """); // class declarations are block-scoped but not hoisted. testSameWithInversion( """ { let x = new Foo(); // ReferenceError class Foo {} } """); } @Test public void testBlockScopesWithContextWithInversion1() { this.useDefaultRenamer = true; testWithInversion( """ {let a; { let a; }} """, """ {let a; { let a$jscomp$1; }} """); } @Test public void testBlockScopesWithContextWithInversion2() { this.useDefaultRenamer = true; // function declarations are block-scoped testWithInversion( """ function foo() { function bar() { return 1; } } function boo() { function bar() { return 2; } } """, """ function foo() { function bar() { return 1; } } function boo() { function bar$jscomp$1() { return 2; } } """); } @Test public void testBlockScopesWithContextWithInversion3() { this.useDefaultRenamer = true; test( """ function foo() { function bar() { return 1; } if (true) { function bar() { return 2; } } } """, """ function foo() { function bar() { return 1; } if (true) { function bar$jscomp$1() { return 2; } } } """); } @Test public void testBlockScopesWithContextWithInversion4() { this.useDefaultRenamer = true; test( """ var f1=function(){ var x }; (function() { function f2() { alert(x) } { var x=0 } f2() })() """, """ var f1=function(){ var x }; (function() { function f2() { alert(x$jscomp$1) } { var x$jscomp$1=0 } f2() })() """); } @Test public void testBlockScopesWithContextWithInversion5() { 
this.useDefaultRenamer = true; testSame( """ if (true) { function f(){}; } f(); """); } @Test public void testBlockScopesWithoutContext() { this.useDefaultRenamer = false; test( """ { function foo() {return 1;} if (true) { function foo() {return 2;} } } """, """ { function foo$jscomp$unique_0() {return 1;} if (true) { function foo$jscomp$unique_1() {return 2;} } } """); test( """ function foo(x) { return foo(x) - 1; } """, """ function foo$jscomp$unique_0(x$jscomp$unique_1) { return foo$jscomp$unique_0(x$jscomp$unique_1) - 1; } """); test( """ export function foo(x) { return foo(x) - 1; } """, """ export function foo$jscomp$unique_0(x$jscomp$unique_1) { return foo$jscomp$unique_0(x$jscomp$unique_1) - 1; } """); } @Test public void testRecursiveFunctionsWithContextWithInversion() { this.useDefaultRenamer = true; testSameWithInversion( """ function foo(x) { return foo(x) - 1; } """); } @Test public void testInvertShadowedParameterNames() { useDefaultRenamer = true; testWithInversion( """ var p; function f(p) { return function g(p) { return p; } } """, """ var p; function f(p$jscomp$1) { return function g(p$jscomp$2) { return p$jscomp$2; } } """); } @Test public void testArrowFunctionWithContextWithInversion() { this.useDefaultRenamer = true; testWithInversion( """ function foo() { var f = (x) => x; return f(1); } function boo() { var f = (x) => x; return f(2); } """, """ function foo() { var f = (x) => x; return f(1); } function boo() { var f$jscomp$1 = (x$jscomp$1) => x$jscomp$1; return f$jscomp$1(2); } """); testWithInversion( """ function foo() { var f = (x, ...y) => x + y[0]; return f(1, 2); } function boo() { var f = (x, ...y) => x + y[0]; return f(1, 2); } """, """ function foo() { var f = (x, ...y) => x + y[0]; return f(1, 2); } function boo() { var f$jscomp$1 = (x$jscomp$1, ...y$jscomp$1) => x$jscomp$1 + y$jscomp$1[0]; return f$jscomp$1(1, 2); } """); } @Test public void testDefaultParameterWithContextWithInversion1() { this.useDefaultRenamer = true; 
testWithInversion( """ function foo(x = 1) { return x; } function boo(x = 1) { return x; } """, """ function foo(x = 1) { return x; } function boo(x$jscomp$1 = 1) { return x$jscomp$1; } """); testSameWithInversion( """ function foo(x = 1, y = x) { return x + y; } """); } @Test public void testDefaultParameterWithContextWithInversion2() { this.useDefaultRenamer = true; // Parameter default values don't see the scope of the body // Methods or functions defined "inside" parameter default values don't see the local variables // of the body. testWithInversion( """ let x = 'outer'; function foo(bar = baz => x) { let x = 'inner'; console.log(bar()); } """, """ let x = 'outer'; function foo(bar = baz => x) { let x$jscomp$1 = 'inner'; console.log(bar()); } """); testWithInversion( """ const x = 'outer'; function foo(a = x) { const x = 'inner'; return a; } """, """ const x = 'outer'; function foo(a = x) { const x$jscomp$1 = 'inner'; return a; } """); testWithInversion( """ const x = 'outerouter'; { const x = 'outer'; function foo(a = x) { return a; } foo(); } """, """ const x = 'outerouter'; { const x$jscomp$1 = 'outer'; function foo(a = x$jscomp$1) { return a; } foo(); } """); testSameWithInversion( """ function foo(x, y = x) { return x + y; } """); } @Test public void testObjectLiteralsWithContextWithInversion() { this.useDefaultRenamer = true; testWithInversion( """ function foo({x:y}) { return y; } function boo({x:y}) { return y; } """, """ function foo({x:y}) { return y; } function boo({x:y$jscomp$1}) { return y$jscomp$1 } """); } @Test public void testExportedOrImportedNamesAreUntouched() { // The eventual desired behavior is that none of the 'a's in the following test cases // are renamed to a$jscomp$1. Rewrite this test after that behavior is implemented. 
this.useDefaultRenamer = true; test( srcs("var a;", "let a; export {a as a};"), expected("var a;", "let a$jscomp$1; export {a$jscomp$1 as a};")); test( srcs("var a;", "import {a as a} from './foo.js'; let b = a;"), expected("var a;", "import {a as a$jscomp$1} from './foo.js'; let b = a$jscomp$1;")); } @Test public void testTwoMethodsInTheSameFileWithSameLocalNames() { this.useDefaultRenamer = true; // Verify same local names in 2 different files get different new names. // The ContextualRenamer renames an "oldName" by adding "$jscomp$\n" + "id" as a suffix string. // So, when another declaration containing "$jscomp$id" exists at any other source location in // the entire JS program (due to a prior renaming), the ContextualRenamer should not generate // that same name when renaming this declaration. test( "function foo() {var a; a;} function bar() {let a; let a$jscomp$1; a + a$jscomp$1;}", """ function foo() {var a; a;} function bar() { let a$jscomp$1; let a$jscomp$1$jscomp$1; a$jscomp$1 + a$jscomp$1$jscomp$1; } """); test( "function bar() {let a; let a$jscomp$1; a + a$jscomp$1;} function foo() {var a; a;}", """ function bar() { let a; let a$jscomp$1; a + a$jscomp$1; } function foo() {var a$jscomp$2; a$jscomp$2;} """); // tests when name with $jscomp$1 suffix comes first test( "function bar() {let a$jscomp$1; let a; a + a$jscomp$1;} function foo() {var a; a;}", """ function bar() { let a$jscomp$1; let a; a + a$jscomp$1; } function foo() {var a$jscomp$2; a$jscomp$2;} """); test( """ function bar() { let a; let a$jscomp$1; a + a$jscomp$1; } function foo() { // tests when a$jscomp$2 declared later in the same scope var a; a; var a$jscomp$2; a$jscomp$2; } """, """ function bar() { let a; let a$jscomp$1; a + a$jscomp$1; } function foo() { var a$jscomp$2; a$jscomp$2; var a$jscomp$2$jscomp$1; a$jscomp$2$jscomp$1; } """); test( """ function bar() { let a; let a$jscomp$1; a + a$jscomp$1; } function foo() { // tests when a$jscomp$2 declared first in the same scope var 
a$jscomp$2; a$jscomp$2; var a; a; } """, """ function bar() { let a; let a$jscomp$1; a + a$jscomp$1; } function foo() { var a$jscomp$2; a$jscomp$2; var a$jscomp$3; a$jscomp$3; } """); test( """ function bar() { let a; let a$jscomp$1; a + a$jscomp$1; } function foo() { // tests when a$jscomp$2 is declared in another scope var a; a; } function baz() { var a$jscomp$2; a$jscomp$2; } """, """ function bar() { let a; let a$jscomp$1; a + a$jscomp$1; } function foo() { var a$jscomp$2; a$jscomp$2; } function baz() { var a$jscomp$2$jscomp$1; a$jscomp$2$jscomp$1; } """); } @Test public void testTwoFilesWithSameLocalNames() { this.useDefaultRenamer = true; // Verify same local names in 2 different files get different new names test( srcs( "function foo() {var a; a;}", "function bar() {let a; let a$jscomp$1; a + a$jscomp$1;}"), expected( "function foo() {var a; a;}", """ function bar() {let a$jscomp$1; let a$jscomp$1$jscomp$1; a$jscomp$1 + a$jscomp$1$jscomp$1;} """)); } @Test public void testImportStarWithInversion() { this.useDefaultRenamer = true; testWithInversion( new String[] { "let a = 5;", // "import * as a from './a.js'; const TAU = 2 * a.PI;" }, new String[] { "let a = 5;", // "import * as a$jscomp$1 from './a.js'; const TAU = 2 * a$jscomp$1.PI" }); } @Test public void assertOnChange_throwsException() { this.useDefaultRenamer = true; this.assertOnChange = true; Exception e = assertThrows( RuntimeException.class, () -> testNoWarning("var a; function foo() { var a = 1; } ")); assertThat(e).hasMessageThat().contains("NAME a"); } @Test public void assertOnChange_noExceptionIfNothingChanges() { this.useDefaultRenamer = true; this.assertOnChange = true; testSame("const x = 1; function foo() { const y = 2; }"); } }
googleapis/google-cloud-java
36,723
java-shopping-merchant-accounts/grpc-google-shopping-merchant-accounts-v1/src/main/java/com/google/shopping/merchant/accounts/v1/HomepageServiceGrpc.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.shopping.merchant.accounts.v1; import static io.grpc.MethodDescriptor.generateFullMethodName; /** * * * <pre> * Service to support an API for a store's homepage. * </pre> */ @javax.annotation.Generated( value = "by gRPC proto compiler", comments = "Source: google/shopping/merchant/accounts/v1/homepage.proto") @io.grpc.stub.annotations.GrpcGenerated public final class HomepageServiceGrpc { private HomepageServiceGrpc() {} public static final java.lang.String SERVICE_NAME = "google.shopping.merchant.accounts.v1.HomepageService"; // Static method descriptors that strictly reflect the proto. 
private static volatile io.grpc.MethodDescriptor< com.google.shopping.merchant.accounts.v1.GetHomepageRequest, com.google.shopping.merchant.accounts.v1.Homepage> getGetHomepageMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "GetHomepage", requestType = com.google.shopping.merchant.accounts.v1.GetHomepageRequest.class, responseType = com.google.shopping.merchant.accounts.v1.Homepage.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.shopping.merchant.accounts.v1.GetHomepageRequest, com.google.shopping.merchant.accounts.v1.Homepage> getGetHomepageMethod() { io.grpc.MethodDescriptor< com.google.shopping.merchant.accounts.v1.GetHomepageRequest, com.google.shopping.merchant.accounts.v1.Homepage> getGetHomepageMethod; if ((getGetHomepageMethod = HomepageServiceGrpc.getGetHomepageMethod) == null) { synchronized (HomepageServiceGrpc.class) { if ((getGetHomepageMethod = HomepageServiceGrpc.getGetHomepageMethod) == null) { HomepageServiceGrpc.getGetHomepageMethod = getGetHomepageMethod = io.grpc.MethodDescriptor .<com.google.shopping.merchant.accounts.v1.GetHomepageRequest, com.google.shopping.merchant.accounts.v1.Homepage> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GetHomepage")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.shopping.merchant.accounts.v1.GetHomepageRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.shopping.merchant.accounts.v1.Homepage .getDefaultInstance())) .setSchemaDescriptor( new HomepageServiceMethodDescriptorSupplier("GetHomepage")) .build(); } } } return getGetHomepageMethod; } private static volatile io.grpc.MethodDescriptor< com.google.shopping.merchant.accounts.v1.UpdateHomepageRequest, com.google.shopping.merchant.accounts.v1.Homepage> 
getUpdateHomepageMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "UpdateHomepage", requestType = com.google.shopping.merchant.accounts.v1.UpdateHomepageRequest.class, responseType = com.google.shopping.merchant.accounts.v1.Homepage.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.shopping.merchant.accounts.v1.UpdateHomepageRequest, com.google.shopping.merchant.accounts.v1.Homepage> getUpdateHomepageMethod() { io.grpc.MethodDescriptor< com.google.shopping.merchant.accounts.v1.UpdateHomepageRequest, com.google.shopping.merchant.accounts.v1.Homepage> getUpdateHomepageMethod; if ((getUpdateHomepageMethod = HomepageServiceGrpc.getUpdateHomepageMethod) == null) { synchronized (HomepageServiceGrpc.class) { if ((getUpdateHomepageMethod = HomepageServiceGrpc.getUpdateHomepageMethod) == null) { HomepageServiceGrpc.getUpdateHomepageMethod = getUpdateHomepageMethod = io.grpc.MethodDescriptor .<com.google.shopping.merchant.accounts.v1.UpdateHomepageRequest, com.google.shopping.merchant.accounts.v1.Homepage> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "UpdateHomepage")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.shopping.merchant.accounts.v1.UpdateHomepageRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.shopping.merchant.accounts.v1.Homepage .getDefaultInstance())) .setSchemaDescriptor( new HomepageServiceMethodDescriptorSupplier("UpdateHomepage")) .build(); } } } return getUpdateHomepageMethod; } private static volatile io.grpc.MethodDescriptor< com.google.shopping.merchant.accounts.v1.ClaimHomepageRequest, com.google.shopping.merchant.accounts.v1.Homepage> getClaimHomepageMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "ClaimHomepage", requestType = 
com.google.shopping.merchant.accounts.v1.ClaimHomepageRequest.class, responseType = com.google.shopping.merchant.accounts.v1.Homepage.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.shopping.merchant.accounts.v1.ClaimHomepageRequest, com.google.shopping.merchant.accounts.v1.Homepage> getClaimHomepageMethod() { io.grpc.MethodDescriptor< com.google.shopping.merchant.accounts.v1.ClaimHomepageRequest, com.google.shopping.merchant.accounts.v1.Homepage> getClaimHomepageMethod; if ((getClaimHomepageMethod = HomepageServiceGrpc.getClaimHomepageMethod) == null) { synchronized (HomepageServiceGrpc.class) { if ((getClaimHomepageMethod = HomepageServiceGrpc.getClaimHomepageMethod) == null) { HomepageServiceGrpc.getClaimHomepageMethod = getClaimHomepageMethod = io.grpc.MethodDescriptor .<com.google.shopping.merchant.accounts.v1.ClaimHomepageRequest, com.google.shopping.merchant.accounts.v1.Homepage> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "ClaimHomepage")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.shopping.merchant.accounts.v1.ClaimHomepageRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.shopping.merchant.accounts.v1.Homepage .getDefaultInstance())) .setSchemaDescriptor( new HomepageServiceMethodDescriptorSupplier("ClaimHomepage")) .build(); } } } return getClaimHomepageMethod; } private static volatile io.grpc.MethodDescriptor< com.google.shopping.merchant.accounts.v1.UnclaimHomepageRequest, com.google.shopping.merchant.accounts.v1.Homepage> getUnclaimHomepageMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "UnclaimHomepage", requestType = com.google.shopping.merchant.accounts.v1.UnclaimHomepageRequest.class, responseType = com.google.shopping.merchant.accounts.v1.Homepage.class, 
methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.shopping.merchant.accounts.v1.UnclaimHomepageRequest, com.google.shopping.merchant.accounts.v1.Homepage> getUnclaimHomepageMethod() { io.grpc.MethodDescriptor< com.google.shopping.merchant.accounts.v1.UnclaimHomepageRequest, com.google.shopping.merchant.accounts.v1.Homepage> getUnclaimHomepageMethod; if ((getUnclaimHomepageMethod = HomepageServiceGrpc.getUnclaimHomepageMethod) == null) { synchronized (HomepageServiceGrpc.class) { if ((getUnclaimHomepageMethod = HomepageServiceGrpc.getUnclaimHomepageMethod) == null) { HomepageServiceGrpc.getUnclaimHomepageMethod = getUnclaimHomepageMethod = io.grpc.MethodDescriptor .<com.google.shopping.merchant.accounts.v1.UnclaimHomepageRequest, com.google.shopping.merchant.accounts.v1.Homepage> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "UnclaimHomepage")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.shopping.merchant.accounts.v1.UnclaimHomepageRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.shopping.merchant.accounts.v1.Homepage .getDefaultInstance())) .setSchemaDescriptor( new HomepageServiceMethodDescriptorSupplier("UnclaimHomepage")) .build(); } } } return getUnclaimHomepageMethod; } /** Creates a new async stub that supports all call types for the service */ public static HomepageServiceStub newStub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<HomepageServiceStub> factory = new io.grpc.stub.AbstractStub.StubFactory<HomepageServiceStub>() { @java.lang.Override public HomepageServiceStub newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new HomepageServiceStub(channel, callOptions); } }; return HomepageServiceStub.newStub(factory, channel); } /** Creates a new blocking-style stub 
that supports all types of calls on the service */ public static HomepageServiceBlockingV2Stub newBlockingV2Stub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<HomepageServiceBlockingV2Stub> factory = new io.grpc.stub.AbstractStub.StubFactory<HomepageServiceBlockingV2Stub>() { @java.lang.Override public HomepageServiceBlockingV2Stub newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new HomepageServiceBlockingV2Stub(channel, callOptions); } }; return HomepageServiceBlockingV2Stub.newStub(factory, channel); } /** * Creates a new blocking-style stub that supports unary and streaming output calls on the service */ public static HomepageServiceBlockingStub newBlockingStub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<HomepageServiceBlockingStub> factory = new io.grpc.stub.AbstractStub.StubFactory<HomepageServiceBlockingStub>() { @java.lang.Override public HomepageServiceBlockingStub newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new HomepageServiceBlockingStub(channel, callOptions); } }; return HomepageServiceBlockingStub.newStub(factory, channel); } /** Creates a new ListenableFuture-style stub that supports unary calls on the service */ public static HomepageServiceFutureStub newFutureStub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<HomepageServiceFutureStub> factory = new io.grpc.stub.AbstractStub.StubFactory<HomepageServiceFutureStub>() { @java.lang.Override public HomepageServiceFutureStub newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new HomepageServiceFutureStub(channel, callOptions); } }; return HomepageServiceFutureStub.newStub(factory, channel); } /** * * * <pre> * Service to support an API for a store's homepage. * </pre> */ public interface AsyncService { /** * * * <pre> * Retrieves a store's homepage. 
* </pre> */ default void getHomepage( com.google.shopping.merchant.accounts.v1.GetHomepageRequest request, io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1.Homepage> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getGetHomepageMethod(), responseObserver); } /** * * * <pre> * Updates a store's homepage. Executing this method requires admin access. * </pre> */ default void updateHomepage( com.google.shopping.merchant.accounts.v1.UpdateHomepageRequest request, io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1.Homepage> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getUpdateHomepageMethod(), responseObserver); } /** * * * <pre> * Claims a store's homepage. Executing this method requires admin access. * If the homepage is already claimed, this will recheck the * verification (unless the business is exempted from claiming, which also * exempts from verification) and return a successful response. If ownership * can no longer be verified, it will return an error, but it won't clear the * claim. * In case of failure, a canonical error message is returned: * * PERMISSION_DENIED: User doesn't have the necessary permissions on this * Merchant Center account. * * FAILED_PRECONDITION: * - The account is not a Merchant Center account. * - Merchant Center account doesn't have a homepage. * - Claiming failed (in this case the error message contains more * details). * </pre> */ default void claimHomepage( com.google.shopping.merchant.accounts.v1.ClaimHomepageRequest request, io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1.Homepage> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getClaimHomepageMethod(), responseObserver); } /** * * * <pre> * Unclaims a store's homepage. Executing this method requires admin access. 
* </pre> */ default void unclaimHomepage( com.google.shopping.merchant.accounts.v1.UnclaimHomepageRequest request, io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1.Homepage> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getUnclaimHomepageMethod(), responseObserver); } } /** * Base class for the server implementation of the service HomepageService. * * <pre> * Service to support an API for a store's homepage. * </pre> */ public abstract static class HomepageServiceImplBase implements io.grpc.BindableService, AsyncService { @java.lang.Override public final io.grpc.ServerServiceDefinition bindService() { return HomepageServiceGrpc.bindService(this); } } /** * A stub to allow clients to do asynchronous rpc calls to service HomepageService. * * <pre> * Service to support an API for a store's homepage. * </pre> */ public static final class HomepageServiceStub extends io.grpc.stub.AbstractAsyncStub<HomepageServiceStub> { private HomepageServiceStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected HomepageServiceStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new HomepageServiceStub(channel, callOptions); } /** * * * <pre> * Retrieves a store's homepage. * </pre> */ public void getHomepage( com.google.shopping.merchant.accounts.v1.GetHomepageRequest request, io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1.Homepage> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getGetHomepageMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Updates a store's homepage. Executing this method requires admin access. 
* </pre> */ public void updateHomepage( com.google.shopping.merchant.accounts.v1.UpdateHomepageRequest request, io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1.Homepage> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getUpdateHomepageMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Claims a store's homepage. Executing this method requires admin access. * If the homepage is already claimed, this will recheck the * verification (unless the business is exempted from claiming, which also * exempts from verification) and return a successful response. If ownership * can no longer be verified, it will return an error, but it won't clear the * claim. * In case of failure, a canonical error message is returned: * * PERMISSION_DENIED: User doesn't have the necessary permissions on this * Merchant Center account. * * FAILED_PRECONDITION: * - The account is not a Merchant Center account. * - Merchant Center account doesn't have a homepage. * - Claiming failed (in this case the error message contains more * details). * </pre> */ public void claimHomepage( com.google.shopping.merchant.accounts.v1.ClaimHomepageRequest request, io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1.Homepage> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getClaimHomepageMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Unclaims a store's homepage. Executing this method requires admin access. * </pre> */ public void unclaimHomepage( com.google.shopping.merchant.accounts.v1.UnclaimHomepageRequest request, io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1.Homepage> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getUnclaimHomepageMethod(), getCallOptions()), request, responseObserver); } } /** * A stub to allow clients to do synchronous rpc calls to service HomepageService. 
* * <pre> * Service to support an API for a store's homepage. * </pre> */ public static final class HomepageServiceBlockingV2Stub extends io.grpc.stub.AbstractBlockingStub<HomepageServiceBlockingV2Stub> { private HomepageServiceBlockingV2Stub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected HomepageServiceBlockingV2Stub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new HomepageServiceBlockingV2Stub(channel, callOptions); } /** * * * <pre> * Retrieves a store's homepage. * </pre> */ public com.google.shopping.merchant.accounts.v1.Homepage getHomepage( com.google.shopping.merchant.accounts.v1.GetHomepageRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getGetHomepageMethod(), getCallOptions(), request); } /** * * * <pre> * Updates a store's homepage. Executing this method requires admin access. * </pre> */ public com.google.shopping.merchant.accounts.v1.Homepage updateHomepage( com.google.shopping.merchant.accounts.v1.UpdateHomepageRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getUpdateHomepageMethod(), getCallOptions(), request); } /** * * * <pre> * Claims a store's homepage. Executing this method requires admin access. * If the homepage is already claimed, this will recheck the * verification (unless the business is exempted from claiming, which also * exempts from verification) and return a successful response. If ownership * can no longer be verified, it will return an error, but it won't clear the * claim. * In case of failure, a canonical error message is returned: * * PERMISSION_DENIED: User doesn't have the necessary permissions on this * Merchant Center account. * * FAILED_PRECONDITION: * - The account is not a Merchant Center account. * - Merchant Center account doesn't have a homepage. * - Claiming failed (in this case the error message contains more * details). 
* </pre> */ public com.google.shopping.merchant.accounts.v1.Homepage claimHomepage( com.google.shopping.merchant.accounts.v1.ClaimHomepageRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getClaimHomepageMethod(), getCallOptions(), request); } /** * * * <pre> * Unclaims a store's homepage. Executing this method requires admin access. * </pre> */ public com.google.shopping.merchant.accounts.v1.Homepage unclaimHomepage( com.google.shopping.merchant.accounts.v1.UnclaimHomepageRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getUnclaimHomepageMethod(), getCallOptions(), request); } } /** * A stub to allow clients to do limited synchronous rpc calls to service HomepageService. * * <pre> * Service to support an API for a store's homepage. * </pre> */ public static final class HomepageServiceBlockingStub extends io.grpc.stub.AbstractBlockingStub<HomepageServiceBlockingStub> { private HomepageServiceBlockingStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected HomepageServiceBlockingStub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new HomepageServiceBlockingStub(channel, callOptions); } /** * * * <pre> * Retrieves a store's homepage. * </pre> */ public com.google.shopping.merchant.accounts.v1.Homepage getHomepage( com.google.shopping.merchant.accounts.v1.GetHomepageRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getGetHomepageMethod(), getCallOptions(), request); } /** * * * <pre> * Updates a store's homepage. Executing this method requires admin access. 
* </pre> */ public com.google.shopping.merchant.accounts.v1.Homepage updateHomepage( com.google.shopping.merchant.accounts.v1.UpdateHomepageRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getUpdateHomepageMethod(), getCallOptions(), request); } /** * * * <pre> * Claims a store's homepage. Executing this method requires admin access. * If the homepage is already claimed, this will recheck the * verification (unless the business is exempted from claiming, which also * exempts from verification) and return a successful response. If ownership * can no longer be verified, it will return an error, but it won't clear the * claim. * In case of failure, a canonical error message is returned: * * PERMISSION_DENIED: User doesn't have the necessary permissions on this * Merchant Center account. * * FAILED_PRECONDITION: * - The account is not a Merchant Center account. * - Merchant Center account doesn't have a homepage. * - Claiming failed (in this case the error message contains more * details). * </pre> */ public com.google.shopping.merchant.accounts.v1.Homepage claimHomepage( com.google.shopping.merchant.accounts.v1.ClaimHomepageRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getClaimHomepageMethod(), getCallOptions(), request); } /** * * * <pre> * Unclaims a store's homepage. Executing this method requires admin access. * </pre> */ public com.google.shopping.merchant.accounts.v1.Homepage unclaimHomepage( com.google.shopping.merchant.accounts.v1.UnclaimHomepageRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getUnclaimHomepageMethod(), getCallOptions(), request); } } /** * A stub to allow clients to do ListenableFuture-style rpc calls to service HomepageService. * * <pre> * Service to support an API for a store's homepage. 
* </pre> */ public static final class HomepageServiceFutureStub extends io.grpc.stub.AbstractFutureStub<HomepageServiceFutureStub> { private HomepageServiceFutureStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected HomepageServiceFutureStub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new HomepageServiceFutureStub(channel, callOptions); } /** * * * <pre> * Retrieves a store's homepage. * </pre> */ public com.google.common.util.concurrent.ListenableFuture< com.google.shopping.merchant.accounts.v1.Homepage> getHomepage(com.google.shopping.merchant.accounts.v1.GetHomepageRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getGetHomepageMethod(), getCallOptions()), request); } /** * * * <pre> * Updates a store's homepage. Executing this method requires admin access. * </pre> */ public com.google.common.util.concurrent.ListenableFuture< com.google.shopping.merchant.accounts.v1.Homepage> updateHomepage(com.google.shopping.merchant.accounts.v1.UpdateHomepageRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getUpdateHomepageMethod(), getCallOptions()), request); } /** * * * <pre> * Claims a store's homepage. Executing this method requires admin access. * If the homepage is already claimed, this will recheck the * verification (unless the business is exempted from claiming, which also * exempts from verification) and return a successful response. If ownership * can no longer be verified, it will return an error, but it won't clear the * claim. * In case of failure, a canonical error message is returned: * * PERMISSION_DENIED: User doesn't have the necessary permissions on this * Merchant Center account. * * FAILED_PRECONDITION: * - The account is not a Merchant Center account. * - Merchant Center account doesn't have a homepage. 
* - Claiming failed (in this case the error message contains more * details). * </pre> */ public com.google.common.util.concurrent.ListenableFuture< com.google.shopping.merchant.accounts.v1.Homepage> claimHomepage(com.google.shopping.merchant.accounts.v1.ClaimHomepageRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getClaimHomepageMethod(), getCallOptions()), request); } /** * * * <pre> * Unclaims a store's homepage. Executing this method requires admin access. * </pre> */ public com.google.common.util.concurrent.ListenableFuture< com.google.shopping.merchant.accounts.v1.Homepage> unclaimHomepage(com.google.shopping.merchant.accounts.v1.UnclaimHomepageRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getUnclaimHomepageMethod(), getCallOptions()), request); } } private static final int METHODID_GET_HOMEPAGE = 0; private static final int METHODID_UPDATE_HOMEPAGE = 1; private static final int METHODID_CLAIM_HOMEPAGE = 2; private static final int METHODID_UNCLAIM_HOMEPAGE = 3; private static final class MethodHandlers<Req, Resp> implements io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>, io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>, io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>, io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> { private final AsyncService serviceImpl; private final int methodId; MethodHandlers(AsyncService serviceImpl, int methodId) { this.serviceImpl = serviceImpl; this.methodId = methodId; } @java.lang.Override @java.lang.SuppressWarnings("unchecked") public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) { switch (methodId) { case METHODID_GET_HOMEPAGE: serviceImpl.getHomepage( (com.google.shopping.merchant.accounts.v1.GetHomepageRequest) request, (io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1.Homepage>) responseObserver); break; case METHODID_UPDATE_HOMEPAGE: serviceImpl.updateHomepage( 
(com.google.shopping.merchant.accounts.v1.UpdateHomepageRequest) request, (io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1.Homepage>) responseObserver); break; case METHODID_CLAIM_HOMEPAGE: serviceImpl.claimHomepage( (com.google.shopping.merchant.accounts.v1.ClaimHomepageRequest) request, (io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1.Homepage>) responseObserver); break; case METHODID_UNCLAIM_HOMEPAGE: serviceImpl.unclaimHomepage( (com.google.shopping.merchant.accounts.v1.UnclaimHomepageRequest) request, (io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1.Homepage>) responseObserver); break; default: throw new AssertionError(); } } @java.lang.Override @java.lang.SuppressWarnings("unchecked") public io.grpc.stub.StreamObserver<Req> invoke( io.grpc.stub.StreamObserver<Resp> responseObserver) { switch (methodId) { default: throw new AssertionError(); } } } public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) { return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor()) .addMethod( getGetHomepageMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.shopping.merchant.accounts.v1.GetHomepageRequest, com.google.shopping.merchant.accounts.v1.Homepage>( service, METHODID_GET_HOMEPAGE))) .addMethod( getUpdateHomepageMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.shopping.merchant.accounts.v1.UpdateHomepageRequest, com.google.shopping.merchant.accounts.v1.Homepage>( service, METHODID_UPDATE_HOMEPAGE))) .addMethod( getClaimHomepageMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.shopping.merchant.accounts.v1.ClaimHomepageRequest, com.google.shopping.merchant.accounts.v1.Homepage>( service, METHODID_CLAIM_HOMEPAGE))) .addMethod( getUnclaimHomepageMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.shopping.merchant.accounts.v1.UnclaimHomepageRequest, 
com.google.shopping.merchant.accounts.v1.Homepage>( service, METHODID_UNCLAIM_HOMEPAGE))) .build(); } private abstract static class HomepageServiceBaseDescriptorSupplier implements io.grpc.protobuf.ProtoFileDescriptorSupplier, io.grpc.protobuf.ProtoServiceDescriptorSupplier { HomepageServiceBaseDescriptorSupplier() {} @java.lang.Override public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() { return com.google.shopping.merchant.accounts.v1.HomepageProto.getDescriptor(); } @java.lang.Override public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() { return getFileDescriptor().findServiceByName("HomepageService"); } } private static final class HomepageServiceFileDescriptorSupplier extends HomepageServiceBaseDescriptorSupplier { HomepageServiceFileDescriptorSupplier() {} } private static final class HomepageServiceMethodDescriptorSupplier extends HomepageServiceBaseDescriptorSupplier implements io.grpc.protobuf.ProtoMethodDescriptorSupplier { private final java.lang.String methodName; HomepageServiceMethodDescriptorSupplier(java.lang.String methodName) { this.methodName = methodName; } @java.lang.Override public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() { return getServiceDescriptor().findMethodByName(methodName); } } private static volatile io.grpc.ServiceDescriptor serviceDescriptor; public static io.grpc.ServiceDescriptor getServiceDescriptor() { io.grpc.ServiceDescriptor result = serviceDescriptor; if (result == null) { synchronized (HomepageServiceGrpc.class) { result = serviceDescriptor; if (result == null) { serviceDescriptor = result = io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME) .setSchemaDescriptor(new HomepageServiceFileDescriptorSupplier()) .addMethod(getGetHomepageMethod()) .addMethod(getUpdateHomepageMethod()) .addMethod(getClaimHomepageMethod()) .addMethod(getUnclaimHomepageMethod()) .build(); } } } return result; } }
apache/kafka
36,793
clients/clients-integration-tests/src/test/java/org/apache/kafka/clients/consumer/ConsumerBounceTest.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.kafka.clients.consumer;

import kafka.server.KafkaBroker;

import org.apache.kafka.clients.ClientsTestUtils;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.config.TopicConfig;
import org.apache.kafka.common.errors.GroupMaxSizeReachedException;
import org.apache.kafka.common.message.FindCoordinatorRequestData;
import org.apache.kafka.common.requests.FindCoordinatorRequest;
import org.apache.kafka.common.requests.FindCoordinatorResponse;
import org.apache.kafka.common.test.ClusterInstance;
import org.apache.kafka.common.test.api.ClusterConfigProperty;
import org.apache.kafka.common.test.api.ClusterTest;
import org.apache.kafka.common.test.api.ClusterTestDefaults;
import org.apache.kafka.common.test.api.Type;
import org.apache.kafka.common.utils.LogContext;
import org.apache.kafka.coordinator.group.GroupCoordinatorConfig;
import org.apache.kafka.server.IntegrationTestUtils;
import org.apache.kafka.server.config.KRaftConfigs;
import org.apache.kafka.server.config.ReplicationConfigs;
import org.apache.kafka.server.config.ServerConfigs;
import org.apache.kafka.server.config.ServerLogConfigs;
import org.apache.kafka.server.util.ShutdownableThread;
import org.apache.kafka.test.TestUtils;

import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.slf4j.Logger;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.IntStream;

import static org.apache.kafka.test.TestUtils.SEEDED_RANDOM;
import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertInstanceOf;
import static org.junit.jupiter.api.Assertions.assertTrue;

/**
 * Integration tests for the consumer that cover basic usage as well as server failures
 */
@ClusterTestDefaults(
    types = {Type.KRAFT},
    brokers = ConsumerBounceTest.BROKER_COUNT,
    serverProperties = {
        @ClusterConfigProperty(key = GroupCoordinatorConfig.OFFSETS_TOPIC_REPLICATION_FACTOR_CONFIG, value = "3"), // don't want to lose offset
        @ClusterConfigProperty(key = GroupCoordinatorConfig.OFFSETS_TOPIC_PARTITIONS_CONFIG, value = "1"),
        @ClusterConfigProperty(key = GroupCoordinatorConfig.GROUP_MIN_SESSION_TIMEOUT_MS_CONFIG, value = "10"), // set small enough session timeout
        @ClusterConfigProperty(key = GroupCoordinatorConfig.GROUP_INITIAL_REBALANCE_DELAY_MS_CONFIG, value = "0"),
        // Tests will run for CONSUMER and CLASSIC group protocol, so set the group max size property required for each.
        @ClusterConfigProperty(key = GroupCoordinatorConfig.CONSUMER_GROUP_MAX_SIZE_CONFIG, value = ConsumerBounceTest.MAX_GROUP_SIZE),
        @ClusterConfigProperty(key = GroupCoordinatorConfig.GROUP_MAX_SIZE_CONFIG, value = ConsumerBounceTest.MAX_GROUP_SIZE),
        @ClusterConfigProperty(key = ServerLogConfigs.AUTO_CREATE_TOPICS_ENABLE_CONFIG, value = "false"),
        @ClusterConfigProperty(key = ServerLogConfigs.LOG_INITIAL_TASK_DELAY_MS_CONFIG, value = "100"),
        @ClusterConfigProperty(key = ServerConfigs.CONTROLLED_SHUTDOWN_ENABLE_CONFIG, value = "false"),
        @ClusterConfigProperty(key = TopicConfig.UNCLEAN_LEADER_ELECTION_ENABLE_CONFIG, value = "true"),
        @ClusterConfigProperty(key = TopicConfig.FILE_DELETE_DELAY_MS_CONFIG, value = "1000"),
        @ClusterConfigProperty(key = ReplicationConfigs.UNCLEAN_LEADER_ELECTION_INTERVAL_MS_CONFIG, value = "50"),
        @ClusterConfigProperty(key = KRaftConfigs.BROKER_HEARTBEAT_INTERVAL_MS_CONFIG, value = "50"),
        @ClusterConfigProperty(key = KRaftConfigs.BROKER_SESSION_TIMEOUT_MS_CONFIG, value = "300")
    }
)
public class ConsumerBounceTest {

    private final Logger logger = new LogContext("ConsumerBounceTest").logger(this.getClass());

    public static final int BROKER_COUNT = 3;
    public static final String MAX_GROUP_SIZE = "5";

    private final Optional<Long> gracefulCloseTimeMs = Optional.of(1000L);
    private final ScheduledExecutorService executor = Executors.newScheduledThreadPool(2);
    private final String topic = "topic";
    private final int partition = 0;
    private final int numPartitions = 3;
    private final short numReplica = 3;
    private final TopicPartition topicPartition = new TopicPartition(topic, partition);
    private final ClusterInstance clusterInstance;

    // Consumers and pollers created by tests; closed/shut down in tearDown.
    private final List<Consumer<byte[], byte[]>> consumers = new ArrayList<>();
    private final List<ConsumerAssignmentPoller> consumerPollers = new ArrayList<>();

    ConsumerBounceTest(ClusterInstance clusterInstance) {
        this.clusterInstance = clusterInstance;
    }

    @BeforeEach
    void setUp() throws InterruptedException {
        clusterInstance.createTopic(topic, numPartitions, numReplica);
    }

    @AfterEach
    void tearDown() throws InterruptedException {
        consumerPollers.forEach(poller -> {
            try {
                poller.shutdown();
            } catch (InterruptedException e) {
                throw new RuntimeException(e);
            }
        });
        executor.shutdownNow();
        // Wait for any active tasks to terminate to ensure consumer is not closed while being used from another thread
        assertTrue(executor.awaitTermination(5000, TimeUnit.MILLISECONDS), "Executor did not terminate");
        consumers.forEach(Consumer::close);
    }

    @ClusterTest
    public void testClassicConsumerConsumptionWithBrokerFailures() throws Exception {
        consumeWithBrokerFailures(10, GroupProtocol.CLASSIC);
    }

    @ClusterTest
    public void testAsyncConsumerConsumptionWithBrokerFailures() throws Exception {
        consumeWithBrokerFailures(10, GroupProtocol.CONSUMER);
    }

    /**
     * 1. Produce a bunch of messages
     * 2. Then consume the messages while killing and restarting brokers at random
     */
    private void consumeWithBrokerFailures(int numIters, GroupProtocol groupProtocol) throws InterruptedException {
        int numRecords = 1000;
        ClientsTestUtils.sendRecords(clusterInstance, topicPartition, numRecords);

        AtomicInteger consumed = new AtomicInteger(0);
        try (Consumer<byte[], byte[]> consumer =
                 clusterInstance.consumer(Map.of(ConsumerConfig.GROUP_PROTOCOL_CONFIG, groupProtocol.name))) {
            consumer.subscribe(List.of(topic));

            BounceBrokerScheduler scheduler = new BounceBrokerScheduler(numIters, clusterInstance);
            try {
                scheduler.start();

                while (scheduler.isRunning()) {
                    ConsumerRecords<byte[], byte[]> records = consumer.poll(Duration.ofMillis(100));

                    records.forEach(record -> {
                        // Offsets must arrive in order with no gaps.
                        assertEquals(consumed.get(), record.offset());
                        consumed.incrementAndGet();
                    });

                    if (!records.isEmpty()) {
                        consumer.commitSync();

                        long currentPosition = consumer.position(topicPartition);
                        long committedOffset = consumer.committed(Set.of(topicPartition)).get(topicPartition).offset();
                        assertEquals(currentPosition, committedOffset);

                        if (currentPosition == numRecords) {
                            // Consumed everything; rewind and consume again while bouncing continues.
                            consumer.seekToBeginning(List.of());
                            consumed.set(0);
                        }
                    }
                }
            } finally {
                scheduler.shutdown();
            }
        }
    }

    @ClusterTest
    public void testClassicConsumerSeekAndCommitWithBrokerFailures() throws InterruptedException {
        seekAndCommitWithBrokerFailures(5, GroupProtocol.CLASSIC);
    }

    @ClusterTest
    public void testAsyncConsumerSeekAndCommitWithBrokerFailures() throws InterruptedException {
        seekAndCommitWithBrokerFailures(5, GroupProtocol.CONSUMER);
    }

    private void seekAndCommitWithBrokerFailures(int numIters, GroupProtocol groupProtocol) throws InterruptedException {
        int numRecords = 1000;
        ClientsTestUtils.sendRecords(clusterInstance, topicPartition, numRecords);

        try (Consumer<byte[], byte[]> consumer =
                 clusterInstance.consumer(Map.of(ConsumerConfig.GROUP_PROTOCOL_CONFIG, groupProtocol.name))) {
            consumer.assign(List.of(topicPartition));
            consumer.seek(topicPartition, 0);

            // Wait until all replicas have caught up before bouncing brokers.
            TestUtils.waitForCondition(() -> clusterInstance.brokers().values().stream().allMatch(broker ->
                broker.replicaManager().localLog(topicPartition).get().highWatermark() == numRecords
            ), 30000, "Failed to update high watermark for followers after timeout.");

            BounceBrokerScheduler scheduler = new BounceBrokerScheduler(numIters, clusterInstance);
            try {
                scheduler.start();

                while (scheduler.isRunning()) {
                    int coin = SEEDED_RANDOM.nextInt(0, 3);

                    if (coin == 0) {
                        logger.info("Seeking to end of log.");
                        consumer.seekToEnd(List.of());
                        assertEquals(numRecords, consumer.position(topicPartition));
                    } else if (coin == 1) {
                        int pos = SEEDED_RANDOM.nextInt(numRecords);
                        logger.info("Seeking to {}", pos);
                        consumer.seek(topicPartition, pos);
                        assertEquals(pos, consumer.position(topicPartition));
                    } else {
                        logger.info("Committing offset.");
                        consumer.commitSync();
                        assertEquals(consumer.position(topicPartition),
                            consumer.committed(Set.of(topicPartition)).get(topicPartition).offset());
                    }
                }
            } finally {
                scheduler.shutdown();
            }
        }
    }

    @ClusterTest
    public void testClassicSubscribeWhenTopicUnavailable() throws InterruptedException {
        testSubscribeWhenTopicUnavailable(GroupProtocol.CLASSIC);
    }

    @ClusterTest
    public void testAsyncSubscribeWhenTopicUnavailable() throws InterruptedException {
        testSubscribeWhenTopicUnavailable(GroupProtocol.CONSUMER);
    }

    private void testSubscribeWhenTopicUnavailable(GroupProtocol groupProtocol) throws InterruptedException {
        String newTopic = "new-topic";
        TopicPartition newTopicPartition = new TopicPartition(newTopic, 0);
        int numRecords = 1000;

        Consumer<byte[], byte[]> consumer = clusterInstance.consumer(
            Map.of(ConsumerConfig.GROUP_PROTOCOL_CONFIG, groupProtocol.name,
                ConsumerConfig.MAX_POLL_INTERVAL_MS_CONFIG, 6000,
                ConsumerConfig.METADATA_MAX_AGE_CONFIG, 100));
        consumers.add(consumer);
        // Subscribing before the topic exists; metadata refresh should pick it up later.
        consumer.subscribe(List.of(newTopic));
        consumer.poll(Duration.ZERO);

        // Schedule topic creation after 2 seconds
        executor.schedule(() ->
            assertDoesNotThrow(() -> clusterInstance.createTopic(newTopic, numPartitions, numReplica)),
            2, TimeUnit.SECONDS);

        // Start first poller
        ConsumerAssignmentPoller poller = new ConsumerAssignmentPoller(consumer, List.of(newTopic));
        consumerPollers.add(poller);
        poller.start();
        ClientsTestUtils.sendRecords(clusterInstance, newTopicPartition, numRecords);
        receiveExactRecords(poller, numRecords, 60000L);
        poller.shutdown();

        // Simulate broker failure and recovery
        clusterInstance.brokers().keySet().forEach(clusterInstance::shutdownBroker);
        Thread.sleep(500);
        clusterInstance.brokers().keySet().forEach(clusterInstance::startBroker);

        // Start second poller after recovery
        ConsumerAssignmentPoller poller2 = new ConsumerAssignmentPoller(consumer, List.of(newTopic));
        consumerPollers.add(poller2);
        poller2.start();
        ClientsTestUtils.sendRecords(clusterInstance, newTopicPartition, numRecords);
        receiveExactRecords(poller2, numRecords, 60000L);
    }

    @ClusterTest
    public void testClassicClose() throws Exception {
        testClose(GroupProtocol.CLASSIC);
    }

    @ClusterTest
    public void testAsyncClose() throws Exception {
        testClose(GroupProtocol.CONSUMER);
    }

    private void testClose(GroupProtocol groupProtocol) throws Exception {
        int numRecords = 10;
        ClientsTestUtils.sendRecords(clusterInstance, topicPartition, numRecords);

        checkCloseGoodPath(groupProtocol, numRecords, "group1");
        checkCloseWithCoordinatorFailure(groupProtocol, numRecords, "group2", "group3");
        checkCloseWithClusterFailure(groupProtocol, numRecords, "group4", "group5");
    }

    /**
     * Consumer is closed while cluster is healthy. Consumer should complete pending offset commits
     * and leave group. New consumer instance should be able to join group and start consuming from
     * last committed offset.
     */
    private void checkCloseGoodPath(GroupProtocol groupProtocol, int numRecords, String groupId) throws InterruptedException {
        Consumer<byte[], byte[]> consumer = createConsumerAndReceive(groupId, false, numRecords,
            Map.of(ConsumerConfig.GROUP_PROTOCOL_CONFIG, groupProtocol.name));
        assertDoesNotThrow(() -> submitCloseAndValidate(consumer, Long.MAX_VALUE, Optional.empty(), gracefulCloseTimeMs).get());
        checkClosedState(groupId, numRecords);
    }

    /**
     * Consumer closed while coordinator is unavailable. Close of consumers using group
     * management should complete after commit attempt even though commits fail due to rebalance.
     * Close of consumers using manual assignment should complete with successful commits since a
     * broker is available.
     */
    private void checkCloseWithCoordinatorFailure(GroupProtocol groupProtocol, int numRecords,
                                                  String dynamicGroup, String manualGroup) throws Exception {
        Consumer<byte[], byte[]> dynamicConsumer = createConsumerAndReceive(dynamicGroup, false, numRecords,
            Map.of(ConsumerConfig.GROUP_PROTOCOL_CONFIG, groupProtocol.name));
        Consumer<byte[], byte[]> manualConsumer = createConsumerAndReceive(manualGroup, true, numRecords,
            Map.of(ConsumerConfig.GROUP_PROTOCOL_CONFIG, groupProtocol.name));

        // Take down exactly the coordinators of the two groups.
        findCoordinators(List.of(dynamicGroup, manualGroup)).forEach(clusterInstance::shutdownBroker);

        submitCloseAndValidate(dynamicConsumer, Long.MAX_VALUE, Optional.empty(), gracefulCloseTimeMs).get();
        submitCloseAndValidate(manualConsumer, Long.MAX_VALUE, Optional.empty(), gracefulCloseTimeMs).get();

        restartDeadBrokers();
        checkClosedState(dynamicGroup, 0);
        checkClosedState(manualGroup, numRecords);
    }

    /**
     * Consumer is closed while all brokers are unavailable. Cannot rebalance or commit offsets since
     * there is no coordinator, but close should timeout and return. If close is invoked with a very
     * large timeout, close should timeout after request timeout.
     */
    private void checkCloseWithClusterFailure(GroupProtocol groupProtocol, int numRecords,
                                              String group1, String group2) throws Exception {
        Consumer<byte[], byte[]> consumer1 = createConsumerAndReceive(group1, false, numRecords,
            Map.of(ConsumerConfig.GROUP_PROTOCOL_CONFIG, groupProtocol.name));

        Map<String, String> consumerConfig = new HashMap<>();
        long requestTimeout = 6000;
        if (groupProtocol.equals(GroupProtocol.CLASSIC)) {
            consumerConfig.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "5000");
            consumerConfig.put(ConsumerConfig.HEARTBEAT_INTERVAL_MS_CONFIG, "1000");
        }
        consumerConfig.put(ConsumerConfig.REQUEST_TIMEOUT_MS_CONFIG, Long.toString(requestTimeout));
        consumerConfig.put(ConsumerConfig.GROUP_PROTOCOL_CONFIG, groupProtocol.name);
        Consumer<byte[], byte[]> consumer2 = createConsumerAndReceive(group2, true, numRecords, consumerConfig);

        clusterInstance.brokers().keySet().forEach(clusterInstance::shutdownBroker);

        long closeTimeout = 2000;
        submitCloseAndValidate(consumer1, closeTimeout, Optional.empty(), Optional.of(closeTimeout)).get();
        submitCloseAndValidate(consumer2, Long.MAX_VALUE, Optional.empty(), Optional.of(requestTimeout)).get();
    }

    /**
     * Resolves the coordinator broker id for each of the given groups.
     *
     * @param groups group ids to look up
     * @return node ids of the coordinators
     */
    private Set<Integer> findCoordinators(List<String> groups) throws Exception {
        FindCoordinatorRequest request = new FindCoordinatorRequest.Builder(new FindCoordinatorRequestData()
            .setKeyType(FindCoordinatorRequest.CoordinatorType.GROUP.id())
            .setCoordinatorKeys(groups)).build();
        Set<Integer> nodes = new HashSet<>();
        TestUtils.waitForCondition(() -> {
            FindCoordinatorResponse response = null;
            try {
                response = IntegrationTestUtils.connectAndReceive(request, clusterInstance.brokerBoundPorts().get(0));
            } catch (IOException e) {
                return false;
            }
            if (response.hasError()) return false;
            for (String group : groups)
                if (response.coordinatorByKey(group).isEmpty()) return false;
                else nodes.add(response.coordinatorByKey(group).get().nodeId());
            return true;
        }, "Failed to find coordinator for group " + groups);
        return nodes;
    }

    @ClusterTest
    public void testClassicConsumerReceivesFatalExceptionWhenGroupPassesMaxSize() throws Exception {
        testConsumerReceivesFatalExceptionWhenGroupPassesMaxSize(GroupProtocol.CLASSIC);
    }

    @ClusterTest
    public void testAsyncConsumerReceivesFatalExceptionWhenGroupPassesMaxSize() throws Exception {
        testConsumerReceivesFatalExceptionWhenGroupPassesMaxSize(GroupProtocol.CONSUMER);
    }

    private void testConsumerReceivesFatalExceptionWhenGroupPassesMaxSize(GroupProtocol groupProtocol) throws Exception {
        String group = "fatal-exception-test";
        String topic = "fatal-exception-test";
        Map<String, String> consumerConfig = new HashMap<>();
        int numPartition = Integer.parseInt(MAX_GROUP_SIZE);
        consumerConfig.put(ConsumerConfig.MAX_POLL_INTERVAL_MS_CONFIG, "60000");
        if (groupProtocol.equals(GroupProtocol.CLASSIC)) {
            consumerConfig.put(ConsumerConfig.HEARTBEAT_INTERVAL_MS_CONFIG, "1000");
        }
        consumerConfig.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");

        clusterInstance.createTopic(topic, Integer.parseInt(MAX_GROUP_SIZE), (short) BROKER_COUNT);
        Set<TopicPartition> partitions = new HashSet<>();
        for (int i = 0; i < Integer.parseInt(MAX_GROUP_SIZE); ++i)
            partitions.add(new TopicPartition(topic, i));

        // Fill the group to exactly its maximum size.
        addConsumersToGroupAndWaitForGroupAssignment(
            Integer.parseInt(MAX_GROUP_SIZE),
            List.of(topic),
            partitions,
            group,
            consumerConfig
        );

        // One consumer over the limit must be rejected with a fatal error.
        addConsumersToGroup(
            1,
            List.of(topic),
            group,
            consumerConfig
        );

        ConsumerAssignmentPoller rejectedConsumer = consumerPollers.get(consumerPollers.size() - 1);
        consumerPollers.remove(consumerPollers.size() - 1);

        TestUtils.waitForCondition(
            () -> rejectedConsumer.getThrownException().isPresent(),
            "Extra consumer did not throw an exception"
        );
        assertInstanceOf(GroupMaxSizeReachedException.class, rejectedConsumer.getThrownException().get());

        // assert group continues to live and the records to be distributed across all partitions.
        var data = "data".getBytes(StandardCharsets.UTF_8);
        try (Producer<byte[], byte[]> producer = clusterInstance.producer()) {
            IntStream.range(0, numPartition * 100).forEach(index ->
                producer.send(new ProducerRecord<>(topic, index % numPartition, data, data)));
        }

        TestUtils.waitForCondition(
            () -> consumerPollers.stream().allMatch(p -> p.receivedMessages() >= 100),
            10000L,
            "The consumers in the group could not fetch the expected records"
        );
    }

    /**
     * Create 'numOfConsumersToAdd' consumers, add them to the consumer group, and create corresponding
     * pollers. Wait for partition re-assignment and validate.
     *
     * Assignment validation requires that total number of partitions is greater than or equal to
     * the resulting number of consumers in the group.
     *
     * @param numOfConsumersToAdd number of consumers to create and add to the consumer group
     * @param topicsToSubscribe   topics to subscribe
     * @param subscriptions       set of all topic partitions
     * @param group               consumer group ID
     */
    private void addConsumersToGroupAndWaitForGroupAssignment(
        int numOfConsumersToAdd,
        List<String> topicsToSubscribe,
        Set<TopicPartition> subscriptions,
        String group,
        Map<String, String> consumerConfig
    ) throws InterruptedException {
        // Validation: number of consumers should not exceed number of partitions
        assertTrue(consumers.size() + numOfConsumersToAdd <= subscriptions.size(),
            "Total consumers exceed number of partitions");

        // Add consumers and pollers
        addConsumersToGroup(numOfConsumersToAdd, topicsToSubscribe, group, consumerConfig);

        // Validate that all pollers have assigned partitions
        validateGroupAssignment(consumerPollers, subscriptions);
    }

    /**
     * Check whether partition assignment is valid.
     * Assumes partition assignment is valid iff:
     * 1. Every consumer got assigned at least one partition
     * 2. Each partition is assigned to only one consumer
     * 3. Every partition is assigned to one of the consumers
     * 4. The assignment is the same as expected assignment (if provided)
     *
     * @param assignments        List of assignments, one set per consumer
     * @param partitions         All expected partitions
     * @param expectedAssignment Optional expected assignment
     * @return true if assignment is valid
     */
    private boolean isPartitionAssignmentValid(
        List<Set<TopicPartition>> assignments,
        Set<TopicPartition> partitions,
        List<Set<TopicPartition>> expectedAssignment
    ) {
        // 1. Check that every consumer has non-empty assignment
        boolean allNonEmpty = assignments.stream().noneMatch(Set::isEmpty);
        if (!allNonEmpty) return false;

        // 2. Check that total assigned partitions equals number of unique partitions
        Set<TopicPartition> allAssignedPartitions = new HashSet<>();
        for (Set<TopicPartition> assignment : assignments) {
            allAssignedPartitions.addAll(assignment);
        }
        if (allAssignedPartitions.size() != partitions.size()) {
            // Either some partitions were assigned multiple times or some were not assigned
            return false;
        }

        // 3. Check that assigned partitions exactly match the expected set
        if (!allAssignedPartitions.equals(partitions)) {
            return false;
        }

        // 4. If expected assignment is given, check for exact match
        if (expectedAssignment != null && !expectedAssignment.isEmpty()) {
            if (assignments.size() != expectedAssignment.size()) return false;
            for (int i = 0; i < assignments.size(); i++) {
                if (!assignments.get(i).equals(expectedAssignment.get(i))) return false;
            }
        }

        return true;
    }

    /**
     * Wait for consumers to get partition assignment and validate it.
     *
     * @param consumerPollers     Consumer pollers corresponding to the consumer group being tested
     * @param subscriptions       Set of all topic partitions
     * @param msg                 Optional message to print if validation fails
     * @param waitTimeMs          Wait timeout in milliseconds
     * @param expectedAssignments Expected assignments (optional)
     */
    private void validateGroupAssignment(
        List<ConsumerAssignmentPoller> consumerPollers,
        Set<TopicPartition> subscriptions,
        Optional<String> msg,
        long waitTimeMs,
        List<Set<TopicPartition>> expectedAssignments
    ) throws InterruptedException {
        List<Set<TopicPartition>> assignments = new ArrayList<>();
        TestUtils.waitForCondition(() -> {
            assignments.clear();
            consumerPollers.forEach(poller -> assignments.add(poller.consumerAssignment()));
            return isPartitionAssignmentValid(assignments, subscriptions, expectedAssignments);
        }, waitTimeMs, msg.orElse("Did not get valid assignment for partitions " + subscriptions + ". Instead got: " + assignments));
    }

    // Overload for convenience (optional msg and expectedAssignments)
    private void validateGroupAssignment(
        List<ConsumerAssignmentPoller> consumerPollers,
        Set<TopicPartition> subscriptions
    ) throws InterruptedException {
        validateGroupAssignment(consumerPollers, subscriptions, Optional.empty(), 10000L, new ArrayList<>());
    }

    /**
     * Create 'numOfConsumersToAdd' consumers, add them to the consumer group, and create corresponding pollers.
     *
     * @param numOfConsumersToAdd number of consumers to create and add to the consumer group
     * @param topicsToSubscribe   topics to which new consumers will subscribe
     * @param group               consumer group ID
     */
    private void addConsumersToGroup(
        int numOfConsumersToAdd,
        List<String> topicsToSubscribe,
        String group,
        Map<String, String> consumerConfigs) {
        Map<String, Object> configs = new HashMap<>(consumerConfigs);
        configs.put(ConsumerConfig.GROUP_ID_CONFIG, group);

        for (int i = 0; i < numOfConsumersToAdd; i++) {
            Consumer<byte[], byte[]> consumer = clusterInstance.consumer(configs);
            consumers.add(consumer);

            ConsumerAssignmentPoller poller = new ConsumerAssignmentPoller(consumer, topicsToSubscribe);
            poller.start();
            consumerPollers.add(poller);
        }
    }

    @ClusterTest
    public void testClassicCloseDuringRebalance() throws Exception {
        testCloseDuringRebalance(GroupProtocol.CLASSIC);
    }

    @ClusterTest
    public void testAsyncCloseDuringRebalance() throws Exception {
        testCloseDuringRebalance(GroupProtocol.CONSUMER);
    }

    public void testCloseDuringRebalance(GroupProtocol groupProtocol) throws Exception {
        Map<String, String> consumerConfig = new HashMap<>();
        consumerConfig.put(ConsumerConfig.MAX_POLL_INTERVAL_MS_CONFIG, "60000");
        if (groupProtocol.equals(GroupProtocol.CLASSIC)) {
            consumerConfig.put(ConsumerConfig.HEARTBEAT_INTERVAL_MS_CONFIG, "1000");
        }
        consumerConfig.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
        checkCloseDuringRebalance(consumerConfig);
    }

    private void checkCloseDuringRebalance(Map<String, String> consumerConfig) throws Exception {
        Map<String, Object> configs = new HashMap<>(consumerConfig);
        String groupId = "group";
        configs.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);

        Consumer<byte[], byte[]> consumer1 = clusterInstance.consumer(configs);
        Future<?> f1 = subscribeAndPoll(consumer1, Optional.empty());
        waitForRebalance(2000, f1, null);

        Consumer<byte[], byte[]> consumer2 = clusterInstance.consumer(configs);
        Future<?> f2 = subscribeAndPoll(consumer2, Optional.empty());
        waitForRebalance(2000, f2, consumer1);

        Future<?> rebalanceFuture = createConsumerToRebalance(groupId);

        // Close consumer1 while a rebalance is in progress.
        Future<?> closeFuture1 = submitCloseAndValidate(consumer1, Long.MAX_VALUE, Optional.empty(), gracefulCloseTimeMs);

        waitForRebalance(2000, rebalanceFuture, consumer2);

        createConsumerToRebalance(groupId); // one more time
        clusterInstance.brokers().values().forEach(KafkaBroker::shutdown);
        Future<?> closeFuture2 = submitCloseAndValidate(consumer2, Long.MAX_VALUE, Optional.empty(), Optional.of(0L));

        closeFuture1.get(2000, TimeUnit.MILLISECONDS);
        closeFuture2.get(2000, TimeUnit.MILLISECONDS);
    }

    Future<?> subscribeAndPoll(Consumer<byte[], byte[]> consumer, Optional<Semaphore> revokeSemaphore) {
        return executor.submit(() -> {
            consumer.subscribe(List.of(topic));
            revokeSemaphore.ifPresent(Semaphore::release);
            consumer.poll(Duration.ofMillis(500));
            return null;
        });
    }

    void waitForRebalance(long timeoutMs, Future<?> future, Consumer<byte[], byte[]> otherConsumers) {
        long startMs = System.currentTimeMillis();
        // Keep the other consumer polling so it can participate in the rebalance.
        while (System.currentTimeMillis() < startMs + timeoutMs && !future.isDone()) {
            if (otherConsumers != null) {
                otherConsumers.poll(Duration.ofMillis(100));
            }
        }
        assertTrue(future.isDone(), "Rebalance did not complete in time");
    }

    Future<?> createConsumerToRebalance(String groupId) throws Exception {
        Consumer<byte[], byte[]> consumer = clusterInstance.consumer(Map.of(ConsumerConfig.GROUP_ID_CONFIG, groupId));
        consumers.add(consumer);
        Semaphore rebalanceSemaphore = new Semaphore(0);
        Future<?> future = subscribeAndPoll(consumer, Optional.of(rebalanceSemaphore));
        assertTrue(rebalanceSemaphore.tryAcquire(2000, TimeUnit.MILLISECONDS), "Rebalance not triggered");
        assertFalse(future.isDone(), "Rebalance completed too early");
        return future;
    }

    private Consumer<byte[], byte[]> createConsumerAndReceive(String groupId, boolean manualAssign, int numRecords,
                                                              Map<String, String> consumerConfig) throws InterruptedException {
        Map<String, Object> configs = new HashMap<>(consumerConfig);
        configs.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
        Consumer<byte[], byte[]> consumer = clusterInstance.consumer(configs);
        ConsumerAssignmentPoller poller;
        if (manualAssign) {
            poller = new ConsumerAssignmentPoller(consumer, Set.of(topicPartition));
        } else {
            poller = new ConsumerAssignmentPoller(consumer, List.of(topic));
        }
        poller.start();
        consumers.add(consumer);
        consumerPollers.add(poller);
        receiveExactRecords(poller, numRecords, 60000L);
        poller.shutdown();
        return consumer;
    }

    private void restartDeadBrokers() {
        clusterInstance.brokers().forEach((id, broker) -> {
            if (broker.isShutdown()) {
                broker.startup();
            }
        });
    }

    private void checkClosedState(String groupId, int committedRecords) throws InterruptedException {
        // Check that close was graceful with offsets committed and leave group sent.
        // New instance of consumer should be assigned partitions immediately and should see committed offsets.
        Semaphore assignSemaphore = new Semaphore(0);
        try (Consumer<byte[], byte[]> consumer = clusterInstance.consumer(Map.of(ConsumerConfig.GROUP_ID_CONFIG, groupId))) {
            consumer.subscribe(List.of(topic), new ConsumerRebalanceListener() {
                @Override
                public void onPartitionsAssigned(Collection<TopicPartition> partitions) {
                    assignSemaphore.release();
                }

                @Override
                public void onPartitionsRevoked(Collection<TopicPartition> partitions) {
                    // Do nothing
                }
            });

            TestUtils.waitForCondition(() -> {
                consumer.poll(Duration.ofMillis(100));
                return assignSemaphore.tryAcquire();
            }, "Assignment did not complete on time");

            if (committedRecords > 0) {
                Map<TopicPartition, OffsetAndMetadata> committed = consumer.committed(Set.of(topicPartition));
                long offset = committed.get(topicPartition).offset();
                assertEquals(committedRecords, offset, "Committed offset does not match expected value.");
            }
        }
    }

    /**
     * Closes the consumer on the executor and asserts the close duration falls inside
     * [minCloseTimeMs, maxCloseTimeMs + grace].
     */
    private Future<?> submitCloseAndValidate(
        Consumer<byte[], byte[]> consumer,
        long closeTimeoutMs,
        Optional<Long> minCloseTimeMs,
        Optional<Long> maxCloseTimeMs) {
        return executor.submit(() -> {
            final long closeGraceTimeMs = 2000;
            long startMs = System.currentTimeMillis();
            logger.info("Closing consumer with timeout {} ms.", closeTimeoutMs);
            consumer.close(CloseOptions.timeout(Duration.ofMillis(closeTimeoutMs)));
            long timeTakenMs = System.currentTimeMillis() - startMs;

            maxCloseTimeMs.ifPresent(ms ->
                assertTrue(timeTakenMs < ms + closeGraceTimeMs, "Close took too long " + timeTakenMs)
            );

            minCloseTimeMs.ifPresent(ms ->
                assertTrue(timeTakenMs >= ms, "Close finished too quickly " + timeTakenMs)
            );

            logger.info("consumer.close() completed in {} ms.", timeTakenMs);
        }, 0);
    }

    private void receiveExactRecords(ConsumerAssignmentPoller consumer, int numRecords, long timeoutMs) throws InterruptedException {
        TestUtils.waitForCondition(() -> consumer.receivedMessages() == numRecords, timeoutMs,
            String.format("Consumer did not receive expected %d. It received %d", numRecords, consumer.receivedMessages()));
    }

    // A mock class to represent broker bouncing (simulate broker restart behavior)
    private static class BounceBrokerScheduler extends ShutdownableThread {
        private final int numIters;
        private int iter = 0;
        final ClusterInstance clusterInstance;

        public BounceBrokerScheduler(int numIters, ClusterInstance clusterInstance) {
            super("daemon-bounce-broker", false);
            this.numIters = numIters;
            this.clusterInstance = clusterInstance;
        }

        private void killRandomBroker() {
            this.clusterInstance.shutdownBroker(TestUtils.randomSelect(clusterInstance.brokerIds()));
        }

        private void restartDeadBrokers() {
            clusterInstance.brokers().forEach((id, broker) -> {
                if (broker.isShutdown()) {
                    broker.startup();
                }
            });
        }

        @Override
        public void doWork() {
            killRandomBroker();
            assertDoesNotThrow(() -> Thread.sleep(500));
            restartDeadBrokers();

            iter++;
            if (iter == numIters) {
                initiateShutdown();
            } else {
                assertDoesNotThrow(() -> Thread.sleep(500));
            }
        }
    }
}
google/j2objc
36,720
jre_emul/android/platform/libcore/ojluni/src/main/java/java/util/concurrent/ConcurrentLinkedQueue.java
/* * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ /* * This file is available under and governed by the GNU General Public * License version 2 only, as published by the Free Software Foundation. 
* However, the following notice accompanied the original version of this * file: * * Written by Doug Lea and Martin Buchholz with assistance from members of * JCP JSR-166 Expert Group and released to the public domain, as explained * at http://creativecommons.org/publicdomain/zero/1.0/ */ package java.util.concurrent; /* J2ObjC removed import java.lang.invoke.MethodHandles; import java.lang.invoke.VarHandle; import java.util.function.Predicate; */ import java.util.AbstractQueue; import java.util.Arrays; import java.util.Collection; import java.util.Iterator; import java.util.NoSuchElementException; import java.util.Objects; import java.util.Queue; import java.util.Spliterator; import java.util.Spliterators; import java.util.function.Consumer; /** * An unbounded thread-safe {@linkplain Queue queue} based on linked nodes. * This queue orders elements FIFO (first-in-first-out). * The <em>head</em> of the queue is that element that has been on the * queue the longest time. * The <em>tail</em> of the queue is that element that has been on the * queue the shortest time. New elements * are inserted at the tail of the queue, and the queue retrieval * operations obtain elements at the head of the queue. * A {@code ConcurrentLinkedQueue} is an appropriate choice when * many threads will share access to a common collection. * Like most other concurrent collection implementations, this class * does not permit the use of {@code null} elements. * * <p>This implementation employs an efficient <em>non-blocking</em> * algorithm based on one described in * <a href="http://www.cs.rochester.edu/~scott/papers/1996_PODC_queues.pdf"> * Simple, Fast, and Practical Non-Blocking and Blocking Concurrent Queue * Algorithms</a> by Maged M. Michael and Michael L. Scott. * * <p>Iterators are <i>weakly consistent</i>, returning elements * reflecting the state of the queue at some point at or since the * creation of the iterator. 
They do <em>not</em> throw {@link * java.util.ConcurrentModificationException}, and may proceed concurrently * with other operations. Elements contained in the queue since the creation * of the iterator will be returned exactly once. * * <p>Beware that, unlike in most collections, the {@code size} method * is <em>NOT</em> a constant-time operation. Because of the * asynchronous nature of these queues, determining the current number * of elements requires a traversal of the elements, and so may report * inaccurate results if this collection is modified during traversal. * * <p>Bulk operations that add, remove, or examine multiple elements, * such as {@link #addAll}, {@link #removeIf} or {@link #forEach}, * are <em>not</em> guaranteed to be performed atomically. * For example, a {@code forEach} traversal concurrent with an {@code * addAll} operation might observe only some of the added elements. * * <p>This class and its iterator implement all of the <em>optional</em> * methods of the {@link Queue} and {@link Iterator} interfaces. * * <p>Memory consistency effects: As with other concurrent * collections, actions in a thread prior to placing an object into a * {@code ConcurrentLinkedQueue} * <a href="package-summary.html#MemoryVisibility"><i>happen-before</i></a> * actions subsequent to the access or removal of that element from * the {@code ConcurrentLinkedQueue} in another thread. * * <p>This class is a member of the * <a href="{@docRoot}/java.base/java/util/package-summary.html#CollectionsFramework"> * Java Collections Framework</a>. * * @since 1.5 * @author Doug Lea * @param <E> the type of elements held in this queue */ public class ConcurrentLinkedQueue<E> extends AbstractQueue<E> implements Queue<E>, java.io.Serializable { private static final long serialVersionUID = 196745693267521676L; /* * This is a modification of the Michael & Scott algorithm, * adapted for a garbage-collected environment, with support for * interior node deletion (to support e.g. 
remove(Object)). For * explanation, read the paper. * * Note that like most non-blocking algorithms in this package, * this implementation relies on the fact that in garbage * collected systems, there is no possibility of ABA problems due * to recycled nodes, so there is no need to use "counted * pointers" or related techniques seen in versions used in * non-GC'ed settings. * * The fundamental invariants are: * - There is exactly one (last) Node with a null next reference, * which is CASed when enqueueing. This last Node can be * reached in O(1) time from tail, but tail is merely an * optimization - it can always be reached in O(N) time from * head as well. * - The elements contained in the queue are the non-null items in * Nodes that are reachable from head. CASing the item * reference of a Node to null atomically removes it from the * queue. Reachability of all elements from head must remain * true even in the case of concurrent modifications that cause * head to advance. A dequeued Node may remain in use * indefinitely due to creation of an Iterator or simply a * poll() that has lost its time slice. * * The above might appear to imply that all Nodes are GC-reachable * from a predecessor dequeued Node. That would cause two problems: * - allow a rogue Iterator to cause unbounded memory retention * - cause cross-generational linking of old Nodes to new Nodes if * a Node was tenured while live, which generational GCs have a * hard time dealing with, causing repeated major collections. * However, only non-deleted Nodes need to be reachable from * dequeued Nodes, and reachability does not necessarily have to * be of the kind understood by the GC. We use the trick of * linking a Node that has just been dequeued to itself. Such a * self-link implicitly means to advance to head. * * Both head and tail are permitted to lag. In fact, failing to * update them every time one could is a significant optimization * (fewer CASes). 
As with LinkedTransferQueue (see the internal * documentation for that class), we use a slack threshold of two; * that is, we update head/tail when the current pointer appears * to be two or more steps away from the first/last node. * * Since head and tail are updated concurrently and independently, * it is possible for tail to lag behind head (why not)? * * CASing a Node's item reference to null atomically removes the * element from the queue, leaving a "dead" node that should later * be unlinked (but unlinking is merely an optimization). * Interior element removal methods (other than Iterator.remove()) * keep track of the predecessor node during traversal so that the * node can be CAS-unlinked. Some traversal methods try to unlink * any deleted nodes encountered during traversal. See comments * in bulkRemove. * * When constructing a Node (before enqueuing it) we avoid paying * for a volatile write to item. This allows the cost of enqueue * to be "one-and-a-half" CASes. * * Both head and tail may or may not point to a Node with a * non-null item. If the queue is empty, all items must of course * be null. Upon creation, both head and tail refer to a dummy * Node with null item. Both head and tail are only updated using * CAS, so they never regress, although again this is merely an * optimization. */ static final class Node<E> { volatile E item; volatile Node<E> next; } /** * Constructs a node holding item. Uses relaxed write because * item can only be seen after piggy-backing publication via CAS. 
*/ static <E> Node<E> newNode(E item) { Node<E> node = new Node<E>(); U.putObject(node, ITEM, item); return node; } static <E> boolean casItem(Node<E> node, E cmp, E val) { return U.compareAndSwapObject(node, ITEM, cmp, val); } static <E> void lazySetNext(Node<E> node, Node<E> val) { U.putOrderedObject(node, NEXT, val); } static <E> boolean casNext(Node<E> node, Node<E> cmp, Node<E> val) { return U.compareAndSwapObject(node, NEXT, cmp, val); } /** * A node from which the first live (non-deleted) node (if any) * can be reached in O(1) time. * Invariants: * - all live nodes are reachable from head via succ() * - head != null * - (tmp = head).next != tmp || tmp != head * Non-invariants: * - head.item may or may not be null. * - it is permitted for tail to lag behind head, that is, for tail * to not be reachable from head! */ transient volatile Node<E> head; /** * A node from which the last node on list (that is, the unique * node with node.next == null) can be reached in O(1) time. * Invariants: * - the last node is always reachable from tail via succ() * - tail != null * Non-invariants: * - tail.item may or may not be null. * - it is permitted for tail to lag behind head, that is, for tail * to not be reachable from head! * - tail.next may or may not be self-linked. */ private transient volatile Node<E> tail; /** * Creates a {@code ConcurrentLinkedQueue} that is initially empty. */ public ConcurrentLinkedQueue() { head = tail = new Node<E>(); } /** * Creates a {@code ConcurrentLinkedQueue} * initially containing the elements of the given collection, * added in traversal order of the collection's iterator. * * @param c the collection of elements to initially contain * @throws NullPointerException if the specified collection or any * of its elements are null */ public ConcurrentLinkedQueue(Collection<? 
extends E> c) { Node<E> h = null, t = null; for (E e : c) { Node<E> newNode = newNode(Objects.requireNonNull(e)); if (h == null) h = t = newNode; else { lazySetNext(t, newNode); t = newNode; } } if (h == null) h = t = new Node<E>(); head = h; tail = t; } // Have to override just to update the javadoc /** * Inserts the specified element at the tail of this queue. * As the queue is unbounded, this method will never throw * {@link IllegalStateException} or return {@code false}. * * @return {@code true} (as specified by {@link Collection#add}) * @throws NullPointerException if the specified element is null */ public boolean add(E e) { return offer(e); } /** * Tries to CAS head to p. If successful, repoint old head to itself * as sentinel for succ(), below. */ final void updateHead(Node<E> h, Node<E> p) { // assert h != null && p != null && (h == p || h.item == null); if (h != p && casHead(h, p)) lazySetNext(h, sentinel()); } /** * Returns the successor of p, or the head node if p.next has been * linked to self, which will only be true if traversing with a * stale pointer that is now off the list. */ final Node<E> succ(Node<E> p) { Node<E> next = p.next; return (sentinel() == next) ? head : next; } /** * Inserts the specified element at the tail of this queue. * As the queue is unbounded, this method will never return {@code false}. * * @return {@code true} (as specified by {@link Queue#offer}) * @throws NullPointerException if the specified element is null */ public boolean offer(E e) { final Node<E> newNode = newNode(Objects.requireNonNull(e)); for (Node<E> t = tail, p = t;;) { Node<E> q = p.next; if (q == null) { // p is last node if (casNext(p, null, newNode)) { // Successful CAS is the linearization point // for e to become an element of this queue, // and for newNode to become "live". if (p != t) // hop two nodes at a time casTail(t, newNode); // Failure is OK. 
return true; } // Lost CAS race to another thread; re-read next } else if (sentinel() == q) // We have fallen off list. If tail is unchanged, it // will also be off-list, in which case we need to // jump to head, from which all live nodes are always // reachable. Else the new tail is a better bet. p = (t != (t = tail)) ? t : head; else // Check for tail updates after two hops. p = (p != t && t != (t = tail)) ? t : q; } } public E poll() { restartFromHead: for (;;) { for (Node<E> h = head, p = h, q;;) { E item = p.item; if (item != null && casItem(p, item, null)) { // Successful CAS is the linearization point // for item to be removed from this queue. if (p != h) // hop two nodes at a time updateHead(h, ((q = p.next) != null) ? q : p); return item; } else if ((q = p.next) == null) { updateHead(h, p); return null; } else if (sentinel() == q) continue restartFromHead; else p = q; } } } public E peek() { restartFromHead: for (;;) { for (Node<E> h = head, p = h, q;; p = q) { final E item; if ((item = p.item) != null || (q = p.next) == null) { updateHead(h, p); return item; } else if (p == q) continue restartFromHead; } } } /** * Returns the first live (non-deleted) node on list, or null if none. * This is yet another variant of poll/peek; here returning the * first node, not element. We could make peek() a wrapper around * first(), but that would cost an extra volatile read of item, * and the need to add a retry loop to deal with the possibility * of losing a race to a concurrent poll(). */ Node<E> first() { restartFromHead: for (;;) { for (Node<E> h = head, p = h, q;; p = q) { boolean hasItem = (p.item != null); if (hasItem || (q = p.next) == null) { updateHead(h, p); return hasItem ? p : null; } else if (sentinel() == q) continue restartFromHead; } } } /** * Returns {@code true} if this queue contains no elements. 
* * @return {@code true} if this queue contains no elements */ public boolean isEmpty() { return first() == null; } /** * Returns the number of elements in this queue. If this queue * contains more than {@code Integer.MAX_VALUE} elements, returns * {@code Integer.MAX_VALUE}. * * <p>Beware that, unlike in most collections, this method is * <em>NOT</em> a constant-time operation. Because of the * asynchronous nature of these queues, determining the current * number of elements requires an O(n) traversal. * Additionally, if elements are added or removed during execution * of this method, the returned result may be inaccurate. Thus, * this method is typically not very useful in concurrent * applications. * * @return the number of elements in this queue */ public int size() { restartFromHead: for (;;) { int count = 0; for (Node<E> p = first(); p != null;) { if (p.item != null) if (++count == Integer.MAX_VALUE) break; // @see Collection.size() if (sentinel() == (p = p.next)) continue restartFromHead; } return count; } } /** * Returns {@code true} if this queue contains the specified element. * More formally, returns {@code true} if and only if this queue contains * at least one element {@code e} such that {@code o.equals(e)}. * * @param o object to be checked for containment in this queue * @return {@code true} if this queue contains the specified element */ public boolean contains(Object o) { if (o != null) { for (Node<E> p = first(); p != null; p = succ(p)) { E item = p.item; if (item != null && o.equals(item)) return true; } } return false; } /** * Removes a single instance of the specified element from this queue, * if it is present. More formally, removes an element {@code e} such * that {@code o.equals(e)}, if this queue contains one or more such * elements. * Returns {@code true} if this queue contained the specified element * (or equivalently, if this queue changed as a result of the call). 
* * @param o element to be removed from this queue, if present * @return {@code true} if this queue changed as a result of the call */ public boolean remove(Object o) { if (o != null) { Node<E> next, pred = null; for (Node<E> p = first(); p != null; pred = p, p = next) { boolean removed = false; E item = p.item; if (item != null) { if (!o.equals(item)) { next = succ(p); continue; } removed = casItem(p, item, null); } next = succ(p); if (pred != null && next != null) // unlink casNext(pred, p, next); if (removed) return true; } } return false; } /** * Appends all of the elements in the specified collection to the end of * this queue, in the order that they are returned by the specified * collection's iterator. Attempts to {@code addAll} of a queue to * itself result in {@code IllegalArgumentException}. * * @param c the elements to be inserted into this queue * @return {@code true} if this queue changed as a result of the call * @throws NullPointerException if the specified collection or any * of its elements are null * @throws IllegalArgumentException if the collection is this queue */ public boolean addAll(Collection<? extends E> c) { if (c == this) // As historically specified in AbstractQueue#addAll throw new IllegalArgumentException(); // Copy c into a private chain of Nodes Node<E> beginningOfTheEnd = null, last = null; for (E e : c) { Node<E> newNode = newNode(Objects.requireNonNull(e)); if (beginningOfTheEnd == null) beginningOfTheEnd = last = newNode; else { lazySetNext(last, newNode); last = newNode; } } if (beginningOfTheEnd == null) return false; // Atomically append the chain at the tail of this collection for (Node<E> t = tail, p = t;;) { Node<E> q = p.next; if (q == null) { // p is last node if (casNext(p, null, beginningOfTheEnd)) { // Successful CAS is the linearization point // for all elements to be added to this queue. if (!casTail(t, last)) { // Try a little harder to update tail, // since we may be adding many elements. 
t = tail; if (last.next == null) casTail(t, last); } return true; } // Lost CAS race to another thread; re-read next } else if (sentinel() == q) // We have fallen off list. If tail is unchanged, it // will also be off-list, in which case we need to // jump to head, from which all live nodes are always // reachable. Else the new tail is a better bet. p = (t != (t = tail)) ? t : head; else // Check for tail updates after two hops. p = (p != t && t != (t = tail)) ? t : q; } } public String toString() { String[] a = null; restartFromHead: for (;;) { int charLength = 0; int size = 0; for (Node<E> p = first(); p != null;) { final E item; if ((item = p.item) != null) { if (a == null) a = new String[4]; else if (size == a.length) a = Arrays.copyOf(a, 2 * size); String s = item.toString(); a[size++] = s; charLength += s.length(); } if (sentinel() == (p = p.next)) continue restartFromHead; } if (size == 0) return "[]"; return Helpers.toString(a, size, charLength); } } private Object[] toArrayInternal(Object[] a) { Object[] x = a; restartFromHead: for (;;) { int size = 0; for (Node<E> p = first(); p != null;) { final E item; if ((item = p.item) != null) { if (x == null) x = new Object[4]; else if (size == x.length) x = Arrays.copyOf(x, 2 * (size + 4)); x[size++] = item; } if (sentinel() == (p = p.next)) continue restartFromHead; } if (x == null) return new Object[0]; else if (a != null && size <= a.length) { if (a != x) System.arraycopy(x, 0, a, 0, size); if (size < a.length) a[size] = null; return a; } return (size == x.length) ? x : Arrays.copyOf(x, size); } } /** * Returns an array containing all of the elements in this queue, in * proper sequence. * * <p>The returned array will be "safe" in that no references to it are * maintained by this queue. (In other words, this method must allocate * a new array). The caller is thus free to modify the returned array. * * <p>This method acts as bridge between array-based and collection-based * APIs. 
* * @return an array containing all of the elements in this queue */ public Object[] toArray() { return toArrayInternal(null); } /** * Returns an array containing all of the elements in this queue, in * proper sequence; the runtime type of the returned array is that of * the specified array. If the queue fits in the specified array, it * is returned therein. Otherwise, a new array is allocated with the * runtime type of the specified array and the size of this queue. * * <p>If this queue fits in the specified array with room to spare * (i.e., the array has more elements than this queue), the element in * the array immediately following the end of the queue is set to * {@code null}. * * <p>Like the {@link #toArray()} method, this method acts as bridge between * array-based and collection-based APIs. Further, this method allows * precise control over the runtime type of the output array, and may, * under certain circumstances, be used to save allocation costs. * * <p>Suppose {@code x} is a queue known to contain only strings. * The following code can be used to dump the queue into a newly * allocated array of {@code String}: * * <pre> {@code String[] y = x.toArray(new String[0]);}</pre> * * Note that {@code toArray(new Object[0])} is identical in function to * {@code toArray()}. * * @param a the array into which the elements of the queue are to * be stored, if it is big enough; otherwise, a new array of the * same runtime type is allocated for this purpose * @return an array containing all of the elements in this queue * @throws ArrayStoreException if the runtime type of the specified array * is not a supertype of the runtime type of every element in * this queue * @throws NullPointerException if the specified array is null */ @SuppressWarnings("unchecked") public <T> T[] toArray(T[] a) { Objects.requireNonNull(a); return (T[]) toArrayInternal(a); } /** * Returns an iterator over the elements in this queue in proper sequence. 
* The elements will be returned in order from first (head) to last (tail). * * <p>The returned iterator is * <a href="package-summary.html#Weakly"><i>weakly consistent</i></a>. * * @return an iterator over the elements in this queue in proper sequence */ public Iterator<E> iterator() { return new Itr(); } private class Itr implements Iterator<E> { /** * Next node to return item for. */ private Node<E> nextNode; /** * nextItem holds on to item fields because once we claim * that an element exists in hasNext(), we must return it in * the following next() call even if it was in the process of * being removed when hasNext() was called. */ private E nextItem; /** * Node of the last returned item, to support remove. */ private Node<E> lastRet; Itr() { restartFromHead: for (;;) { Node<E> h, p, q; for (p = h = head;; p = q) { final E item; if ((item = p.item) != null) { nextNode = p; nextItem = item; break; } else if ((q = p.next) == null) break; else if (sentinel() == q) continue restartFromHead; } updateHead(h, p); return; } } public boolean hasNext() { return nextItem != null; } public E next() { final Node<E> pred = nextNode; if (pred == null) throw new NoSuchElementException(); // assert nextItem != null; lastRet = pred; E item = null; for (Node<E> p = succ(pred), q;; p = q) { if (p == null || (item = p.item) != null) { nextNode = p; E x = nextItem; nextItem = item; return x; } // unlink deleted nodes if ((q = succ(p)) != null) casNext(pred, p, q); } } // Default implementation of forEachRemaining is "good enough". public void remove() { Node<E> l = lastRet; if (l == null) throw new IllegalStateException(); // rely on a future traversal to relink. l.item = null; lastRet = null; } } /** * Saves this queue to a stream (that is, serializes it). 
* * @param s the stream * @throws java.io.IOException if an I/O error occurs * @serialData All of the elements (each an {@code E}) in * the proper order, followed by a null */ private void writeObject(java.io.ObjectOutputStream s) throws java.io.IOException { // Write out any hidden stuff s.defaultWriteObject(); // Write out all elements in the proper order. for (Node<E> p = first(); p != null; p = succ(p)) { final E item; if ((item = p.item) != null) s.writeObject(item); } // Use trailing null as sentinel s.writeObject(null); } /** * Reconstitutes this queue from a stream (that is, deserializes it). * @param s the stream * @throws ClassNotFoundException if the class of a serialized object * could not be found * @throws java.io.IOException if an I/O error occurs */ private void readObject(java.io.ObjectInputStream s) throws java.io.IOException, ClassNotFoundException { s.defaultReadObject(); // Read in elements until trailing null sentinel found Node<E> h = null, t = null; for (Object item; (item = s.readObject()) != null; ) { @SuppressWarnings("unchecked") Node<E> newNode = newNode((E) item); if (h == null) h = t = newNode; else { lazySetNext(t, newNode); t = newNode; } } if (h == null) h = t = new Node<E>(); head = h; tail = t; } /** A customized variant of Spliterators.IteratorSpliterator */ static final class CLQSpliterator<E> implements Spliterator<E> { static final int MAX_BATCH = 1 << 25; // max batch array size; final ConcurrentLinkedQueue<E> queue; Node<E> current; // current node; null until initialized int batch; // batch size for splits boolean exhausted; // true when no more nodes CLQSpliterator(ConcurrentLinkedQueue<E> queue) { this.queue = queue; } public Spliterator<E> trySplit() { Node<E> p; final ConcurrentLinkedQueue<E> q = this.queue; int b = batch; int n = (b <= 0) ? 1 : (b >= MAX_BATCH) ? 
MAX_BATCH : b + 1; if (!exhausted && ((p = current) != null || (p = q.first()) != null) && p.next != null) { Object[] a = new Object[n]; int i = 0; do { if ((a[i] = p.item) != null) ++i; if (q.sentinel() == (p = p.next)) p = q.first(); } while (p != null && i < n); if ((current = p) == null) exhausted = true; if (i > 0) { batch = i; return Spliterators.spliterator (a, 0, i, (Spliterator.ORDERED | Spliterator.NONNULL | Spliterator.CONCURRENT)); } } return null; } public void forEachRemaining(Consumer<? super E> action) { Node<E> p; if (action == null) throw new NullPointerException(); final ConcurrentLinkedQueue<E> q = this.queue; if (!exhausted && ((p = current) != null || (p = q.first()) != null)) { exhausted = true; do { E e = p.item; if (q.sentinel() == (p = p.next)) p = q.first(); if (e != null) action.accept(e); } while (p != null); } } public boolean tryAdvance(Consumer<? super E> action) { Objects.requireNonNull(action); Node<E> p; if (action == null) throw new NullPointerException(); final ConcurrentLinkedQueue<E> q = this.queue; if (!exhausted && ((p = current) != null || (p = q.first()) != null)) { E e; do { e = p.item; if (q.sentinel() == (p = p.next)) p = q.first(); } while (e == null && p != null); setCurrent(p); if (e != null) { action.accept(e); return true; } } return false; } private void setCurrent(Node<E> p) { if ((current = p) == null) exhausted = true; } public long estimateSize() { return Long.MAX_VALUE; } public int characteristics() { return (Spliterator.ORDERED | Spliterator.NONNULL | Spliterator.CONCURRENT); } } /** * Returns a {@link Spliterator} over the elements in this queue. * * <p>The returned spliterator is * <a href="package-summary.html#Weakly"><i>weakly consistent</i></a>. * * <p>The {@code Spliterator} reports {@link Spliterator#CONCURRENT}, * {@link Spliterator#ORDERED}, and {@link Spliterator#NONNULL}. * * @implNote * The {@code Spliterator} implements {@code trySplit} to permit limited * parallelism. 
* * @return a {@code Spliterator} over the elements in this queue * @since 1.8 */ @Override public Spliterator<E> spliterator() { return new CLQSpliterator<E>(this); } private boolean casTail(Node<E> cmp, Node<E> val) { return U.compareAndSwapObject(this, TAIL, cmp, val); } private boolean casHead(Node<E> cmp, Node<E> val) { return U.compareAndSwapObject(this, HEAD, cmp, val); } private static final sun.misc.Unsafe U = sun.misc.Unsafe.getUnsafe(); private static final long HEAD; private static final long TAIL; private static final long ITEM; private static final long NEXT; static { try { HEAD = U.objectFieldOffset (ConcurrentLinkedQueue.class.getDeclaredField("head")); TAIL = U.objectFieldOffset (ConcurrentLinkedQueue.class.getDeclaredField("tail")); ITEM = U.objectFieldOffset (Node.class.getDeclaredField("item")); NEXT = U.objectFieldOffset (Node.class.getDeclaredField("next")); } catch (ReflectiveOperationException e) { throw new Error(e); } } // J2ObjC: newNode() requires ITEM to be initialized first. private static final Node<Object> SENTINEL = newNode(null); private Node<E> sentinel() { return (Node<E>) SENTINEL; } }
googleapis/google-cloud-java
36,486
java-securitycenter/proto-google-cloud-securitycenter-v1p1beta1/src/main/java/com/google/cloud/securitycenter/v1p1beta1/SetFindingStateRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/securitycenter/v1p1beta1/securitycenter_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.securitycenter.v1p1beta1; /** * * * <pre> * Request message for updating a finding's state. * </pre> * * Protobuf type {@code google.cloud.securitycenter.v1p1beta1.SetFindingStateRequest} */ public final class SetFindingStateRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.securitycenter.v1p1beta1.SetFindingStateRequest) SetFindingStateRequestOrBuilder { private static final long serialVersionUID = 0L; // Use SetFindingStateRequest.newBuilder() to construct. 
private SetFindingStateRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private SetFindingStateRequest() { name_ = ""; state_ = 0; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new SetFindingStateRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.securitycenter.v1p1beta1.SecuritycenterService .internal_static_google_cloud_securitycenter_v1p1beta1_SetFindingStateRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.securitycenter.v1p1beta1.SecuritycenterService .internal_static_google_cloud_securitycenter_v1p1beta1_SetFindingStateRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.securitycenter.v1p1beta1.SetFindingStateRequest.class, com.google.cloud.securitycenter.v1p1beta1.SetFindingStateRequest.Builder.class); } private int bitField0_; public static final int NAME_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object name_ = ""; /** * * * <pre> * Required. The relative resource name of the finding. See: * https://cloud.google.com/apis/design/resource_names#relative_resource_name * Example: * "organizations/{organization_id}/sources/{source_id}/finding/{finding_id}". * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The name. */ @java.lang.Override public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } } /** * * * <pre> * Required. The relative resource name of the finding. 
See: * https://cloud.google.com/apis/design/resource_names#relative_resource_name * Example: * "organizations/{organization_id}/sources/{source_id}/finding/{finding_id}". * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for name. */ @java.lang.Override public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int STATE_FIELD_NUMBER = 2; private int state_ = 0; /** * * * <pre> * Required. The desired State of the finding. * </pre> * * <code> * .google.cloud.securitycenter.v1p1beta1.Finding.State state = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The enum numeric value on the wire for state. */ @java.lang.Override public int getStateValue() { return state_; } /** * * * <pre> * Required. The desired State of the finding. * </pre> * * <code> * .google.cloud.securitycenter.v1p1beta1.Finding.State state = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The state. */ @java.lang.Override public com.google.cloud.securitycenter.v1p1beta1.Finding.State getState() { com.google.cloud.securitycenter.v1p1beta1.Finding.State result = com.google.cloud.securitycenter.v1p1beta1.Finding.State.forNumber(state_); return result == null ? com.google.cloud.securitycenter.v1p1beta1.Finding.State.UNRECOGNIZED : result; } public static final int START_TIME_FIELD_NUMBER = 3; private com.google.protobuf.Timestamp startTime_; /** * * * <pre> * Required. The time at which the updated state takes effect. * </pre> * * <code>.google.protobuf.Timestamp start_time = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the startTime field is set. 
*/ @java.lang.Override public boolean hasStartTime() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The time at which the updated state takes effect. * </pre> * * <code>.google.protobuf.Timestamp start_time = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The startTime. */ @java.lang.Override public com.google.protobuf.Timestamp getStartTime() { return startTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : startTime_; } /** * * * <pre> * Required. The time at which the updated state takes effect. * </pre> * * <code>.google.protobuf.Timestamp start_time = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.protobuf.TimestampOrBuilder getStartTimeOrBuilder() { return startTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : startTime_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } if (state_ != com.google.cloud.securitycenter.v1p1beta1.Finding.State.STATE_UNSPECIFIED.getNumber()) { output.writeEnum(2, state_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(3, getStartTime()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } if (state_ != 
com.google.cloud.securitycenter.v1p1beta1.Finding.State.STATE_UNSPECIFIED.getNumber()) { size += com.google.protobuf.CodedOutputStream.computeEnumSize(2, state_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getStartTime()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.securitycenter.v1p1beta1.SetFindingStateRequest)) { return super.equals(obj); } com.google.cloud.securitycenter.v1p1beta1.SetFindingStateRequest other = (com.google.cloud.securitycenter.v1p1beta1.SetFindingStateRequest) obj; if (!getName().equals(other.getName())) return false; if (state_ != other.state_) return false; if (hasStartTime() != other.hasStartTime()) return false; if (hasStartTime()) { if (!getStartTime().equals(other.getStartTime())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); hash = (37 * hash) + STATE_FIELD_NUMBER; hash = (53 * hash) + state_; if (hasStartTime()) { hash = (37 * hash) + START_TIME_FIELD_NUMBER; hash = (53 * hash) + getStartTime().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.securitycenter.v1p1beta1.SetFindingStateRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securitycenter.v1p1beta1.SetFindingStateRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securitycenter.v1p1beta1.SetFindingStateRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securitycenter.v1p1beta1.SetFindingStateRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securitycenter.v1p1beta1.SetFindingStateRequest parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securitycenter.v1p1beta1.SetFindingStateRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securitycenter.v1p1beta1.SetFindingStateRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.securitycenter.v1p1beta1.SetFindingStateRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.securitycenter.v1p1beta1.SetFindingStateRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.securitycenter.v1p1beta1.SetFindingStateRequest parseDelimitedFrom( 
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.securitycenter.v1p1beta1.SetFindingStateRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.securitycenter.v1p1beta1.SetFindingStateRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.securitycenter.v1p1beta1.SetFindingStateRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for updating a finding's state. 
* </pre> * * Protobuf type {@code google.cloud.securitycenter.v1p1beta1.SetFindingStateRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.securitycenter.v1p1beta1.SetFindingStateRequest) com.google.cloud.securitycenter.v1p1beta1.SetFindingStateRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.securitycenter.v1p1beta1.SecuritycenterService .internal_static_google_cloud_securitycenter_v1p1beta1_SetFindingStateRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.securitycenter.v1p1beta1.SecuritycenterService .internal_static_google_cloud_securitycenter_v1p1beta1_SetFindingStateRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.securitycenter.v1p1beta1.SetFindingStateRequest.class, com.google.cloud.securitycenter.v1p1beta1.SetFindingStateRequest.Builder.class); } // Construct using com.google.cloud.securitycenter.v1p1beta1.SetFindingStateRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getStartTimeFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; name_ = ""; state_ = 0; startTime_ = null; if (startTimeBuilder_ != null) { startTimeBuilder_.dispose(); startTimeBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.securitycenter.v1p1beta1.SecuritycenterService 
.internal_static_google_cloud_securitycenter_v1p1beta1_SetFindingStateRequest_descriptor; } @java.lang.Override public com.google.cloud.securitycenter.v1p1beta1.SetFindingStateRequest getDefaultInstanceForType() { return com.google.cloud.securitycenter.v1p1beta1.SetFindingStateRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.securitycenter.v1p1beta1.SetFindingStateRequest build() { com.google.cloud.securitycenter.v1p1beta1.SetFindingStateRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.securitycenter.v1p1beta1.SetFindingStateRequest buildPartial() { com.google.cloud.securitycenter.v1p1beta1.SetFindingStateRequest result = new com.google.cloud.securitycenter.v1p1beta1.SetFindingStateRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.securitycenter.v1p1beta1.SetFindingStateRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.name_ = name_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.state_ = state_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000004) != 0)) { result.startTime_ = startTimeBuilder_ == null ? 
startTime_ : startTimeBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.securitycenter.v1p1beta1.SetFindingStateRequest) { return mergeFrom((com.google.cloud.securitycenter.v1p1beta1.SetFindingStateRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.securitycenter.v1p1beta1.SetFindingStateRequest other) { if (other == com.google.cloud.securitycenter.v1p1beta1.SetFindingStateRequest.getDefaultInstance()) return this; if (!other.getName().isEmpty()) { name_ = other.name_; bitField0_ |= 0x00000001; onChanged(); } if (other.state_ != 0) { setStateValue(other.getStateValue()); } if (other.hasStartTime()) { mergeStartTime(other.getStartTime()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { name_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 16: { state_ = input.readEnum(); bitField0_ |= 0x00000002; break; } // case 16 case 26: { input.readMessage(getStartTimeFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object name_ = ""; /** * * * <pre> * Required. The relative resource name of the finding. See: * https://cloud.google.com/apis/design/resource_names#relative_resource_name * Example: * "organizations/{organization_id}/sources/{source_id}/finding/{finding_id}". * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The name. */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The relative resource name of the finding. See: * https://cloud.google.com/apis/design/resource_names#relative_resource_name * Example: * "organizations/{organization_id}/sources/{source_id}/finding/{finding_id}". 
* </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for name. */ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The relative resource name of the finding. See: * https://cloud.google.com/apis/design/resource_names#relative_resource_name * Example: * "organizations/{organization_id}/sources/{source_id}/finding/{finding_id}". * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The name to set. * @return This builder for chaining. */ public Builder setName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } name_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The relative resource name of the finding. See: * https://cloud.google.com/apis/design/resource_names#relative_resource_name * Example: * "organizations/{organization_id}/sources/{source_id}/finding/{finding_id}". * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearName() { name_ = getDefaultInstance().getName(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The relative resource name of the finding. See: * https://cloud.google.com/apis/design/resource_names#relative_resource_name * Example: * "organizations/{organization_id}/sources/{source_id}/finding/{finding_id}". 
* </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for name to set. * @return This builder for chaining. */ public Builder setNameBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); name_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private int state_ = 0; /** * * * <pre> * Required. The desired State of the finding. * </pre> * * <code> * .google.cloud.securitycenter.v1p1beta1.Finding.State state = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The enum numeric value on the wire for state. */ @java.lang.Override public int getStateValue() { return state_; } /** * * * <pre> * Required. The desired State of the finding. * </pre> * * <code> * .google.cloud.securitycenter.v1p1beta1.Finding.State state = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @param value The enum numeric value on the wire for state to set. * @return This builder for chaining. */ public Builder setStateValue(int value) { state_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The desired State of the finding. * </pre> * * <code> * .google.cloud.securitycenter.v1p1beta1.Finding.State state = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The state. */ @java.lang.Override public com.google.cloud.securitycenter.v1p1beta1.Finding.State getState() { com.google.cloud.securitycenter.v1p1beta1.Finding.State result = com.google.cloud.securitycenter.v1p1beta1.Finding.State.forNumber(state_); return result == null ? com.google.cloud.securitycenter.v1p1beta1.Finding.State.UNRECOGNIZED : result; } /** * * * <pre> * Required. The desired State of the finding. 
* </pre> * * <code> * .google.cloud.securitycenter.v1p1beta1.Finding.State state = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @param value The state to set. * @return This builder for chaining. */ public Builder setState(com.google.cloud.securitycenter.v1p1beta1.Finding.State value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; state_ = value.getNumber(); onChanged(); return this; } /** * * * <pre> * Required. The desired State of the finding. * </pre> * * <code> * .google.cloud.securitycenter.v1p1beta1.Finding.State state = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return This builder for chaining. */ public Builder clearState() { bitField0_ = (bitField0_ & ~0x00000002); state_ = 0; onChanged(); return this; } private com.google.protobuf.Timestamp startTime_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> startTimeBuilder_; /** * * * <pre> * Required. The time at which the updated state takes effect. * </pre> * * <code>.google.protobuf.Timestamp start_time = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the startTime field is set. */ public boolean hasStartTime() { return ((bitField0_ & 0x00000004) != 0); } /** * * * <pre> * Required. The time at which the updated state takes effect. * </pre> * * <code>.google.protobuf.Timestamp start_time = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The startTime. */ public com.google.protobuf.Timestamp getStartTime() { if (startTimeBuilder_ == null) { return startTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : startTime_; } else { return startTimeBuilder_.getMessage(); } } /** * * * <pre> * Required. The time at which the updated state takes effect. 
* </pre> * * <code>.google.protobuf.Timestamp start_time = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setStartTime(com.google.protobuf.Timestamp value) { if (startTimeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } startTime_ = value; } else { startTimeBuilder_.setMessage(value); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Required. The time at which the updated state takes effect. * </pre> * * <code>.google.protobuf.Timestamp start_time = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setStartTime(com.google.protobuf.Timestamp.Builder builderForValue) { if (startTimeBuilder_ == null) { startTime_ = builderForValue.build(); } else { startTimeBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Required. The time at which the updated state takes effect. * </pre> * * <code>.google.protobuf.Timestamp start_time = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeStartTime(com.google.protobuf.Timestamp value) { if (startTimeBuilder_ == null) { if (((bitField0_ & 0x00000004) != 0) && startTime_ != null && startTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) { getStartTimeBuilder().mergeFrom(value); } else { startTime_ = value; } } else { startTimeBuilder_.mergeFrom(value); } if (startTime_ != null) { bitField0_ |= 0x00000004; onChanged(); } return this; } /** * * * <pre> * Required. The time at which the updated state takes effect. * </pre> * * <code>.google.protobuf.Timestamp start_time = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearStartTime() { bitField0_ = (bitField0_ & ~0x00000004); startTime_ = null; if (startTimeBuilder_ != null) { startTimeBuilder_.dispose(); startTimeBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The time at which the updated state takes effect. 
* </pre> * * <code>.google.protobuf.Timestamp start_time = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.Timestamp.Builder getStartTimeBuilder() { bitField0_ |= 0x00000004; onChanged(); return getStartTimeFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The time at which the updated state takes effect. * </pre> * * <code>.google.protobuf.Timestamp start_time = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.TimestampOrBuilder getStartTimeOrBuilder() { if (startTimeBuilder_ != null) { return startTimeBuilder_.getMessageOrBuilder(); } else { return startTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : startTime_; } } /** * * * <pre> * Required. The time at which the updated state takes effect. * </pre> * * <code>.google.protobuf.Timestamp start_time = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> getStartTimeFieldBuilder() { if (startTimeBuilder_ == null) { startTimeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder>( getStartTime(), getParentForChildren(), isClean()); startTime_ = null; } return startTimeBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.securitycenter.v1p1beta1.SetFindingStateRequest) } // @@protoc_insertion_point(class_scope:google.cloud.securitycenter.v1p1beta1.SetFindingStateRequest) private static final 
com.google.cloud.securitycenter.v1p1beta1.SetFindingStateRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.securitycenter.v1p1beta1.SetFindingStateRequest(); } public static com.google.cloud.securitycenter.v1p1beta1.SetFindingStateRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<SetFindingStateRequest> PARSER = new com.google.protobuf.AbstractParser<SetFindingStateRequest>() { @java.lang.Override public SetFindingStateRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<SetFindingStateRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<SetFindingStateRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.securitycenter.v1p1beta1.SetFindingStateRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/ignite-3
36,665
modules/raft/src/integrationTest/java/org/apache/ignite/raft/server/ItJraftCounterServerTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.raft.server; import static java.util.Comparator.comparing; import static java.util.stream.Collectors.toList; import static java.util.stream.Collectors.toSet; import static org.apache.ignite.internal.raft.server.RaftGroupOptions.defaults; import static org.apache.ignite.internal.testframework.matchers.CompletableFutureMatcher.willCompleteSuccessfully; import static org.apache.ignite.raft.jraft.core.State.STATE_ERROR; import static org.apache.ignite.raft.jraft.core.State.STATE_LEADER; import static org.apache.ignite.raft.jraft.test.TestUtils.waitForCondition; import static org.apache.ignite.raft.jraft.test.TestUtils.waitForTopology; import static org.apache.ignite.raft.server.counter.GetValueCommand.getValueCommand; import static org.apache.ignite.raft.server.counter.IncrementAndGetCommand.incrementAndGetCommand; import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; import 
java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Consumer; import java.util.function.Supplier; import java.util.stream.Stream; import org.apache.ignite.internal.configuration.ComponentWorkingDir; import org.apache.ignite.internal.lang.IgniteInternalException; import org.apache.ignite.internal.manager.ComponentContext; import org.apache.ignite.internal.raft.Peer; import org.apache.ignite.internal.raft.RaftNodeId; import org.apache.ignite.internal.raft.ReadCommand; import org.apache.ignite.internal.raft.WriteCommand; import org.apache.ignite.internal.raft.server.RaftGroupOptions; import org.apache.ignite.internal.raft.server.RaftServer; import org.apache.ignite.internal.raft.server.impl.JraftServerImpl; import org.apache.ignite.internal.raft.service.CommandClosure; import org.apache.ignite.internal.raft.service.RaftGroupService; import org.apache.ignite.internal.raft.util.ThreadLocalOptimizedMarshaller; import org.apache.ignite.internal.replicator.ReplicationGroupId; import org.apache.ignite.internal.replicator.TestReplicationGroupId; import org.apache.ignite.internal.util.IgniteUtils; import org.apache.ignite.raft.jraft.core.NodeImpl; import org.apache.ignite.raft.jraft.core.StateMachineAdapter; import org.apache.ignite.raft.jraft.entity.RaftOutter.SnapshotMeta; import org.apache.ignite.raft.jraft.rpc.impl.RaftException; import org.apache.ignite.raft.jraft.util.ExecutorServiceHelper; import org.apache.ignite.raft.messages.TestRaftMessagesFactory; import org.apache.ignite.raft.server.counter.CounterListener; import 
org.apache.ignite.raft.server.counter.GetValueCommand;
import org.apache.ignite.raft.server.counter.IncrementAndGetCommand;
// NOTE(review): "snasphot" is a typo in the actual package name upstream; do not "fix" it here
// or the imports stop resolving.
import org.apache.ignite.raft.server.snasphot.SnapshotInMemoryStorageFactory;
import org.apache.ignite.raft.server.snasphot.UpdateCountRaftListener;
import org.junit.jupiter.api.Test;

/**
 * Integration tests for the JRaft-based server: leader refresh, counter state machine
 * commands, snapshot creation/failure handling, critical-error behavior, follower
 * catch-up after restart, and thread-pool sizing invariants.
 *
 * <p>Most tests run a 3-node cluster (see {@link #startCluster()}) hosting two
 * independent counter groups so cross-group isolation can be asserted.
 */
class ItJraftCounterServerTest extends JraftAbstractTest {
    /**
     * Counter group name 0.
     */
    private static final TestReplicationGroupId COUNTER_GROUP_0 = new TestReplicationGroupId("counter0");

    /**
     * Counter group name 1.
     */
    private static final TestReplicationGroupId COUNTER_GROUP_1 = new TestReplicationGroupId("counter1");

    /** Amount of stripes for disruptors that are used by JRAFT. */
    private static final int RAFT_STRIPES = 3;

    /** Amount of stripes for disruptors that are used by the log service for JRAFT. */
    private static final int RAFT_LOG_STRIPES = 1;

    /**
     * Listener factory. Mutable on purpose: individual tests replace it with a
     * misbehaving listener (e.g. one that fails snapshots) before calling
     * {@link #startCluster()}.
     */
    private Supplier<CounterListener> listenerFactory = CounterListener::new;

    /**
     * Starts a cluster for the test: three servers, each hosting a RAFT node for both
     * counter groups, plus one client per group.
     *
     * @throws Exception If failed.
     */
    private void startCluster() throws Exception {
        for (int i = 0; i < 3; i++) {
            // Effectively-final copy of the loop index for capture by the lambda below.
            int finalI = i;

            startServer(i, raftServer -> {
                String localNodeName = raftServer.clusterService().topologyService().localMember().name();

                Peer serverPeer = initialMembersConf.peer(localNodeName);

                RaftGroupOptions groupOptions = groupOptions(raftServer);

                groupOptions.setLogStorageFactory(logStorageFactories.get(finalI));
                groupOptions.serverDataPath(serverWorkingDirs.get(finalI).metaPath());

                // Both groups share the same options/storage on this server.
                raftServer.startRaftNode(
                        new RaftNodeId(COUNTER_GROUP_0, serverPeer), initialMembersConf, listenerFactory.get(), groupOptions
                );
                raftServer.startRaftNode(
                        new RaftNodeId(COUNTER_GROUP_1, serverPeer), initialMembersConf, listenerFactory.get(), groupOptions
                );
            }, opts -> {});
        }

        startClient(COUNTER_GROUP_0);
        startClient(COUNTER_GROUP_1);
    }

    /**
     * Checks that the number of Disruptor threads does not depend on count started RAFT nodes.
     */
    @Test
    public void testDisruptorThreadsCount() {
        startServer(0, raftServer -> {
            String localNodeName = raftServer.clusterService().topologyService().localMember().name();

            var nodeId = new RaftNodeId(new TestReplicationGroupId("test_raft_group"), initialMembersConf.peer(localNodeName));

            raftServer.startRaftNode(
                    nodeId,
                    initialMembersConf,
                    listenerFactory.get(),
                    groupOptions(raftServer)
                            .setLogStorageFactory(logStorageFactories.get(0))
                            .serverDataPath(serverWorkingDirs.get(0).metaPath())
            );
        }, opts -> {
            opts.setStripes(RAFT_STRIPES);
            opts.setLogStripesCount(RAFT_LOG_STRIPES);
            opts.setLogYieldStrategy(true);
        });

        Set<Thread> threads = getAllDisruptorCurrentThreads();

        int threadsBefore = threads.size();

        Set<String> threadNamesBefore = threads.stream().map(Thread::getName).collect(toSet());

        // Baseline: RAFT_STRIPES threads for each of the 3 disruptor-backed services,
        // plus the log-service stripes.
        assertEquals(RAFT_STRIPES * 3/* services */ + RAFT_LOG_STRIPES, threadsBefore, "Started thread names: " + threadNamesBefore);

        // Start 10 more groups on every server; the disruptor thread count must not grow.
        for (int j = 0; j < servers.size(); j++) {
            JraftServerImpl srv = servers.get(j);

            String localNodeName = srv.clusterService().topologyService().localMember().name();

            Peer serverPeer = initialMembersConf.peer(localNodeName);

            for (int i = 0; i < 10; i++) {
                var nodeId = new RaftNodeId(new TestReplicationGroupId("test_raft_group_" + i), serverPeer);

                srv.startRaftNode(
                        nodeId,
                        initialMembersConf,
                        listenerFactory.get(),
                        groupOptions(srv)
                                .setLogStorageFactory(logStorageFactories.get(j))
                                .serverDataPath(serverWorkingDirs.get(j).metaPath())
                );
            }
        }

        threads = getAllDisruptorCurrentThreads();

        int threadsAfter = threads.size();

        Set<String> threadNamesAfter = threads.stream().map(Thread::getName).collect(toSet());

        threadNamesAfter.removeAll(threadNamesBefore);

        assertEquals(threadsBefore, threadsAfter, "Difference: " + threadNamesAfter);
    }

    /**
     * Get a set of Disruptor threads for the well known JRaft services.
     *
     * @return Set of Disruptor threads.
     */
    private Set<Thread> getAllDisruptorCurrentThreads() {
        return Thread.getAllStackTraces().keySet().stream().filter(t -> t.getName().contains("JRaft-FSMCaller-Disruptor")
                        || t.getName().contains("JRaft-NodeImpl-Disruptor")
                        || t.getName().contains("JRaft-ReadOnlyService-Disruptor")
                        || t.getName().contains("JRaft-LogManager-Disruptor"))
                .collect(toSet());
    }

    /**
     * Verifies that a freshly started client has no leader until {@code refreshLeader()}
     * is called, after which the leader is known — independently for both groups.
     */
    @Test
    public void testRefreshLeader() throws Exception {
        startCluster();

        Peer leader = clients.get(0).leader();

        assertNull(leader);

        clients.get(0).refreshLeader().get();

        assertNotNull(clients.get(0).leader());

        leader = clients.get(1).leader();

        assertNull(leader);

        clients.get(1).refreshLeader().get();

        assertNotNull(clients.get(1).leader());
    }

    /**
     * Verifies the counter state machine: increments accumulate within a group, reads
     * see the written value, and the two groups' counters are independent.
     */
    @Test
    public void testCounterCommandListener() throws Exception {
        startCluster();

        RaftGroupService client1 = clients.get(0);
        RaftGroupService client2 = clients.get(1);

        client1.refreshLeader().get();
        client2.refreshLeader().get();

        assertNotNull(client1.leader());
        assertNotNull(client2.leader());

        assertEquals(2, client1.<Long>run(incrementAndGetCommand(2)).get());
        assertEquals(2, client1.<Long>run(getValueCommand()).get());
        assertEquals(3, client1.<Long>run(incrementAndGetCommand(1)).get());
        assertEquals(3, client1.<Long>run(getValueCommand()).get());

        // Group 1 starts from its own zero — unaffected by group 0's updates.
        assertEquals(4, client2.<Long>run(incrementAndGetCommand(4)).get());
        assertEquals(4, client2.<Long>run(getValueCommand()).get());
        assertEquals(7, client2.<Long>run(incrementAndGetCommand(3)).get());
        assertEquals(7, client2.<Long>run(getValueCommand()).get());
    }

    /**
     * Verifies that an explicit snapshot request produces exactly one snapshot
     * directory entry per group on the local server.
     */
    @Test
    public void testCreateSnapshot() throws Exception {
        startCluster();

        RaftGroupService client1 = clients.get(0);
        RaftGroupService client2 = clients.get(1);

        client1.refreshLeader().get();
        client2.refreshLeader().get();

        JraftServerImpl server = servers.get(0);
        ComponentWorkingDir serverWorkingDir = serverWorkingDirs.get(0);

        long val = applyIncrements(client1, 1, 10);

        assertEquals(sum(10), val);

        Peer localPeer0 = server.localPeers(COUNTER_GROUP_0).get(0);

        client1.snapshot(localPeer0, false).get();

        long val2 = applyIncrements(client2, 1, 20);

        assertEquals(sum(20), val2);

        Peer localPeer1 = server.localPeers(COUNTER_GROUP_1).get(0);

        client2.snapshot(localPeer1, false).get();

        Path snapshotDir0 = JraftServerImpl.getServerDataPath(
                serverWorkingDir.metaPath(),
                new RaftNodeId(COUNTER_GROUP_0, localPeer0)
        ).resolve("snapshot");

        assertEquals(1L, countFiles(snapshotDir0));

        Path snapshotDir1 = JraftServerImpl.getServerDataPath(
                serverWorkingDir.metaPath(),
                new RaftNodeId(COUNTER_GROUP_1, localPeer1)
        ).resolve("snapshot");

        assertEquals(1L, countFiles(snapshotDir1));
    }

    /**
     * Returns the number of files in the given directory (non-recursive).
     */
    private static long countFiles(Path dir) throws IOException {
        // Files.list returns a stream backed by an open directory handle — must be closed.
        try (Stream<Path> files = Files.list(dir)) {
            return files.count();
        }
    }

    /**
     * Verifies that a snapshot failure reported via the done-closure is surfaced to the
     * client as a {@link RaftException}.
     */
    @Test
    public void testCreateSnapshotGracefulFailure() throws Exception {
        // Replace the listener so every snapshot attempt fails through the callback.
        listenerFactory = () -> new CounterListener() {
            @Override
            public void onSnapshotSave(Path path, Consumer<Throwable> doneClo) {
                doneClo.accept(new IgniteInternalException("Very bad"));
            }
        };

        startCluster();

        RaftGroupService client1 = clients.get(0);
        RaftGroupService client2 = clients.get(1);

        client1.refreshLeader().get();
        client2.refreshLeader().get();

        RaftServer server = servers.get(0);

        Peer peer = server.localPeers(COUNTER_GROUP_0).get(0);

        long val = applyIncrements(client1, 1, 10);

        assertEquals(sum(10), val);

        try {
            client1.snapshot(peer, false).get();

            fail();
        } catch (Exception e) {
            assertTrue(e.getCause() instanceof RaftException);
        }
    }

    /**
     * Same scenario as {@link #testCreateSnapshotGracefulFailure()} with the listener
     * failing the snapshot via the done-closure; the client must observe a
     * {@link RaftException}.
     */
    @Test
    public void testCreateSnapshotAbnormalFailure() throws Exception {
        listenerFactory = () -> new CounterListener() {
            @Override
            public void onSnapshotSave(Path path, Consumer<Throwable> doneClo) {
                doneClo.accept(new IgniteInternalException("Very bad"));
            }
        };

        startCluster();

        RaftGroupService client1 = clients.get(0);
        RaftGroupService client2 = clients.get(1);

        client1.refreshLeader().get();
        client2.refreshLeader().get();

        long val = applyIncrements(client1, 1, 10);

        assertEquals(sum(10), val);

        Peer peer = servers.get(0).localPeers(COUNTER_GROUP_0).get(0);

        try {
            client1.snapshot(peer, false).get();

            fail();
        } catch (Exception e) {
            assertTrue(e.getCause() instanceof RaftException);
        }
    }

    /** Tests if a raft group become unavailable in case of a critical error. */
    @Test
    public void testApplyWithFailure() throws Exception {
        // Wrap the write iterator so that applying delta == 10 throws from inside the
        // state machine — a critical error, not a user-level one.
        listenerFactory = () -> new CounterListener() {
            @Override
            public void onWrite(Iterator<CommandClosure<WriteCommand>> iterator) {
                Iterator<CommandClosure<WriteCommand>> wrapper = new Iterator<>() {
                    @Override
                    public boolean hasNext() {
                        return iterator.hasNext();
                    }

                    @Override
                    public CommandClosure<WriteCommand> next() {
                        CommandClosure<WriteCommand> cmd = iterator.next();

                        IncrementAndGetCommand command = (IncrementAndGetCommand) cmd.command();

                        if (command.delta() == 10) {
                            throw new IgniteInternalException("Very bad");
                        }

                        return cmd;
                    }
                };

                super.onWrite(wrapper);
            }
        };

        startCluster();

        RaftGroupService client1 = clients.get(0);
        RaftGroupService client2 = clients.get(1);

        client1.refreshLeader().get();
        client2.refreshLeader().get();

        NodeImpl leader = getRaftNodes(COUNTER_GROUP_0)
                .filter(n -> n.getState() == STATE_LEADER)
                .findFirst()
                .orElse(null);

        assertNotNull(leader);

        long val1 = applyIncrements(client1, 1, 5);
        long val2 = applyIncrements(client2, 1, 7);

        assertEquals(sum(5), val1);
        assertEquals(sum(7), val2);

        long val3 = applyIncrements(client1, 6, 9);

        assertEquals(sum(9), val3);

        // delta == 10 triggers the critical failure injected above.
        try {
            client1.<Long>run(incrementAndGetCommand(10)).get();

            fail();
        } catch (Exception e) {
            // Expected.
            Throwable cause = e.getCause();

            assertTrue(cause instanceof RaftException);
        }

        NodeImpl finalLeader = leader;
        assertTrue(waitForCondition(() -> finalLeader.getState() == STATE_ERROR, 5_000));

        // Client can't switch to new leader, because only one peer in the list.
        try {
            client1.<Long>run(incrementAndGetCommand(11)).get();
        } catch (Exception e) {
            boolean isValid = e.getCause() instanceof TimeoutException;

            if (!isValid) {
                logger().error("Got unexpected exception", e);
            }

            assertTrue(isValid, "Expecting the timeout");
        }
    }

    /** Tests that users related exceptions from SM are propagated to the client. */
    @Test
    public void testClientCatchExceptionFromSm() throws Exception {
        listenerFactory = () -> new CounterListener() {
            @Override
            public void onWrite(Iterator<CommandClosure<WriteCommand>> iterator) {
                while (iterator.hasNext()) {
                    CommandClosure<WriteCommand> clo = iterator.next();

                    // NOTE(review): cmd0 is unused; the cast only asserts the command type.
                    IncrementAndGetCommand cmd0 = (IncrementAndGetCommand) clo.command();

                    // Completing the closure with an exception (not throwing) is the
                    // user-level failure path.
                    clo.result(new RuntimeException("Expected message"));
                }
            }

            @Override
            public void onRead(Iterator<CommandClosure<ReadCommand>> iterator) {
                while (iterator.hasNext()) {
                    CommandClosure<ReadCommand> clo = iterator.next();

                    assert clo.command() instanceof GetValueCommand;

                    clo.result(new RuntimeException("Another expected message"));
                }
            }
        };

        startCluster();

        RaftGroupService client1 = clients.get(0);
        RaftGroupService client2 = clients.get(1);

        client1.refreshLeader().get();
        client2.refreshLeader().get();

        NodeImpl leader = getRaftNodes(COUNTER_GROUP_0)
                .filter(n -> n.getState() == STATE_LEADER)
                .findFirst()
                .orElse(null);

        assertNotNull(leader);

        try {
            client1.<Long>run(incrementAndGetCommand(3)).get();

            fail();
        } catch (Exception e) {
            // Expected.
            Throwable cause = e.getCause();

            assertTrue(cause instanceof RuntimeException);

            // NOTE(review): arguments are in (actual, expected) order — passes, but is
            // reversed relative to the JUnit convention; failure messages would be confusing.
            assertEquals(cause.getMessage(), "Expected message");
        }

        try {
            client1.<Long>run(getValueCommand()).get();

            fail();
        } catch (Exception e) {
            // Expected.
            Throwable cause = e.getCause();

            assertTrue(cause instanceof RuntimeException);

            // NOTE(review): same reversed (actual, expected) order as above.
            assertEquals(cause.getMessage(), "Another expected message");
        }
    }

    /**
     * Tests if a follower is catching up the leader after restarting.
     * Variant: catch up from the log, with the follower's persistent state wiped.
     */
    @Test
    public void testFollowerCatchUpFromLog() throws Exception {
        doTestFollowerCatchUp(false, true);
    }

    /** Catch up from a snapshot, with the follower's persistent state wiped. */
    @Test
    public void testFollowerCatchUpFromSnapshot() throws Exception {
        doTestFollowerCatchUp(true, true);
    }

    /** Catch up from the log, keeping the follower's persistent state. */
    @Test
    public void testFollowerCatchUpFromLog2() throws Exception {
        doTestFollowerCatchUp(false, false);
    }

    /** Catch up from a snapshot, keeping the follower's persistent state. */
    @Test
    public void testFollowerCatchUpFromSnapshot2() throws Exception {
        doTestFollowerCatchUp(true, false);
    }

    /** Tests if a starting a new group in shared pools mode doesn't increase timer threads count. */
    @Test
    public void testTimerThreadsCount() {
        JraftServerImpl srv0 = startServer(0, x -> {
        }, opts -> opts.setTimerPoolSize(1));
        JraftServerImpl srv1 = startServer(1, x -> {
        }, opts -> opts.setTimerPoolSize(1));
        JraftServerImpl srv2 = startServer(2, x -> {
        }, opts -> opts.setTimerPoolSize(1));

        waitForTopology(srv0.clusterService(), 3, 5_000);

        ExecutorService svc = Executors.newFixedThreadPool(16);

        final int groupsCnt = 10;

        try {
            List<Future<?>> futs = new ArrayList<>(groupsCnt);

            // Start the groups concurrently to stress shared timer pools.
            for (int i = 0; i < groupsCnt; i++) {
                int finalI = i;
                futs.add(svc.submit(() -> {
                    var groupId = new TestReplicationGroupId("counter" + finalI);

                    List<JraftServerImpl> list = Arrays.asList(srv0, srv1, srv2);

                    for (int j = 0; j < list.size(); j++) {
                        RaftServer srv = list.get(j);

                        String localNodeName = srv.clusterService().topologyService().localMember().name();

                        Peer serverPeer = initialMembersConf.peer(localNodeName);

                        RaftGroupOptions groupOptions = groupOptions(srv)
                                .setLogStorageFactory(logStorageFactories.get(j))
                                .serverDataPath(serverWorkingDirs.get(j).metaPath());

                        srv.startRaftNode(new RaftNodeId(groupId, serverPeer), initialMembersConf, listenerFactory.get(), groupOptions);
                    }
                }));
            }

            for (Future<?> fut : futs) {
                try {
                    fut.get();
                } catch (Exception e) {
                    fail(e.getMessage());
                }
            }
        } finally {
            ExecutorServiceHelper.shutdownAndAwaitTermination(svc);
        }

        for (int i = 0; i < groupsCnt; i++) {
            TestReplicationGroupId grp = new TestReplicationGroupId("counter" + i);

            assertTrue(waitForCondition(() -> hasLeader(grp), 30_000));
        }

        Set<Thread> threads = Thread.getAllStackTraces().keySet();

        logger().info("RAFT threads count {}", threads.stream().filter(t -> t.getName().contains("JRaft")).count());

        List<Thread> timerThreads = threads.stream().filter(this::isTimer).sorted(comparing(Thread::getName)).collect(toList());

        assertTrue(timerThreads.size() <= 15, // This is a maximum possible number of a timer threads for 3 nodes in this test.
                "All timer threads: " + timerThreads.toString());
    }

    /**
     * The test shows that all committed updates are applied after a RAFT group restart automatically.
     * Actual data be available to read from state storage (not a state machine) directly just after the RAFT node started.
     *
     * @throws Exception If failed.
     */
    @Test
    public void testApplyUpdatesOutOfSnapshot() throws Exception {
        // Per-peer apply counters plus in-memory snapshot data/meta shared across restarts.
        var counters = new ConcurrentHashMap<Peer, AtomicInteger>();
        var snapshotDataStorage = new ConcurrentHashMap<Path, Integer>();
        var snapshotMetaStorage = new ConcurrentHashMap<String, SnapshotMeta>();
        var grpId = new TestReplicationGroupId("test_raft_group");

        for (int i = 0; i < 3; i++) {
            int finalI = i;

            startServer(i, raftServer -> {
                String localNodeName = raftServer.clusterService().topologyService().localMember().name();

                Peer serverPeer = initialMembersConf.peer(localNodeName);

                var counter = new AtomicInteger();

                counters.put(serverPeer, counter);

                var listener = new UpdateCountRaftListener(counter, snapshotDataStorage);

                RaftGroupOptions opts = groupOptions(raftServer)
                        .snapshotStorageFactory(new SnapshotInMemoryStorageFactory(snapshotMetaStorage))
                        .setLogStorageFactory(logStorageFactories.get(finalI))
                        .serverDataPath(serverWorkingDirs.get(finalI).metaPath());

                raftServer.startRaftNode(new RaftNodeId(grpId, serverPeer), initialMembersConf, listener, opts);
            }, opts -> {});
        }

        var raftClient = startClient(grpId);

        raftClient.refreshMembers(true).get();
        List<Peer> peers = raftClient.peers();

        var testWriteCommandBuilder = new TestRaftMessagesFactory().testWriteCommand();

        raftClient.run(testWriteCommandBuilder.build());

        Peer peer0 = peers.get(0);
        assertTrue(waitForCondition(() -> counters.get(peer0).get() == 1, 10_000));

        // Snapshot peer0 after 1 update, peer1 after 2; peer2 never snapshots.
        raftClient.snapshot(peer0, false).get();

        raftClient.run(testWriteCommandBuilder.build());

        Peer peer1 = peers.get(1);
        assertTrue(waitForCondition(() -> counters.get(peer1).get() == 2, 10_000));

        raftClient.snapshot(peer1, false).get();

        raftClient.run(testWriteCommandBuilder.build());

        for (AtomicInteger counter : counters.values()) {
            assertTrue(waitForCondition(() -> counter.get() == 3, 10_000));
        }

        Path peer0SnapPath = snapshotPath(peer0, grpId);
        Path peer1SnapPath = snapshotPath(peer1, grpId);
        Path peer2SnapPath = snapshotPath(peers.get(2), grpId);

        shutdownCluster();

        // Snapshot content reflects the apply count at the moment each snapshot was taken.
        assertEquals(1, snapshotDataStorage.get(peer0SnapPath));
        assertEquals(2, snapshotDataStorage.get(peer1SnapPath));
        assertNull(snapshotDataStorage.get(peer2SnapPath));

        assertNotNull(snapshotMetaStorage.get(peer0SnapPath.toString()));
        assertNotNull(snapshotMetaStorage.get(peer1SnapPath.toString()));
        assertNull(snapshotMetaStorage.get(peer2SnapPath.toString()));

        // Restart the cluster with zeroed counters; replay must bring every node back to 3.
        for (int i = 0; i < 3; i++) {
            int finalI = i;

            startServer(i, raftServer -> {
                String localNodeName = raftServer.clusterService().topologyService().localMember().name();

                Peer serverPeer = initialMembersConf.peer(localNodeName);

                var counter = counters.get(serverPeer);

                counter.set(0);

                var listener = new UpdateCountRaftListener(counter, snapshotDataStorage);

                RaftGroupOptions opts = groupOptions(raftServer)
                        .snapshotStorageFactory(new SnapshotInMemoryStorageFactory(snapshotMetaStorage))
                        .setLogStorageFactory(logStorageFactories.get(finalI))
                        .serverDataPath(serverWorkingDirs.get(finalI).metaPath());

                raftServer.startRaftNode(new RaftNodeId(grpId, serverPeer), initialMembersConf, listener, opts);
            }, opts -> {});
        }

        for (AtomicInteger counter : counters.values()) {
            assertTrue(waitForCondition(() -> counter.get() == 3, 10_000));
        }
    }

    /**
     * Builds a snapshot path by the peer address of RAFT node.
     */
    private Path snapshotPath(Peer peer, ReplicationGroupId groupId) {
        JraftServerImpl server = servers.stream()
                .filter(s -> s.localPeers(groupId).contains(peer))
                .findAny()
                .orElseThrow();

        int serverIdx = servers.indexOf(server);

        return JraftServerImpl.getServerDataPath(
                serverWorkingDirs.get(serverIdx).metaPath(),
                new RaftNodeId(groupId, peer)
        ).resolve("snapshot");
    }

    /**
     * Returns {@code true} if thread is related to timers.
     *
     * @param thread The thread.
     * @return {@code True} if a timer thread.
     */
    private boolean isTimer(Thread thread) {
        String name = thread.getName();

        return name.contains("ElectionTimer") || name.contains("VoteTimer")
                || name.contains("StepDownTimer") || name.contains("SnapshotTimer") || name.contains("Node-Scheduler");
    }

    /**
     * Returns {@code true} if a raft group has elected a leader for a some term.
     *
     * @param grpId Group id.
     * @return {@code True} if a leader is elected.
     */
    private boolean hasLeader(TestReplicationGroupId grpId) {
        return getRaftNodes(grpId)
                .anyMatch(node -> {
                    var fsm = (StateMachineAdapter) node.getOptions().getFsm();

                    // Leader must also have applied an entry in its current term.
                    return node.isLeader() && fsm.getLeaderTerm() == node.getCurrentTerm();
                });
    }

    /** Streams every local {@link NodeImpl} hosting the given group across all servers. */
    private Stream<NodeImpl> getRaftNodes(TestReplicationGroupId grpId) {
        return servers.stream()
                .flatMap(s -> s.localPeers(grpId).stream()
                        .map(peer -> new RaftNodeId(grpId, peer))
                        .map(s::raftGroupService))
                .map(s -> ((NodeImpl) s.getRaftNode()));
    }

    /**
     * Do test follower catch up.
     *
     * @param snapshot {@code True} to create snapshot on leader and truncate log.
     * @param cleanDir {@code True} to clean persistent state on follower before restart.
     * @throws Exception If failed.
     */
    private void doTestFollowerCatchUp(boolean snapshot, boolean cleanDir) throws Exception {
        startCluster();

        RaftGroupService client1 = clients.get(0);
        RaftGroupService client2 = clients.get(1);

        client1.refreshLeader().get();
        client2.refreshLeader().get();

        Peer leader1 = client1.leader();
        assertNotNull(leader1);

        Peer leader2 = client2.leader();
        assertNotNull(leader2);

        applyIncrements(client1, 0, 10);
        applyIncrements(client2, 0, 20);

        // First snapshot will not truncate logs.
        client1.snapshot(leader1, false).get();
        client2.snapshot(leader2, false).get();

        JraftServerImpl toStop = null;

        // Find the follower for both groups.
        for (JraftServerImpl server : servers) {
            List<Peer> peers = server.localPeers(COUNTER_GROUP_0);

            if (!peers.contains(leader1) && !peers.contains(leader2)) {
                toStop = server;
                break;
            }
        }

        var raftNodeId0 = new RaftNodeId(COUNTER_GROUP_0, toStop.localPeers(COUNTER_GROUP_0).get(0));
        var raftNodeId1 = new RaftNodeId(COUNTER_GROUP_1, toStop.localPeers(COUNTER_GROUP_1).get(0));

        int stopIdx = servers.indexOf(toStop);

        Path basePath = serverWorkingDirs.get(stopIdx).metaPath();

        // Remember data paths before stopping so they can be wiped when cleanDir is set.
        Path serverDataPath0 = JraftServerImpl.getServerDataPath(basePath, raftNodeId0);
        Path serverDataPath1 = JraftServerImpl.getServerDataPath(basePath, raftNodeId1);

        toStop.stopRaftNode(raftNodeId0);
        toStop.stopRaftNode(raftNodeId1);

        toStop.beforeNodeStop();

        ComponentContext componentContext = new ComponentContext();

        assertThat(toStop.stopAsync(componentContext), willCompleteSuccessfully());
        assertThat(serverServices.get(stopIdx).stopAsync(componentContext), willCompleteSuccessfully());
        assertThat(logStorageFactories.get(stopIdx).stopAsync(componentContext), willCompleteSuccessfully());
        assertThat(vaultManagers.get(stopIdx).stopAsync(componentContext), willCompleteSuccessfully());

        servers.remove(stopIdx);
        serverServices.remove(stopIdx);
        logStorageFactories.remove(stopIdx);
        vaultManagers.remove(stopIdx);
        serverWorkingDirs.remove(stopIdx);

        // Advance both groups while the follower is down.
        applyIncrements(client1, 11, 20);
        applyIncrements(client2, 21, 30);

        if (snapshot) {
            client1.snapshot(leader1, false).get();
            client2.snapshot(leader2, false).get();
        }

        if (cleanDir) {
            IgniteUtils.deleteIfExists(serverDataPath0);
            IgniteUtils.deleteIfExists(serverDataPath1);
        }

        var svc2 = startServer(stopIdx, r -> {
            String localNodeName = r.clusterService().topologyService().localMember().name();

            Peer serverPeer = initialMembersConf.peer(localNodeName);

            r.startRaftNode(
                    new RaftNodeId(COUNTER_GROUP_0, serverPeer),
                    initialMembersConf,
                    listenerFactory.get(),
                    groupOptions(r)
                            .setLogStorageFactory(logStorageFactories.get(stopIdx))
                            .serverDataPath(serverWorkingDirs.get(stopIdx).metaPath())
            );
            r.startRaftNode(
                    new RaftNodeId(COUNTER_GROUP_1, serverPeer),
                    initialMembersConf,
                    listenerFactory.get(),
                    groupOptions(r)
                            .setLogStorageFactory(logStorageFactories.get(stopIdx))
                            .serverDataPath(serverWorkingDirs.get(stopIdx).metaPath())
            );
        }, opts -> {});

        // The restarted follower must converge to the full counter values.
        assertTrue(waitForCondition(() -> validateStateMachine(sum(20), svc2, COUNTER_GROUP_0), 5_000));
        assertTrue(waitForCondition(() -> validateStateMachine(sum(30), svc2, COUNTER_GROUP_1), 5_000));

        svc2.stopRaftNodes(COUNTER_GROUP_0);
        svc2.stopRaftNodes(COUNTER_GROUP_1);

        svc2.beforeNodeStop();

        int sv2Idx = servers.size() - 1;

        assertThat(svc2.stopAsync(componentContext), willCompleteSuccessfully());
        assertThat(serverServices.get(sv2Idx).stopAsync(componentContext), willCompleteSuccessfully());
        assertThat(logStorageFactories.get(sv2Idx).stopAsync(componentContext), willCompleteSuccessfully());
        assertThat(vaultManagers.get(sv2Idx).stopAsync(componentContext), willCompleteSuccessfully());

        servers.remove(sv2Idx);
        serverServices.remove(sv2Idx);
        logStorageFactories.remove(sv2Idx);
        vaultManagers.remove(sv2Idx);
        serverWorkingDirs.remove(sv2Idx);

        // Second restart: state must still be recoverable.
        var svc3 = startServer(stopIdx, r -> {
            String localNodeName = r.clusterService().topologyService().localMember().name();

            Peer serverPeer = initialMembersConf.peer(localNodeName);

            r.startRaftNode(
                    new RaftNodeId(COUNTER_GROUP_0, serverPeer),
                    initialMembersConf,
                    listenerFactory.get(),
                    groupOptions(r)
                            .setLogStorageFactory(logStorageFactories.get(stopIdx))
                            .serverDataPath(serverWorkingDirs.get(stopIdx).metaPath())
            );
            r.startRaftNode(
                    new RaftNodeId(COUNTER_GROUP_1, serverPeer),
                    initialMembersConf,
                    listenerFactory.get(),
                    groupOptions(r)
                            .setLogStorageFactory(logStorageFactories.get(stopIdx))
                            .serverDataPath(serverWorkingDirs.get(stopIdx).metaPath())
            );
        }, opts -> {});

        assertTrue(waitForCondition(() -> validateStateMachine(sum(20), svc3, COUNTER_GROUP_0), 5_000));
        assertTrue(waitForCondition(() -> validateStateMachine(sum(30), svc3, COUNTER_GROUP_1), 5_000));
    }

    /**
     * Applies increments.
     *
     * @param client The client
     * @param start Start element.
     * @param stop Stop element.
     * @return The counter value.
     * @throws Exception If failed.
     */
    private long applyIncrements(RaftGroupService client, int start, int stop) throws Exception {
        long val = 0;

        // Inclusive range: [start, stop].
        for (int i = start; i <= stop; i++) {
            val = client.<Long>run(incrementAndGetCommand(i)).get();

            logger().info("Val={}, i={}", val, i);
        }

        return val;
    }

    /**
     * Calculates a progression sum.
     *
     * @param until Until value.
     * @return The sum.
     */
    private static long sum(long until) {
        return (1 + until) * until / 2;
    }

    /**
     * Validates state machine.
     *
     * @param expected Expected value.
     * @param server The server.
     * @param groupId Group id.
     * @return Validation result.
     */
    private static boolean validateStateMachine(long expected, JraftServerImpl server, TestReplicationGroupId groupId) {
        Peer serverPeer = server.localPeers(groupId).get(0);

        org.apache.ignite.raft.jraft.RaftGroupService svc = server.raftGroupService(new RaftNodeId(groupId, serverPeer));

        var fsm0 = (JraftServerImpl.DelegatingStateMachine) svc.getRaftNode().getOptions().getFsm();

        return expected == ((CounterListener) fsm0.getListener()).value();
    }

    /**
     * Verifies that the read index advances by one after a single committed write.
     */
    @Test
    public void testReadIndex() throws Exception {
        startCluster();
        long index = clients.get(0).readIndex().join();

        clients.get(0).<Long>run(incrementAndGetCommand(1)).get();

        assertEquals(index + 1, clients.get(0).readIndex().join());
    }

    /** Builds default group options with the node's commands marshaller configured. */
    private static RaftGroupOptions groupOptions(RaftServer raftServer) {
        return defaults().commandsMarshaller(new ThreadLocalOptimizedMarshaller(raftServer.clusterService().serializationRegistry()));
    }
}
googleapis/google-cloud-java
36,542
java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/PointwiseMetricInstance.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/aiplatform/v1beta1/evaluation_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.aiplatform.v1beta1; /** * * * <pre> * Pointwise metric instance. Usually one instance corresponds to one row in an * evaluation dataset. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1beta1.PointwiseMetricInstance} */ public final class PointwiseMetricInstance extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.PointwiseMetricInstance) PointwiseMetricInstanceOrBuilder { private static final long serialVersionUID = 0L; // Use PointwiseMetricInstance.newBuilder() to construct. 
private PointwiseMetricInstance(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private PointwiseMetricInstance() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new PointwiseMetricInstance(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto .internal_static_google_cloud_aiplatform_v1beta1_PointwiseMetricInstance_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto .internal_static_google_cloud_aiplatform_v1beta1_PointwiseMetricInstance_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1beta1.PointwiseMetricInstance.class, com.google.cloud.aiplatform.v1beta1.PointwiseMetricInstance.Builder.class); } private int instanceCase_ = 0; @SuppressWarnings("serial") private java.lang.Object instance_; public enum InstanceCase implements com.google.protobuf.Internal.EnumLite, com.google.protobuf.AbstractMessage.InternalOneOfEnum { JSON_INSTANCE(1), CONTENT_MAP_INSTANCE(2), INSTANCE_NOT_SET(0); private final int value; private InstanceCase(int value) { this.value = value; } /** * @param value The number of the enum to look for. * @return The enum associated with the given number. * @deprecated Use {@link #forNumber(int)} instead. 
*/ @java.lang.Deprecated public static InstanceCase valueOf(int value) { return forNumber(value); } public static InstanceCase forNumber(int value) { switch (value) { case 1: return JSON_INSTANCE; case 2: return CONTENT_MAP_INSTANCE; case 0: return INSTANCE_NOT_SET; default: return null; } } public int getNumber() { return this.value; } }; public InstanceCase getInstanceCase() { return InstanceCase.forNumber(instanceCase_); } public static final int JSON_INSTANCE_FIELD_NUMBER = 1; /** * * * <pre> * Instance specified as a json string. String key-value pairs are expected * in the json_instance to render * PointwiseMetricSpec.instance_prompt_template. * </pre> * * <code>string json_instance = 1;</code> * * @return Whether the jsonInstance field is set. */ public boolean hasJsonInstance() { return instanceCase_ == 1; } /** * * * <pre> * Instance specified as a json string. String key-value pairs are expected * in the json_instance to render * PointwiseMetricSpec.instance_prompt_template. * </pre> * * <code>string json_instance = 1;</code> * * @return The jsonInstance. */ public java.lang.String getJsonInstance() { java.lang.Object ref = ""; if (instanceCase_ == 1) { ref = instance_; } if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (instanceCase_ == 1) { instance_ = s; } return s; } } /** * * * <pre> * Instance specified as a json string. String key-value pairs are expected * in the json_instance to render * PointwiseMetricSpec.instance_prompt_template. * </pre> * * <code>string json_instance = 1;</code> * * @return The bytes for jsonInstance. 
*/ public com.google.protobuf.ByteString getJsonInstanceBytes() { java.lang.Object ref = ""; if (instanceCase_ == 1) { ref = instance_; } if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); if (instanceCase_ == 1) { instance_ = b; } return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int CONTENT_MAP_INSTANCE_FIELD_NUMBER = 2; /** * * * <pre> * Key-value contents for the mutlimodality input, including text, image, * video, audio, and pdf, etc. The key is placeholder in metric prompt * template, and the value is the multimodal content. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.ContentMap content_map_instance = 2;</code> * * @return Whether the contentMapInstance field is set. */ @java.lang.Override public boolean hasContentMapInstance() { return instanceCase_ == 2; } /** * * * <pre> * Key-value contents for the mutlimodality input, including text, image, * video, audio, and pdf, etc. The key is placeholder in metric prompt * template, and the value is the multimodal content. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.ContentMap content_map_instance = 2;</code> * * @return The contentMapInstance. */ @java.lang.Override public com.google.cloud.aiplatform.v1beta1.ContentMap getContentMapInstance() { if (instanceCase_ == 2) { return (com.google.cloud.aiplatform.v1beta1.ContentMap) instance_; } return com.google.cloud.aiplatform.v1beta1.ContentMap.getDefaultInstance(); } /** * * * <pre> * Key-value contents for the mutlimodality input, including text, image, * video, audio, and pdf, etc. The key is placeholder in metric prompt * template, and the value is the multimodal content. 
* </pre> * * <code>.google.cloud.aiplatform.v1beta1.ContentMap content_map_instance = 2;</code> */ @java.lang.Override public com.google.cloud.aiplatform.v1beta1.ContentMapOrBuilder getContentMapInstanceOrBuilder() { if (instanceCase_ == 2) { return (com.google.cloud.aiplatform.v1beta1.ContentMap) instance_; } return com.google.cloud.aiplatform.v1beta1.ContentMap.getDefaultInstance(); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (instanceCase_ == 1) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, instance_); } if (instanceCase_ == 2) { output.writeMessage(2, (com.google.cloud.aiplatform.v1beta1.ContentMap) instance_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (instanceCase_ == 1) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, instance_); } if (instanceCase_ == 2) { size += com.google.protobuf.CodedOutputStream.computeMessageSize( 2, (com.google.cloud.aiplatform.v1beta1.ContentMap) instance_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.PointwiseMetricInstance)) { return super.equals(obj); } com.google.cloud.aiplatform.v1beta1.PointwiseMetricInstance other = (com.google.cloud.aiplatform.v1beta1.PointwiseMetricInstance) obj; if (!getInstanceCase().equals(other.getInstanceCase())) return false; switch (instanceCase_) { case 1: if (!getJsonInstance().equals(other.getJsonInstance())) 
return false; break; case 2: if (!getContentMapInstance().equals(other.getContentMapInstance())) return false; break; case 0: default: } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); switch (instanceCase_) { case 1: hash = (37 * hash) + JSON_INSTANCE_FIELD_NUMBER; hash = (53 * hash) + getJsonInstance().hashCode(); break; case 2: hash = (37 * hash) + CONTENT_MAP_INSTANCE_FIELD_NUMBER; hash = (53 * hash) + getContentMapInstance().hashCode(); break; case 0: default: } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.aiplatform.v1beta1.PointwiseMetricInstance parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.PointwiseMetricInstance parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.PointwiseMetricInstance parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.PointwiseMetricInstance parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.PointwiseMetricInstance parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
com.google.cloud.aiplatform.v1beta1.PointwiseMetricInstance parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.PointwiseMetricInstance parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.PointwiseMetricInstance parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.PointwiseMetricInstance parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.PointwiseMetricInstance parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.PointwiseMetricInstance parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.PointwiseMetricInstance parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return 
newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.aiplatform.v1beta1.PointwiseMetricInstance prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Pointwise metric instance. Usually one instance corresponds to one row in an * evaluation dataset. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1beta1.PointwiseMetricInstance} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.PointwiseMetricInstance) com.google.cloud.aiplatform.v1beta1.PointwiseMetricInstanceOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto .internal_static_google_cloud_aiplatform_v1beta1_PointwiseMetricInstance_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto .internal_static_google_cloud_aiplatform_v1beta1_PointwiseMetricInstance_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1beta1.PointwiseMetricInstance.class, com.google.cloud.aiplatform.v1beta1.PointwiseMetricInstance.Builder.class); } // Construct using com.google.cloud.aiplatform.v1beta1.PointwiseMetricInstance.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder 
clear() { super.clear(); bitField0_ = 0; if (contentMapInstanceBuilder_ != null) { contentMapInstanceBuilder_.clear(); } instanceCase_ = 0; instance_ = null; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto .internal_static_google_cloud_aiplatform_v1beta1_PointwiseMetricInstance_descriptor; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.PointwiseMetricInstance getDefaultInstanceForType() { return com.google.cloud.aiplatform.v1beta1.PointwiseMetricInstance.getDefaultInstance(); } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.PointwiseMetricInstance build() { com.google.cloud.aiplatform.v1beta1.PointwiseMetricInstance result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.PointwiseMetricInstance buildPartial() { com.google.cloud.aiplatform.v1beta1.PointwiseMetricInstance result = new com.google.cloud.aiplatform.v1beta1.PointwiseMetricInstance(this); if (bitField0_ != 0) { buildPartial0(result); } buildPartialOneofs(result); onBuilt(); return result; } private void buildPartial0(com.google.cloud.aiplatform.v1beta1.PointwiseMetricInstance result) { int from_bitField0_ = bitField0_; } private void buildPartialOneofs( com.google.cloud.aiplatform.v1beta1.PointwiseMetricInstance result) { result.instanceCase_ = instanceCase_; result.instance_ = this.instance_; if (instanceCase_ == 2 && contentMapInstanceBuilder_ != null) { result.instance_ = contentMapInstanceBuilder_.build(); } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder 
clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.aiplatform.v1beta1.PointwiseMetricInstance) { return mergeFrom((com.google.cloud.aiplatform.v1beta1.PointwiseMetricInstance) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.PointwiseMetricInstance other) { if (other == com.google.cloud.aiplatform.v1beta1.PointwiseMetricInstance.getDefaultInstance()) return this; switch (other.getInstanceCase()) { case JSON_INSTANCE: { instanceCase_ = 1; instance_ = other.instance_; onChanged(); break; } case CONTENT_MAP_INSTANCE: { mergeContentMapInstance(other.getContentMapInstance()); break; } case INSTANCE_NOT_SET: { break; } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { java.lang.String s = input.readStringRequireUtf8(); instanceCase_ = 1; instance_ 
= s; break; } // case 10 case 18: { input.readMessage( getContentMapInstanceFieldBuilder().getBuilder(), extensionRegistry); instanceCase_ = 2; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int instanceCase_ = 0; private java.lang.Object instance_; public InstanceCase getInstanceCase() { return InstanceCase.forNumber(instanceCase_); } public Builder clearInstance() { instanceCase_ = 0; instance_ = null; onChanged(); return this; } private int bitField0_; /** * * * <pre> * Instance specified as a json string. String key-value pairs are expected * in the json_instance to render * PointwiseMetricSpec.instance_prompt_template. * </pre> * * <code>string json_instance = 1;</code> * * @return Whether the jsonInstance field is set. */ @java.lang.Override public boolean hasJsonInstance() { return instanceCase_ == 1; } /** * * * <pre> * Instance specified as a json string. String key-value pairs are expected * in the json_instance to render * PointwiseMetricSpec.instance_prompt_template. * </pre> * * <code>string json_instance = 1;</code> * * @return The jsonInstance. */ @java.lang.Override public java.lang.String getJsonInstance() { java.lang.Object ref = ""; if (instanceCase_ == 1) { ref = instance_; } if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (instanceCase_ == 1) { instance_ = s; } return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Instance specified as a json string. String key-value pairs are expected * in the json_instance to render * PointwiseMetricSpec.instance_prompt_template. 
* </pre> * * <code>string json_instance = 1;</code> * * @return The bytes for jsonInstance. */ @java.lang.Override public com.google.protobuf.ByteString getJsonInstanceBytes() { java.lang.Object ref = ""; if (instanceCase_ == 1) { ref = instance_; } if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); if (instanceCase_ == 1) { instance_ = b; } return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Instance specified as a json string. String key-value pairs are expected * in the json_instance to render * PointwiseMetricSpec.instance_prompt_template. * </pre> * * <code>string json_instance = 1;</code> * * @param value The jsonInstance to set. * @return This builder for chaining. */ public Builder setJsonInstance(java.lang.String value) { if (value == null) { throw new NullPointerException(); } instanceCase_ = 1; instance_ = value; onChanged(); return this; } /** * * * <pre> * Instance specified as a json string. String key-value pairs are expected * in the json_instance to render * PointwiseMetricSpec.instance_prompt_template. * </pre> * * <code>string json_instance = 1;</code> * * @return This builder for chaining. */ public Builder clearJsonInstance() { if (instanceCase_ == 1) { instanceCase_ = 0; instance_ = null; onChanged(); } return this; } /** * * * <pre> * Instance specified as a json string. String key-value pairs are expected * in the json_instance to render * PointwiseMetricSpec.instance_prompt_template. * </pre> * * <code>string json_instance = 1;</code> * * @param value The bytes for jsonInstance to set. * @return This builder for chaining. 
*/ public Builder setJsonInstanceBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); instanceCase_ = 1; instance_ = value; onChanged(); return this; } private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.ContentMap, com.google.cloud.aiplatform.v1beta1.ContentMap.Builder, com.google.cloud.aiplatform.v1beta1.ContentMapOrBuilder> contentMapInstanceBuilder_; /** * * * <pre> * Key-value contents for the mutlimodality input, including text, image, * video, audio, and pdf, etc. The key is placeholder in metric prompt * template, and the value is the multimodal content. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.ContentMap content_map_instance = 2;</code> * * @return Whether the contentMapInstance field is set. */ @java.lang.Override public boolean hasContentMapInstance() { return instanceCase_ == 2; } /** * * * <pre> * Key-value contents for the mutlimodality input, including text, image, * video, audio, and pdf, etc. The key is placeholder in metric prompt * template, and the value is the multimodal content. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.ContentMap content_map_instance = 2;</code> * * @return The contentMapInstance. */ @java.lang.Override public com.google.cloud.aiplatform.v1beta1.ContentMap getContentMapInstance() { if (contentMapInstanceBuilder_ == null) { if (instanceCase_ == 2) { return (com.google.cloud.aiplatform.v1beta1.ContentMap) instance_; } return com.google.cloud.aiplatform.v1beta1.ContentMap.getDefaultInstance(); } else { if (instanceCase_ == 2) { return contentMapInstanceBuilder_.getMessage(); } return com.google.cloud.aiplatform.v1beta1.ContentMap.getDefaultInstance(); } } /** * * * <pre> * Key-value contents for the mutlimodality input, including text, image, * video, audio, and pdf, etc. The key is placeholder in metric prompt * template, and the value is the multimodal content. 
* </pre> * * <code>.google.cloud.aiplatform.v1beta1.ContentMap content_map_instance = 2;</code> */ public Builder setContentMapInstance(com.google.cloud.aiplatform.v1beta1.ContentMap value) { if (contentMapInstanceBuilder_ == null) { if (value == null) { throw new NullPointerException(); } instance_ = value; onChanged(); } else { contentMapInstanceBuilder_.setMessage(value); } instanceCase_ = 2; return this; } /** * * * <pre> * Key-value contents for the mutlimodality input, including text, image, * video, audio, and pdf, etc. The key is placeholder in metric prompt * template, and the value is the multimodal content. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.ContentMap content_map_instance = 2;</code> */ public Builder setContentMapInstance( com.google.cloud.aiplatform.v1beta1.ContentMap.Builder builderForValue) { if (contentMapInstanceBuilder_ == null) { instance_ = builderForValue.build(); onChanged(); } else { contentMapInstanceBuilder_.setMessage(builderForValue.build()); } instanceCase_ = 2; return this; } /** * * * <pre> * Key-value contents for the mutlimodality input, including text, image, * video, audio, and pdf, etc. The key is placeholder in metric prompt * template, and the value is the multimodal content. 
* </pre> * * <code>.google.cloud.aiplatform.v1beta1.ContentMap content_map_instance = 2;</code> */ public Builder mergeContentMapInstance(com.google.cloud.aiplatform.v1beta1.ContentMap value) { if (contentMapInstanceBuilder_ == null) { if (instanceCase_ == 2 && instance_ != com.google.cloud.aiplatform.v1beta1.ContentMap.getDefaultInstance()) { instance_ = com.google.cloud.aiplatform.v1beta1.ContentMap.newBuilder( (com.google.cloud.aiplatform.v1beta1.ContentMap) instance_) .mergeFrom(value) .buildPartial(); } else { instance_ = value; } onChanged(); } else { if (instanceCase_ == 2) { contentMapInstanceBuilder_.mergeFrom(value); } else { contentMapInstanceBuilder_.setMessage(value); } } instanceCase_ = 2; return this; } /** * * * <pre> * Key-value contents for the mutlimodality input, including text, image, * video, audio, and pdf, etc. The key is placeholder in metric prompt * template, and the value is the multimodal content. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.ContentMap content_map_instance = 2;</code> */ public Builder clearContentMapInstance() { if (contentMapInstanceBuilder_ == null) { if (instanceCase_ == 2) { instanceCase_ = 0; instance_ = null; onChanged(); } } else { if (instanceCase_ == 2) { instanceCase_ = 0; instance_ = null; } contentMapInstanceBuilder_.clear(); } return this; } /** * * * <pre> * Key-value contents for the mutlimodality input, including text, image, * video, audio, and pdf, etc. The key is placeholder in metric prompt * template, and the value is the multimodal content. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.ContentMap content_map_instance = 2;</code> */ public com.google.cloud.aiplatform.v1beta1.ContentMap.Builder getContentMapInstanceBuilder() { return getContentMapInstanceFieldBuilder().getBuilder(); } /** * * * <pre> * Key-value contents for the mutlimodality input, including text, image, * video, audio, and pdf, etc. 
The key is placeholder in metric prompt * template, and the value is the multimodal content. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.ContentMap content_map_instance = 2;</code> */ @java.lang.Override public com.google.cloud.aiplatform.v1beta1.ContentMapOrBuilder getContentMapInstanceOrBuilder() { if ((instanceCase_ == 2) && (contentMapInstanceBuilder_ != null)) { return contentMapInstanceBuilder_.getMessageOrBuilder(); } else { if (instanceCase_ == 2) { return (com.google.cloud.aiplatform.v1beta1.ContentMap) instance_; } return com.google.cloud.aiplatform.v1beta1.ContentMap.getDefaultInstance(); } } /** * * * <pre> * Key-value contents for the mutlimodality input, including text, image, * video, audio, and pdf, etc. The key is placeholder in metric prompt * template, and the value is the multimodal content. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.ContentMap content_map_instance = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.ContentMap, com.google.cloud.aiplatform.v1beta1.ContentMap.Builder, com.google.cloud.aiplatform.v1beta1.ContentMapOrBuilder> getContentMapInstanceFieldBuilder() { if (contentMapInstanceBuilder_ == null) { if (!(instanceCase_ == 2)) { instance_ = com.google.cloud.aiplatform.v1beta1.ContentMap.getDefaultInstance(); } contentMapInstanceBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.ContentMap, com.google.cloud.aiplatform.v1beta1.ContentMap.Builder, com.google.cloud.aiplatform.v1beta1.ContentMapOrBuilder>( (com.google.cloud.aiplatform.v1beta1.ContentMap) instance_, getParentForChildren(), isClean()); instance_ = null; } instanceCase_ = 2; onChanged(); return contentMapInstanceBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final 
com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.PointwiseMetricInstance) } // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.PointwiseMetricInstance) private static final com.google.cloud.aiplatform.v1beta1.PointwiseMetricInstance DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.PointwiseMetricInstance(); } public static com.google.cloud.aiplatform.v1beta1.PointwiseMetricInstance getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<PointwiseMetricInstance> PARSER = new com.google.protobuf.AbstractParser<PointwiseMetricInstance>() { @java.lang.Override public PointwiseMetricInstance parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<PointwiseMetricInstance> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<PointwiseMetricInstance> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.PointwiseMetricInstance getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/flink
36,414
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/hints/batch/JoinHintTestBase.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.table.planner.plan.hints.batch; import org.apache.flink.table.api.ExplainDetail; import org.apache.flink.table.api.SqlParserException; import org.apache.flink.table.api.StatementSet; import org.apache.flink.table.api.TableConfig; import org.apache.flink.table.api.ValidationException; import org.apache.flink.table.api.config.ExecutionConfigOptions; import org.apache.flink.table.api.config.OptimizerConfigOptions; import org.apache.flink.table.planner.hint.JoinStrategy; import org.apache.flink.table.planner.plan.utils.FlinkRelOptUtil; import org.apache.flink.table.planner.utils.BatchTableTestUtil; import org.apache.flink.table.planner.utils.PlanKind; import org.apache.flink.table.planner.utils.TableTestBase; import org.apache.flink.shaded.curator5.org.apache.curator.shaded.com.google.common.collect.Lists; import org.apache.calcite.rel.RelNode; import org.apache.calcite.sql.SqlExplainLevel; import org.apache.logging.log4j.util.Strings; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import java.util.List; import java.util.stream.Collectors; import scala.Enumeration; import static org.assertj.core.api.Assertions.assertThatThrownBy; import static 
scala.runtime.BoxedUnit.UNIT;

/**
 * A test base for join hint.
 *
 * <p>TODO add test to cover legacy table source.
 *
 * <p>Notice: Join hints in sub-query will not be printed in AST, because {@code RexSubQuery} use
 * 'RelOptUtil.toString(rel)' to print node and doesn't print hints about {@code LogicalJoin}.
 */
public abstract class JoinHintTestBase extends TableTestBase {

    protected BatchTableTestUtil util;

    // All join hint names derived from JoinStrategy, used to enumerate the hints NOT under test.
    private final List<String> allJoinHintNames =
            Lists.newArrayList(JoinStrategy.values()).stream()
                    // LOOKUP hint has different kv-options against other join hints
                    .filter(hint -> hint != JoinStrategy.LOOKUP)
                    .map(JoinStrategy::getJoinHintName)
                    .collect(Collectors.toList());

    /** Creates the shared bounded tables T1..T3 and views V4, V5 used by every test case. */
    @BeforeEach
    void before() {
        util = batchTestUtil(TableConfig.getDefault());
        util.tableEnv()
                .executeSql(
                        "CREATE TABLE T1 (\n"
                                + " a1 BIGINT,\n"
                                + " b1 VARCHAR\n"
                                + ") WITH (\n"
                                + " 'connector' = 'values',\n"
                                + " 'bounded' = 'true'\n"
                                + ")");
        util.tableEnv()
                .executeSql(
                        "CREATE TABLE T2 (\n"
                                + " a2 BIGINT,\n"
                                + " b2 VARCHAR\n"
                                + ") WITH (\n"
                                + " 'connector' = 'values',\n"
                                + " 'bounded' = 'true'\n"
                                + ")");
        util.tableEnv()
                .executeSql(
                        "CREATE TABLE T3 (\n"
                                + " a3 BIGINT,\n"
                                + " b3 VARCHAR\n"
                                + ") WITH (\n"
                                + " 'connector' = 'values',\n"
                                + " 'bounded' = 'true'\n"
                                + ")");
        // V4 renames T3's columns; V5 is a join view over T1 and T2.
        util.tableEnv().executeSql("CREATE View V4 as select a3 as a4, b3 as b4 from T3");
        util.tableEnv()
                .executeSql("create view V5 as select T1.* from T1 join T2 on T1.a1 = T2.a2");
    }

    /** The name of the single join hint exercised by the concrete subclass. */
    protected abstract String getTestSingleJoinHint();

    // NOTE(review): presumably the exec operator name disabled via config to force the hinted
    // strategy — usage is outside this view, confirm in subclasses.
    protected abstract String getDisabledOperatorName();

    /** Verifies the AST and the optimized rel plan for the given SQL statement. */
    protected void verifyRelPlanByCustom(String sql) {
        util.doVerifyPlan(
                sql,
                new ExplainDetail[] {},
                false,
                new Enumeration.Value[] {PlanKind.AST(), PlanKind.OPT_REL()},
                true);
    }

    /** Verifies the AST and the optimized rel plan for the given statement set. */
    protected void verifyRelPlanByCustom(StatementSet set) {
        util.doVerifyPlan(
                set,
                new ExplainDetail[] {},
                false,
                new Enumeration.Value[] {PlanKind.AST(), PlanKind.OPT_REL()},
                () -> UNIT,
                true,
                false);
    }

    /** Returns every known join hint name except the one currently under test. */
    protected List<String> getOtherJoinHints() {
        return allJoinHintNames.stream()
                .filter(name -> !name.equals(getTestSingleJoinHint()))
                .collect(Collectors.toList());
    }

    @Test
    void testSimpleJoinHintWithLeftSideAsBuildSide() {
        String sql = "select /*+ %s(T1) */* from T1 join T2 on T1.a1 = T2.a2";

        verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint()));
    }

    @Test
    void testSimpleJoinHintWithRightSideAsBuildSide() {
        String sql = "select /*+ %s(T2) */* from T1 join T2 on T1.a1 = T2.a2";

        verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint()));
    }

    @Test
    void testJoinHintWithMultiJoinAndFirstSideAsBuildSide1() {
        // the T1 will be the build side in first join
        String sql =
                "select /*+ %s(T1, T2) */* from T1, T2, T3 where T1.a1 = T2.a2 and T1.b1 = T3.b3";

        verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint()));
    }

    @Test
    void testJoinHintWithMultiJoinAndFirstSideAsBuildSide2() {
        String sql =
                "select /*+ %s(T1, T2) */* from T1, T2, T3 where T1.a1 = T2.a2 and T2.b2 = T3.b3";

        verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint()));
    }

    @Test
    void testJoinHintWithMultiJoinAndSecondThirdSideAsBuildSides1() {
        String sql =
                "select /*+ %s(T2, T3) */* from T1, T2, T3 where T1.a1 = T2.a2 and T1.b1 = T3.b3";

        verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint()));
    }

    @Test
    void testJoinHintWithMultiJoinAndSecondThirdSideAsBuildSides2() {
        String sql =
                "select /*+ %s(T2, T3) */* from T1, T2, T3 where T1.a1 = T2.a2 and T2.b2 = T3.b3";

        verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint()));
    }

    @Test
    void testJoinHintWithMultiJoinAndFirstThirdSideAsBuildSides() {
        String sql =
                "select /*+ %s(T1, T3) */* from T1, T2, T3 where T1.a1 = T2.a2 and T2.b2 = T3.b3";

        verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint()));
    }

    // An unknown table in the hint option list must be rejected with a ValidationException.
    @Test
    void testJoinHintWithUnknownTable() {
        String sql = "select /*+ %s(T99) */* from T1 join T2 on T1.a1 = T2.a2";

        assertThatThrownBy(() -> verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint())))
                .isInstanceOf(ValidationException.class)
                .hasMessageContaining(
                        "The options of following hints cannot match the name of input tables or views: \n`%s` in `%s`",
                        "T99", getTestSingleJoinHint());
    }

    @Test
    void testJoinHintWithUnknownTableNameMixedWithValidTableNames1() {
        String sql = "select /*+ %s(T1, T99) */* from T1 join T2 on T1.a1 = T2.a2";

        assertThatThrownBy(() -> verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint())))
                .isInstanceOf(ValidationException.class)
                .hasMessageContaining(
                        "The options of following hints cannot match the name of input tables or views: \n`%s` in `%s`",
                        "T99", getTestSingleJoinHint());
    }

    @Test
    void testJoinHintWithUnknownTableNameMixedWithValidTableNames2() {
        String sql = "select /*+ %s(T1, T99, T2) */* from T1 join T2 on T1.a1 = T2.a2";

        assertThatThrownBy(() -> verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint())))
                .isInstanceOf(ValidationException.class)
                .hasMessageContaining(
                        "The options of following hints cannot match the name of input tables or views: \n`%s` in `%s`",
                        "T99", getTestSingleJoinHint());
    }

    // Multiple unknown names are reported together (sorted) in the error message.
    @Test
    void testJoinHintWithMultiUnknownTableNamesMixedWithValidTableNames() {
        String sql = "select /*+ %s(T1, T99, T98) */* from T1 join T2 on T1.a1 = T2.a2";

        assertThatThrownBy(() -> verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint())))
                .isInstanceOf(ValidationException.class)
                .hasMessageContaining(
                        "The options of following hints cannot match the name of input tables or views: \n`%s` in `%s`",
                        "T98, T99", getTestSingleJoinHint());
    }

    // Views are valid hint targets, just like tables.
    @Test
    void testJoinHintWithView() {
        String sql = "select /*+ %s(V4) */* from T1 join V4 on T1.a1 = V4.a4";

        verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint()));
    }

    @Test
    void testJoinHintWithUnknownView() {
        String sql = "select /*+ %s(V99) */* from T1 join V4 on T1.a1 = V4.a4";

        assertThatThrownBy(() -> verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint())))
                .isInstanceOf(ValidationException.class)
                .hasMessageContaining(
                        "The options of following hints cannot match the name of input tables or views: \n`%s` in `%s`",
"V99", getTestSingleJoinHint()); } @Test void testJoinHintWithEquiPred() { String sql = "select /*+ %s(T1) */* from T1, T2 where T1.a1 = T2.a2"; verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint())); } @Test void testJoinHintWithEquiPredAndFilter() { String sql = "select /*+ %s(T1) */* from T1, T2 where T1.a1 = T2.a2 and T1.a1 > 1"; verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint())); } @Test void testJoinHintWithEquiAndLocalPred() { String sql = "select /*+ %s(T1) */* from T1 inner join T2 on T1.a1 = T2.a2 and T1.a1 < 1"; verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint())); } @Test void testJoinHintWithEquiAndNonEquiPred() { String sql = "select /*+ %s(T1) */* from T1 inner join T2 on T1.b1 = T2.b2 and T1.a1 < 1 and T1.a1 < T2.a2"; verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint())); } @Test void testJoinHintWithoutJoinPred() { String sql = "select /*+ %s(T1) */* from T1, T2"; verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint())); } @Test void testJoinHintWithNonEquiPred() { String sql = "select /*+ %s(T1) */* from T1 inner join T2 on T1.a1 > T2.a2"; verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint())); } @Test void testJoinHintWithLeftJoinAndLeftSideAsBuildSide() { String sql = "select /*+ %s(T1) */* from T1 left join T2 on T1.a1 = T2.a2"; verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint())); } @Test void testJoinHintWithLeftJoinAndRightSideAsBuildSide() { String sql = "select /*+ %s(T2) */* from T1 left join T2 on T1.a1 = T2.a2"; verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint())); } @Test void testJoinHintWithRightJoinAndLeftSideAsBuildSide() { String sql = "select /*+ %s(T1) */* from T1 right join T2 on T1.a1 = T2.a2"; verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint())); } @Test void testJoinHintWithRightJoinAndRightSideAsBuildSide() { String sql = "select /*+ %s(T2) */* from T1 right join T2 on T1.a1 = T2.a2"; 
verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint())); } @Test void testJoinHintWithFullJoinAndLeftSideAsBuildSide() { String sql = "select /*+ %s(T1) */* from T1 full join T2 on T1.a1 = T2.a2"; verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint())); } @Test void testJoinHintWithFullJoinAndRightSideAsBuildSide() { String sql = "select /*+ %s(T2) */* from T1 full join T2 on T1.a1 = T2.a2"; verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint())); } // TODO currently join hint is not supported on SEMI join, it will use default join strategy by // planner @Test void testJoinHintWithSemiJoinAndLeftSideAsBuildSide() { String sql = "select /*+ %s(T1) */* from T1 where a1 in (select a2 from T2)"; verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint())); } // TODO currently join hint is not supported on SEMI join, it will use default join strategy by // planner @Test void testJoinHintWithSemiJoinAndRightSideAsBuildSide() { String sql = "select /*+ %s(T2) */* from T1 where a1 in (select a2 from T2)"; verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint())); } // TODO currently join hint is not supported on ANTI join, it will use default join strategy by // planner @Test void testJoinHintWithAntiJoinAndLeftSideAsBuildSide() { String sql = "select /*+ %s(T1) */* from T1 where a1 not in (select a2 from T2)"; verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint())); } // TODO currently join hint is not supported on ANTI join, it will use default join strategy by // planner @Test void testJoinHintWithAntiJoinAndRightSideAsBuildSide() { String sql = "select /*+ %s(T2) */* from T1 where a1 not in (select a2 from T2)"; verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint())); } @Test void testJoinHintWithMultiArgsAndLeftSideFirst() { // the first arg will be chosen as the build side String sql = "select /*+ %s(T1, T2) */* from T1 right join T2 on T1.a1 = T2.a2"; verifyRelPlanByCustom(String.format(sql, 
getTestSingleJoinHint())); } @Test void testJoinHintWithMultiArgsAndRightSideFirst() { // the first arg will be chosen as the build side String sql = "select /*+ %s(T2, T1) */* from T1 right join T2 on T1.a1 = T2.a2"; verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint())); } @Test void testMultiJoinHints() { // the first join hint will be chosen String sql = "select /*+ %s(T1), %s */* from T1 join T2 on T1.a1 = T2.a2"; String otherJoinHints = Strings.join( getOtherJoinHints().stream() .map(name -> String.format("%s(T1)", name)) .collect(Collectors.toList()), ','); verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint(), otherJoinHints)); } @Test void testMultiJoinHintsWithTheFirstOneIsInvalid() { // the first join hint is invalid because it is not equi join except NEST_LOOP String sql = "select /*+ %s(T1), NEST_LOOP(T1) */* from T1 join T2 on T1.a1 > T2.a2"; verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint())); } @Test void testJoinHintWithoutAffectingJoinInViewWhileArgsCanBeFoundInOuterJoin() { // the join in V2 will use the planner's default join strategy, // and the join between T1 and V5 will use the tested join hint String sql = "select /*+ %s(T1)*/T1.* from T1 join V5 on T1.a1 = V5.a1"; verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint())); } @Test void testJoinHintWithoutAffectingJoinInViewWhileOuterQueryIsNotJoin() { // the join in V2 will use the planner's default join strategy, // and the join between T1 and V5 will use the tested join hint String sql = "select /*+ %s(T1)*/* from V5"; verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint())); } @Test void testJoinHintWithoutAffectingJoinInViewWhileRootOfViewIsFilter() { // the join in V2 will use the planner's default join strategy, // and the join between T1 and V2 will use the tested join hint util.tableEnv() .executeSql( "create view V2 as select T1.* from T1 join T2 on T1.a1 = T2.a2 where T1.b1 = 'abc'"); String sql = "select /*+ %s(T1)*/* from 
V2"; verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint())); } @Test void testJoinHintWithSimpleSumInSelectList() { String sql = "select /*+ %s(T1)*/T1.b1, sum(T1.a1) from T1 join T2 on T1.b1 = T2.b2 group by T1.b1"; verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint())); } @Test void testJoinHintWithCastInSelectList() { String sql = "select /*+ %s(T1)*/T1.b1, cast(T1.a1 as int) from T1 join T2 on T1.b1 = T2.b2"; verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint())); } @Test void testJoinHintWithoutAffectingJoinInSubQueryWhileArgsCanBeFoundInOuterJoin() { // the join in sub-query will use the planner's default join strategy, // and the join outside will use the tested join hint String sql = "select /*+ %s(T1)*/T1.* from T1 join (select T1.* from T1 join T2 on T1.a1 = T2.a2) V2 on T1.a1 = V2.a1"; verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint())); } @Test void testJoinHintWithoutAffectingJoinInSubQueryWhileOuterQueryIsNotJoin() { String sql = "select /*+ %s(T1)*/* from (select T1.* from T1 join T2 on T1.a1 = T2.a2)"; verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint())); } @Test void testJoinHintWithoutAffectingJoinInSubQueryWhileRootOfSubQueryIsFilter() { String sql = "select /*+ %s(T1)*/* from (select T1.* from T1 join T2 on T1.a1 = T2.a2 where T1.b1 = 'abc')"; verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint())); } @Test void testJoinHintWithoutAffectingJoinInSubQueryWhileContainsSumInQueryBlock() { String sql = "select /*+ %s(T1)*/T4.a1, (select count(*) from T1 join T3 on T1.a1 = T3.a3) as cnt from (select T1.* from T1 join T2 on T1.a1 = T2.a2 where T1.b1 = 'abc') T4"; verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint())); } @Test void testJoinHintWithoutAffectingJoinInSubQueryWhileContainsUnionAndJoinInSelectList() { String sql = "select /*+ %s(T1)*/T4.a1, (select count(*) from T1 join ((select T1.a1 as a3 from T1) union (select a3 from T3)) T3 on T1.a1 = T3.a3 where 
T3.a3 = 1) as cnt from (select T1.* from T1 join T2 on T1.a1 = T2.a2) T4"; verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint())); } @Test void testJoinHintWithoutAffectingJoinInSubQueryWhileContainsUnionAndJoinInSelectFrom() { String sql = "select /*+ %s(T1)*/T4.a1 from (select T1.* from T1 join ((select T1.a1 as a2 from T1) union (select a2 from T2)) T2 on T1.a1 = T2.a2) T4"; verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint())); } @Test void testJoinHintWithTableAlias() { // the join in sub-query will use the planner's default join strategy, // and the join between T1 and alias V2 will use the tested join hint String sql = "select /*+ %s(V2)*/T1.* from T1 join (select T1.* from T1 join T2 on T1.a1 = T2.a2) V2 on T1.a1 = V2.a1"; verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint())); } @Test void testJoinHintsWithMultiSameJoinHintsAndSingleArg() { // the first join hint will be chosen and T1 will be chosen as the build side String sql = "select /*+ %s(T1), %s(T2) */* from T1 join T2 on T1.a1 = T2.a2"; verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint(), getTestSingleJoinHint())); } @Test void testJoinHintsWithDuplicatedArgs() { // T1 will be chosen as the build side String sql = "select /*+ %s(T1, T1) */* from T1 join T2 on T1.a1 = T2.a2"; verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint(), getTestSingleJoinHint())); } @Test void testJoinHintsWithMultiSameJoinHintsAndMultiArgs() { // the first join hint will be chosen and T1 will be chosen as the build side String sql = "select /*+ %s(T1, T2), %s(T2, T1) */* from T1 join T2 on T1.a1 = T2.a2"; verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint(), getTestSingleJoinHint())); } @Test void testJoinHintsWithMultiHintsThrowException() { String sql = "select /*+ %s(T1) */ /*+ %s(T2) */ * from T1 join T2 on T1.a1 = T2.a2"; assertThatThrownBy( () -> verifyRelPlanByCustom( String.format( sql, getTestSingleJoinHint(), getTestSingleJoinHint()))) 
.isInstanceOf(SqlParserException.class) .hasMessageContaining("SQL parse failed."); } @Test void testJoinHintWithDisabledOperator() { util.tableEnv() .getConfig() .set( ExecutionConfigOptions.TABLE_EXEC_DISABLED_OPERATORS, getDisabledOperatorName()); String sql = "select /*+ %s(T1) */* from T1 join T2 on T1.a1 = T2.a2"; verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint())); } @Test void testJoinHintsWithUnion() { // there are two query blocks and join hints are independent String sql = "select /*+ %s(T1) */* from T1 join T2 on T1.a1 = T2.a2 union select /*+ %s(T3) */* from T3 join T1 on T3.a3 = T1.a1"; verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint(), getTestSingleJoinHint())); } @Test void testJoinHintsWithFilter() { // there are two query blocks and join hints are independent String sql = "select /*+ %s(T1) */* from T1 join T2 on T1.a1 = T2.a2 where T1.a1 > 5"; verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint())); } @Test void testJoinHintsWithCalc() { // there are two query blocks and join hints are independent String sql = "select /*+ %s(T1) */a1 + 1, a1 * 10 from T1 join T2 on T1.a1 = T2.a2"; verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint())); } @Test void testJoinHintInView() { // the build side in view is left util.tableEnv() .executeSql( String.format( "create view V2 as select /*+ %s(T1)*/ T1.* from T1 join T2 on T1.a1 = T2.a2", getTestSingleJoinHint())); // the build side outside is right String sql = "select /*+ %s(V2)*/T3.* from T3 join V2 on T3.a3 = V2.a1"; verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint())); } @Test void testJoinHintInMultiLevelView() { // the inside view keeps multi alias // the build side in this view is left util.tableEnv() .executeSql( String.format( "create view V2 as select /*+ %s(T1)*/ T1.* from T1 join T2 on T1.a1 = T2.a2", getTestSingleJoinHint())); // the build side in this view is right util.tableEnv() .executeSql( String.format( "create view V3 as 
select /*+ %s(V2)*/ T1.* from T1 join V2 on T1.a1 = V2.a1", getTestSingleJoinHint())); // the build side outside is left String sql = "select /*+ %s(V3)*/V3.* from V3 join T1 on V3.a1 = T1.a1"; verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint())); } @Test void testJoinHintsOnSameViewWithoutReusingView() { // the build side in this view is left util.tableEnv() .executeSql( String.format( "create view V2 as select /*+ %s(T1)*/ T1.* from T1 join T2 on T1.a1 = T2.a2", getTestSingleJoinHint())); util.tableEnv() .executeSql( "CREATE TABLE S1 (\n" + " a1 BIGINT,\n" + " b1 VARCHAR\n" + ") WITH (\n" + " 'connector' = 'values',\n" + " 'bounded' = 'true'\n" + ")"); util.tableEnv() .executeSql( "CREATE TABLE S2 (\n" + " a1 BIGINT,\n" + " b1 VARCHAR\n" + ") WITH (\n" + " 'connector' = 'values',\n" + " 'bounded' = 'true'\n" + ")"); StatementSet set = util.tableEnv().createStatementSet(); // the calc will be pushed down set.addInsertSql( String.format( "insert into S1 select /*+ %s(V2)*/ T1.* from T1 join V2 on T1.a1 = V2.a1 where V2.a1 > 2", getTestSingleJoinHint())); set.addInsertSql( String.format( "insert into S2 select /*+ %s(T1)*/ T1.* from T1 join V2 on T1.a1 = V2.a1 where V2.a1 > 5", getTestSingleJoinHint())); verifyRelPlanByCustom(set); } @Test void testJoinHintsOnSameViewWithReusingView() { util.tableEnv() .getConfig() .set( OptimizerConfigOptions .TABLE_OPTIMIZER_REUSE_OPTIMIZE_BLOCK_WITH_DIGEST_ENABLED, true); // the build side in this view is left util.tableEnv() .executeSql( String.format( "create view V2 as select /*+ %s(T1)*/ T1.* from T1 join T2 on T1.a1 = T2.a2", getTestSingleJoinHint())); util.tableEnv() .executeSql( "CREATE TABLE S1 (\n" + " a1 BIGINT,\n" + " b1 VARCHAR\n" + ") WITH (\n" + " 'connector' = 'values',\n" + " 'bounded' = 'true'\n" + ")"); util.tableEnv() .executeSql( "CREATE TABLE S2 (\n" + " a1 BIGINT,\n" + " b1 VARCHAR\n" + ") WITH (\n" + " 'connector' = 'values',\n" + " 'bounded' = 'true'\n" + ")"); StatementSet set = 
util.tableEnv().createStatementSet(); // the calc will be pushed down because the view has same digest set.addInsertSql( String.format( "insert into S1 select /*+ %s(V2)*/ T1.* from T1 join V2 on T1.a1 = V2.a1 where V2.a1 > 2", getTestSingleJoinHint())); set.addInsertSql( String.format( "insert into S2 select /*+ %s(T1)*/ T1.* from T1 join V2 on T1.a1 = V2.a1 where V2.a1 > 5", getTestSingleJoinHint())); verifyRelPlanByCustom(set); } @Test void testJoinHintsOnSameViewWithoutReusingViewBecauseDifferentJoinHints() { util.tableEnv() .getConfig() .set( OptimizerConfigOptions .TABLE_OPTIMIZER_REUSE_OPTIMIZE_BLOCK_WITH_DIGEST_ENABLED, true); // the build side in this view is left util.tableEnv() .executeSql( String.format( "create view V2 as select /*+ %s(T1)*/ T1.* from T1 join T2 on T1.a1 = T2.a2", getTestSingleJoinHint())); // the build side in this view is left // V2 and V3 have different join hints util.tableEnv() .executeSql( String.format( "create view V3 as select /*+ %s(T1)*/ T1.* from T1 join T2 on T1.a1 = T2.a2", getOtherJoinHints().get(0))); util.tableEnv() .executeSql( "CREATE TABLE S1 (\n" + " a1 BIGINT,\n" + " b1 VARCHAR\n" + ") WITH (\n" + " 'connector' = 'values',\n" + " 'bounded' = 'true'\n" + ")"); util.tableEnv() .executeSql( "CREATE TABLE S2 (\n" + " a1 BIGINT,\n" + " b1 VARCHAR\n" + ") WITH (\n" + " 'connector' = 'values',\n" + " 'bounded' = 'true'\n" + ")"); StatementSet set = util.tableEnv().createStatementSet(); // the calc will not be pushed down because the view has different digest set.addInsertSql( String.format( "insert into S1 select /*+ %s(V2)*/ T1.* from T1 join V2 on T1.a1 = V2.a1 where V2.a1 > 2", getTestSingleJoinHint())); set.addInsertSql( String.format( "insert into S2 select /*+ %s(T1)*/ T1.* from T1 join V3 on T1.a1 = V3.a1 where V3.a1 > 5", getOtherJoinHints().get(0))); verifyRelPlanByCustom(set); } @Test void testJoinHintWithSubStringViewName1() { util.tableEnv() .executeSql( String.format( "create view V2 as select /*+ %s(T1)*/ 
T1.* from T1 join T2 on T1.a1 = T2.a2", getTestSingleJoinHint())); // the build side in this view is right util.tableEnv() .executeSql( String.format( "create view V22 as select /*+ %s(V2)*/ T1.* from T1 join V2 on T1.a1 = V2.a1", getTestSingleJoinHint())); // the build side outside is left String sql = "select /*+ %s(V22)*/V22.* from V22 join T1 on V22.a1 = T1.a1"; verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint())); } @Test void testJoinHintWithSubStringViewName2() { util.tableEnv() .executeSql( String.format( "create view V22 as select /*+ %s(T1)*/ T1.* from T1 join T2 on T1.a1 = T2.a2", getTestSingleJoinHint())); // the build side in this view is right util.tableEnv() .executeSql( String.format( "create view V2 as select /*+ %s(V22)*/ T1.* from T1 join V22 on T1.a1 = V22.a1", getTestSingleJoinHint())); // the build side outside is left String sql = "select /*+ %s(V2)*/V2.* from V2 join T1 on V2.a1 = T1.a1"; verifyRelPlanByCustom(String.format(sql, getTestSingleJoinHint())); } @Test void testJoinHintWithoutCaseSensitive() { String sql = "select /*+ %s(T1) */* from T1 join T2 on T1.a1 = T2.a2"; verifyRelPlanByCustom(String.format(sql, buildCaseSensitiveStr(getTestSingleJoinHint()))); } @Test void testJoinHintWithJoinHintInSubQuery() { String sql = "select * from T1 WHERE a1 IN (select /*+ %s(T2) */ a2 from T2 join T3 on T2.a2 = T3.a3)"; verifyRelPlanByCustom(String.format(sql, buildCaseSensitiveStr(getTestSingleJoinHint()))); } @Test void testJoinHintWithJoinHintInCorrelateAndWithFilter() { String sql = "select * from T1 WHERE a1 IN (select /*+ %s(T2) */ a2 from T2 join T3 on T2.a2 = T3.a3 where T1.a1 = T2.a2)"; verifyRelPlanByCustom(String.format(sql, buildCaseSensitiveStr(getTestSingleJoinHint()))); } @Test void testJoinHintWithJoinHintInCorrelateAndWithProject() { String sql = "select * from T1 WHERE a1 IN (select /*+ %s(T2) */ a2 + T1.a1 from T2 join T3 on T2.a2 = T3.a3)"; verifyRelPlanByCustom(String.format(sql, 
buildCaseSensitiveStr(getTestSingleJoinHint()))); } @Test void testJoinHintWithJoinHintInCorrelateAndWithAgg() { String sql = "select * from T1 WHERE a1 IN (select /*+ %s(T2) */ count(T2.a2) from T2 join T1 on T2.a2 = T1.a1 group by T1.a1)"; verifyRelPlanByCustom(String.format(sql, buildCaseSensitiveStr(getTestSingleJoinHint()))); } @Test void testJoinHintWithJoinHintInCorrelateAndWithSortLimit() { String sql = "select * from T1 WHERE a1 IN (select /*+ %s(T2) */ T2.a2 from T2 join T1 on T2.a2 = T1.a1 order by T1.a1 limit 10)"; verifyRelPlanByCustom(String.format(sql, buildCaseSensitiveStr(getTestSingleJoinHint()))); } @Test public void testJoinHintWithJoinHintInNestedCorrelatedSubQuery() { String sql = "select * from T1 WHERE a1 IN (select /*+ %s(T2) */ a2 + T1.a1 from T2 join (select T3.* from T2 join T3 on T2.a2 = T3.a3) T3 on T2.a2 = T3.a3)"; verifyRelPlanByCustom(String.format(sql, buildCaseSensitiveStr(getTestSingleJoinHint()))); } protected String buildAstPlanWithQueryBlockAlias(List<RelNode> relNodes) { StringBuilder astBuilder = new StringBuilder(); relNodes.forEach( node -> astBuilder .append(System.lineSeparator()) .append( FlinkRelOptUtil.toString( node, SqlExplainLevel.EXPPLAN_ATTRIBUTES, false, false, true, false, true, false))); return astBuilder.toString(); } private String buildCaseSensitiveStr(String str) { char[] chars = str.toCharArray(); for (int i = 0; i < chars.length; i++) { boolean needCapitalize = i % 2 == 0; if (needCapitalize) { chars[i] = Character.toUpperCase(chars[i]); } else { chars[i] = Character.toLowerCase(chars[i]); } } return new String(chars); } }
apache/jena
36,573
jena-core/src/test/java/org/apache/jena/reasoner/rulesys/test/TestBackchainer.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jena.reasoner.rulesys.test; import junit.framework.TestCase; import junit.framework.TestSuite; import org.apache.jena.graph.Graph; import org.apache.jena.graph.GraphMemFactory; import org.apache.jena.graph.Node; import org.apache.jena.graph.NodeFactory; import org.apache.jena.graph.Triple; import org.apache.jena.reasoner.InfGraph; import org.apache.jena.reasoner.Reasoner; import org.apache.jena.reasoner.TriplePattern; import org.apache.jena.reasoner.rulesys.Functor; import org.apache.jena.reasoner.rulesys.LPBackwardRuleReasoner; import org.apache.jena.reasoner.rulesys.Node_RuleVariable; import org.apache.jena.reasoner.rulesys.Rule; import org.apache.jena.reasoner.rulesys.Util; import org.apache.jena.reasoner.rulesys.impl.BindingVector; import org.apache.jena.reasoner.test.TestUtil; import org.apache.jena.util.iterator.ExtendedIterator; import org.apache.jena.vocabulary.OWL; import org.apache.jena.vocabulary.RDF; import org.apache.jena.vocabulary.RDFS; import java.util.ArrayList; import java.util.List; /** * Test harness for the backward chainer. * Parameterizable in subclasses by overriding createReasoner. 
* The original version was developed for the original backchaining interpeter. * That has now been obsoleted at this is now used to double check the * LP engine, though the bulk of such tests are really done by TestBasicLP. */ public class TestBackchainer extends TestCase { // Maximum size of binding environment needed in the tests private static final int MAX_VARS = 10; // Useful constants protected Node p = NodeFactory.createURI("p"); protected Node q = NodeFactory.createURI("q"); protected Node r = NodeFactory.createURI("r"); protected Node s = NodeFactory.createURI("s"); protected Node t = NodeFactory.createURI("t"); protected Node a = NodeFactory.createURI("a"); protected Node b = NodeFactory.createURI("b"); protected Node c = NodeFactory.createURI("c"); protected Node d = NodeFactory.createURI("d"); protected Node C1 = NodeFactory.createURI("C1"); protected Node C2 = NodeFactory.createURI("C2"); protected Node C3 = NodeFactory.createURI("C3"); protected Node sP = RDFS.Nodes.subPropertyOf; protected Node sC = RDFS.Nodes.subClassOf; protected Node ty = RDF.Nodes.type; String testRules1 = "(?x ?q ?y) <- (?p rdfs:subPropertyOf ?q)(?x ?p ?y). " + "(?a rdfs:subPropertyOf ?c) <- (?a rdfs:subPropertyOf ?b)(?b rdfs:subPropertyOf ?c). "; String testRuleAxioms = "[ -> (p rdfs:subPropertyOf q)]" + "[ -> (q rdfs:subPropertyOf r) ]" + "[ -> (a p b) ]"; Triple[] dataElts = new Triple[] { Triple.create(p, sP, q), Triple.create(q, sP, r), Triple.create(a, p, b) }; /** * Boilerplate for junit */ public TestBackchainer( String name ) { super( name ); } /** * Boilerplate for junit. * This is its own test suite */ public static TestSuite suite() { return new TestSuite( TestBackchainer.class ); // TestSuite suite = new TestSuite(); // suite.addTest(new TestBackchainer( "testRDFSProblemsb" )); // return suite; } private static Graph createGraphForTest() { return GraphMemFactory.createDefaultGraph(); } /** * Override in subclasses to test other reasoners. 
*/ public Reasoner createReasoner(List<Rule> rules) { LPBackwardRuleReasoner reasoner = new LPBackwardRuleReasoner(rules); reasoner.tablePredicate(sP); reasoner.tablePredicate(sC); reasoner.tablePredicate(ty); reasoner.tablePredicate(p); reasoner.tablePredicate(a); reasoner.tablePredicate(b); return reasoner; } /** * Test parser modes to support backarrow notation are working */ public void testParse() { List<Rule> rules = Rule.parseRules(testRules1); assertEquals("BRule parsing", "[ (?x ?q ?y) <- (?p rdfs:subPropertyOf ?q) (?x ?p ?y) ]", rules.get(0).toString()); assertEquals("BRule parsing", "[ (?a rdfs:subPropertyOf ?c) <- (?a rdfs:subPropertyOf ?b) (?b rdfs:subPropertyOf ?c) ]", rules.get(1).toString()); } /** * Test goal/head unify operation. */ public void testUnify() { Node_RuleVariable xg = new Node_RuleVariable("?x", 0); Node_RuleVariable yg = new Node_RuleVariable("?y", 1); Node_RuleVariable zg = new Node_RuleVariable("?z", 2); Node_RuleVariable xh = new Node_RuleVariable("?x", 0); Node_RuleVariable yh = new Node_RuleVariable("?y", 1); Node_RuleVariable zh = new Node_RuleVariable("?z", 2); TriplePattern g1 = new TriplePattern(xg, p, yg); TriplePattern g2 = new TriplePattern(xg, p, xg); TriplePattern g3 = new TriplePattern( a, p, xg); TriplePattern g4 = new TriplePattern( a, p, b); TriplePattern h1 = new TriplePattern(xh, p, yh); TriplePattern h2 = new TriplePattern(xh, p, xh); TriplePattern h3 = new TriplePattern( a, p, xh); TriplePattern h4 = new TriplePattern( a, p, b); TriplePattern h5 = new TriplePattern(xh, p, a); doTestUnify(g1, h1, true, new Node[] {null, null}); doTestUnify(g1, h2, true, new Node[] {null, null}); doTestUnify(g1, h3, true, new Node[] {null, null}); doTestUnify(g1, h4, true, new Node[] {null, null}); doTestUnify(g1, h5, true, new Node[] {null, null}); doTestUnify(g2, h1, true, new Node[] {null, xh}); doTestUnify(g2, h2, true, new Node[] {null, null}); doTestUnify(g2, h3, true, new Node[] {a, null}); doTestUnify(g2, h4, false, null); 
doTestUnify(g2, h5, true, new Node[] {a, null});
        doTestUnify(g3, h1, true, new Node[] {a, null});
        doTestUnify(g3, h2, true, new Node[] {a, null});
        doTestUnify(g3, h3, true, new Node[] {null, null});
        doTestUnify(g3, h4, true, new Node[] {null, null});
        doTestUnify(g3, h5, true, new Node[] {a, null});
        doTestUnify(g4, h1, true, new Node[] {a, b});
        doTestUnify(g4, h2, false, null);
        doTestUnify(g4, h3, true, new Node[] {b});
        doTestUnify(g4, h4, true, null);
        doTestUnify(g4, h5, false, null);
        // Recursive case
        doTestUnify(h1, h1, true, new Node[] {null, null});
        // Wildcard case
        doTestUnify(new TriplePattern(null, null, null), h2, true, new Node[] {null, null});
        // Test functor cases as well!
        TriplePattern gf = new TriplePattern(xg, p, Functor.makeFunctorNode("f", new Node[]{xg, b}));
        TriplePattern hf1 = new TriplePattern(yh, p, Functor.makeFunctorNode("f", new Node[]{zh, b}));
        TriplePattern hf2 = new TriplePattern(yh, p, Functor.makeFunctorNode("f", new Node[]{a, yh}));
        TriplePattern hf3 = new TriplePattern(yh, p, Functor.makeFunctorNode("f", new Node[]{b, yh}));
        doTestUnify(gf, hf1, true, new Node[] {null, null, yh});
        doTestUnify(gf, hf2, false, null);
        doTestUnify(gf, hf3, true, new Node[] {null, b});
        // Check binding environment use
        BindingVector env = BindingVector.unify(g2, h1, MAX_VARS);
        env.bind(xh, c);
        assertEquals(env.getBinding(yh), c);
        env = BindingVector.unify(g2, h1, MAX_VARS);
        env.bind(yh, c);
        assertEquals(env.getBinding(xh), c);
    }

    /**
     * Helper for testUnify. Unifies the two patterns and, on expected success,
     * checks each slot of the resulting binding environment against the
     * supplied expectations.
     * @param goal goal triple pattern
     * @param head head triple pattern
     * @param succeed whether the match should succeed or fail
     * @param env list of expected environment bindings; a null entry means
     *            "slot is expected to be unbound"; a null array skips the
     *            environment check entirely
     */
    private void doTestUnify(TriplePattern goal, TriplePattern head, boolean succeed, Node[] env) {
        BindingVector result = BindingVector.unify(goal, head, MAX_VARS);
        if (succeed) {
            assertNotNull(result);
            if (env != null) {
                // Compare each expected slot against the actual environment.
                for (int i = 0; i < env.length; i++) {
                    Node n = result.getEnvironment()[i];
                    if (env[i] != null) {
                        assertEquals(env[i], n);
                    } else {
                        assertNull(n);
                    }
                }
            }
        } else {
            assertNull(result);
        }
    }

    /**
     * Check that a reasoner over an empty rule set accesses
     * the raw data successfully.
     */
    public void testListData() {
        Graph data = createGraphForTest();
        for ( Triple dataElt : dataElts ) {
            data.add( dataElt );
        }
        Graph schema = createGraphForTest();
        schema.add(Triple.create(c, p, c));

        // Case of schema and data but no rule axioms
        Reasoner reasoner = createReasoner(new ArrayList<Rule>());
        InfGraph infgraph = reasoner.bindSchema(schema).bind(data);
        TestUtil.assertIteratorValues(this, infgraph.find(null, null, null),
            new Object[] {
                Triple.create(p, sP, q),
                Triple.create(q, sP, r),
                Triple.create(a, p, b),
                Triple.create(c, p, c)});

        // Case of data and rule axioms but no schema
        List<Rule> rules = Rule.parseRules("-> (d p d).");
        reasoner = createReasoner(rules);
        infgraph = reasoner.bind(data);
        TestUtil.assertIteratorValues(this, infgraph.find(null, null, null),
            new Object[] {
                Triple.create(p, sP, q),
                Triple.create(q, sP, r),
                Triple.create(a, p, b),
                Triple.create(d, p, d)});

        // Case of data and rule axioms and schema
        infgraph = reasoner.bindSchema(schema).bind(data);
        TestUtil.assertIteratorValues(this, infgraph.find(null, null, null),
            new Object[] {
                Triple.create(p, sP, q),
                Triple.create(q, sP, r),
                Triple.create(a, p, b),
                Triple.create(c, p, c),
                Triple.create(d, p, d)});
    }

    /**
     * Test basic rule operations - simple AND rule
     */
    public void testBaseRules1() {
        // Single rule whose body is a conjunction of two patterns.
        List<Rule> rules = Rule.parseRules("[r1: (?a r ?c) <- (?a p ?b),(?b p ?c)]");
        Graph data = createGraphForTest();
        data.add(Triple.create(a, p, b));
        data.add(Triple.create(b, p, c));
        data.add(Triple.create(b, p, d));
        Reasoner reasoner = createReasoner(rules);
        InfGraph infgraph = reasoner.bind(data);
        TestUtil.assertIteratorValues(this, infgraph.find(null, r, null),
            new Object[] {
                Triple.create(a, r, c),
                Triple.create(a, r, d)
            } );
    }

    /**
     * Test basic rule operations - simple OR rule
     */
    public void testBaseRules2() {
        // Disjunction expressed as three independent rules with the same head predicate.
        List<Rule> rules = Rule.parseRules(
            "[r1: (?a r ?b) <- (?a p ?b)]" +
            "[r2: (?a r ?b) <- (?a q ?b)]" +
            "[r3: (?a r ?b) <- (?a s ?c), (?c s ?b)]"
        );
        Graph data = createGraphForTest();
        data.add(Triple.create(a, p, b));
        data.add(Triple.create(b, q, c));
        data.add(Triple.create(a, s, b));
        data.add(Triple.create(b, s, d));
        Reasoner reasoner = createReasoner(rules);
        InfGraph infgraph = reasoner.bind(data);
        TestUtil.assertIteratorValues(this, infgraph.find(null, r, null),
            new Object[] {
                Triple.create(a, r, b),
                Triple.create(b, r, c),
                Triple.create(a, r, d)
            } );
    }

    /**
     * Test basic rule operations - simple OR rule with chaining
     */
    public void testBaseRules2b() {
        // As testBaseRules2 but r3 goes through intermediate predicate t, derived by r4.
        List<Rule> rules = Rule.parseRules(
            "[r1: (?a r ?b) <- (?a p ?b)]" +
            "[r2: (?a r ?b) <- (?a q ?b)]" +
            "[r3: (?a r ?b) <- (?a t ?c), (?c t ?b)]" +
            "[r4: (?a t ?b) <- (?a s ?b)]"
        );
        Graph data = createGraphForTest();
        data.add(Triple.create(a, p, b));
        data.add(Triple.create(b, q, c));
        data.add(Triple.create(a, s, b));
        data.add(Triple.create(b, s, d));
        Reasoner reasoner = createReasoner(rules);
        InfGraph infgraph = reasoner.bind(data);
        TestUtil.assertIteratorValues(this, infgraph.find(null, r, null),
            new Object[] {
                Triple.create(a, r, b),
                Triple.create(b, r, c),
                Triple.create(a, r, d)
            } );
    }

    /**
     * Test basic rule operations - simple AND rule check with tabling.
*/
    public void testBaseRules3() {
        // Transitive closure of rdfs:subPropertyOf via a recursive rule;
        // exercises the engine's tabling to avoid infinite recursion.
        List<Rule> rules = Rule.parseRules("[rule: (?a rdfs:subPropertyOf ?c) <- (?a rdfs:subPropertyOf ?b),(?b rdfs:subPropertyOf ?c)]");
        Reasoner reasoner = createReasoner(rules);
        Graph data = createGraphForTest();
        data.add(Triple.create(p, sP, q) );
        data.add(Triple.create(q, sP, r) );
        data.add(Triple.create(p, sP, s) );
        data.add(Triple.create(s, sP, t) );
        data.add(Triple.create(a, p, b) );
        InfGraph infgraph = reasoner.bind(data);
        TestUtil.assertIteratorValues(this,
            infgraph.find(null, RDFS.subPropertyOf.asNode(), null),
            new Object[] {
                Triple.create(p, sP, q),
                Triple.create(q, sP, r),
                Triple.create(p, sP, s),
                Triple.create(s, sP, t),
                Triple.create(p, sP, t),
                Triple.create(p, sP, r)
            } );
    }

    /**
     * Test basic rule operations - simple AND rule check with tabling.
     */
    public void testBaseRules3b() {
        // Same recursive subPropertyOf rule but with a branching hierarchy.
        List<Rule> rules = Rule.parseRules("[rule: (?a rdfs:subPropertyOf ?c) <- (?a rdfs:subPropertyOf ?b),(?b rdfs:subPropertyOf ?c)]");
        Reasoner reasoner = createReasoner(rules);
        Graph data = createGraphForTest();
        data.add(Triple.create(p, sP, q) );
        data.add(Triple.create(q, sP, r) );
        data.add(Triple.create(r, sP, t) );
        data.add(Triple.create(q, sP, s) );
        InfGraph infgraph = reasoner.bind(data);
        TestUtil.assertIteratorValues(this,
            infgraph.find(null, RDFS.subPropertyOf.asNode(), null),
            new Object[] {
                Triple.create(p, sP, q),
                Triple.create(q, sP, r),
                Triple.create(r, sP, t),
                Triple.create(q, sP, s),
                Triple.create(p, sP, s),
                Triple.create(p, sP, r),
                Triple.create(p, sP, t),
                Triple.create(q, sP, t),
                // NOTE(review): (p sP r) appears twice in this expected array —
                // likely redundant; confirm assertIteratorValues treats the
                // expectation as a set before removing it.
                Triple.create(p, sP, r)
            } );
    }

    /**
     * Test basic rule operations - simple AND/OR with tabling.
*/
    public void testBaseRules4() {
        // Transitive reachability over r (including a self-loop at b) must
        // terminate thanks to tabling of the recursive p goal.
        Graph data = createGraphForTest();
        data.add(Triple.create(a, r, b));
        data.add(Triple.create(b, r, c));
        data.add(Triple.create(b, r, b));
        data.add(Triple.create(b, r, d));
        List<Rule> rules = Rule.parseRules(
            "[r1: (?x p ?y) <- (?x r ?y)]" +
            "[r2: (?x p ?z) <- (?x p ?y), (?y r ?z)]"
        );
        Reasoner reasoner = createReasoner(rules);
        InfGraph infgraph = reasoner.bind(data);
        TestUtil.assertIteratorValues(this,
            infgraph.find(a, p, null),
            new Object[] {
                Triple.create(a, p, b),
                Triple.create(a, p, d),
                Triple.create(a, p, c)
            } );
    }

    /**
     * Test basic rule operations - simple AND/OR with tabling.
     */
    public void testBaseRulesXSB1() {
        // Mutually recursive predicates a and b — the classic XSB tabling example.
        Graph data = createGraphForTest();
        data.add(Triple.create(p, c, q));
        data.add(Triple.create(q, c, r));
        data.add(Triple.create(p, d, q));
        data.add(Triple.create(q, d, r));
        List<Rule> rules = Rule.parseRules(
            "[r1: (?x a ?y) <- (?x c ?y)]" +
            "[r2: (?x a ?y) <- (?x b ?z), (?z c ?y)]" +
            "[r3: (?x b ?y) <- (?x d ?y)]" +
            "[r4: (?x b ?y) <- (?x a ?z), (?z d ?y)]"
        );
        Reasoner reasoner = createReasoner(rules);
        InfGraph infgraph = reasoner.bind(data);
        TestUtil.assertIteratorValues(this,
            infgraph.find(p, a, null),
            new Object[] {
                Triple.create(p, a, q),
                Triple.create(p, a, r)
            } );
    }

    /**
     * Test basic functor usage.
     */
    public void testFunctors1() {
        // r1 packs two values into functor f(?y,?z); r2 unpacks the first.
        Graph data = createGraphForTest();
        data.add(Triple.create(a, p, b));
        data.add(Triple.create(a, q, c));
        List<Rule> rules = Rule.parseRules(
            "[r1: (?x r f(?y,?z)) <- (?x p ?y), (?x q ?z)]" +
            "[r2: (?x s ?y) <- (?x r f(?y, ?z))]"
        );
        Reasoner reasoner = createReasoner(rules);
        InfGraph infgraph = reasoner.bind(data);
        TestUtil.assertIteratorValues(this,
            infgraph.find(a, s, null),
            new Object[] {
                Triple.create(a, s, b)
            } );
    }

    /**
     * Test basic functor usage.
*/
    public void testFunctors2() {
        // Two distinct functors (f and g) carrying values to the same head
        // predicate s — checks that functor names discriminate matches.
        Graph data = createGraphForTest();
        data.add(Triple.create(a, p, b));
        data.add(Triple.create(a, q, c));
        data.add(Triple.create(a, t, d));
        List<Rule> rules = Rule.parseRules(
            "[r1: (?x r f(?y,?z)) <- (?x p ?y), (?x q ?z)]" +
            "[r2: (?x s ?y) <- (?x r f(?y, ?z))]" +
            "[r3: (?x r g(?y,?z)) <- (?x p ?y), (?x t ?z)]" +
            "[r4: (?x s ?z) <- (?x r g(?y, ?z))]"
        );
        Reasoner reasoner = createReasoner(rules);
        InfGraph infgraph = reasoner.bind(data);
        TestUtil.assertIteratorValues(this,
            infgraph.find(a, s, null),
            new Object[] {
                Triple.create(a, s, b),
                Triple.create(a, s, d)
            } );
    }

    /**
     * Test basic functor usage.
     */
    public void testFunctors3() {
        // Functor value built by r1 is routed through r2 before being
        // deconstructed by r3 — checks functor propagation across rules.
        Graph data = createGraphForTest();
        data.add(Triple.create(a, s, b));
        data.add(Triple.create(a, t, c));
        List<Rule> rules = Rule.parseRules(
            "[r1: (a q f(?x,?y)) <- (a s ?x), (a t ?y)]" +
            "[r2: (a p ?x) <- (a q ?x)]" +
            "[r3: (a r ?y) <- (a p f(?x, ?y))]"
        );
        Reasoner reasoner = createReasoner(rules);
        InfGraph infgraph = reasoner.bind(data);
        TestUtil.assertIteratorValues(this,
            infgraph.find(a, r, null),
            new Object[] {
                Triple.create(a, r, c)
            } );
    }

    /**
     * Test basic builtin usage.
     */
    public void testBuiltin1() {
        // Axiom rules (empty body) assert the operands; the sum builtin
        // computes 2 + 3 = 5 in the rule body.
        Graph data = createGraphForTest();
        List<Rule> rules = Rule.parseRules(
            "[a1: -> (a p 2) ]" +
            "[a2: -> (a q 3) ]" +
            "[r1: (?x r ?s) <- (?x p ?y), (?x q ?z), sum(?y, ?z, ?s)]"
        );
        Reasoner reasoner = createReasoner(rules);
        InfGraph infgraph = reasoner.bind(data);
        TestUtil.assertIteratorValues(this,
            infgraph.find(a, r, null),
            new Object[] {
                Triple.create(a, r, Util.makeIntNode(5))
            } );
    }

    /**
     * Test basic builtin usage.
*/
    public void testBuiltin2() {
        // bound/unbound builtins select different rule bodies depending on
        // whether the query supplies a concrete subject.
        Graph data = createGraphForTest();
        data.add(Triple.create(a, p, b));
        data.add(Triple.create(a, q, c));
        List<Rule> rules = Rule.parseRules(
            "[r1: (?x r ?y ) <- bound(?x), (?x p ?y) ]" +
            "[r2: (?x r ?y) <- unbound(?x), (?x q ?y)]"
        );
        Reasoner reasoner = createReasoner(rules);
        InfGraph infgraph = reasoner.bind(data);
        // Subject bound in the query -> r1 fires.
        TestUtil.assertIteratorValues(this,
            infgraph.find(a, r, null),
            new Object[] {
                Triple.create(a, r, b)
            } );
        // Subject unbound in the query -> r2 fires.
        TestUtil.assertIteratorValues(this,
            infgraph.find(null, r, null),
            new Object[] {
                Triple.create(a, r, c)
            } );
    }

    /**
     * Test basic builtin usage.
     */
    public void testBuiltin3() {
        // unbound on a variable that never gets bound always succeeds,
        // so the ground fact (a p b) is derivable from an empty graph.
        Graph data = createGraphForTest();
        List<Rule> rules = Rule.parseRules(
            "[r1: (a p b ) <- unbound(?x) ]"
        );
        Reasoner reasoner = createReasoner(rules);
        InfGraph infgraph = reasoner.bind(data);
        TestUtil.assertIteratorValues(this,
            infgraph.find(a, null, null),
            new Object[] {
                Triple.create(a, p, b)
            } );
    }

    /**
     * Test basic ground head patterns.
     */
    public void testGroundHead() {
        // Rule with a fully ground head and body; result must include both
        // the derived triple and the raw data triple.
        Graph data = createGraphForTest();
        data.add(Triple.create(a, r, b));
        List<Rule> rules = Rule.parseRules(
            "[r1: (a p b ) <- (a r b) ]"
        );
        Reasoner reasoner = createReasoner(rules);
        InfGraph infgraph = reasoner.bind(data);
        TestUtil.assertIteratorValues(this,
            infgraph.find(a, null, null),
            new Object[] {
                Triple.create(a, p, b),
                Triple.create(a, r, b)
            } );
    }

//    /**
//     * Test multiheaded rule.
//     */
//    public void testMutliHead() {
//        Graph data = createGraphForTest();
//        data.add(Triple.create(a, p, b));
//        data.add(Triple.create(b, r, c));
//        List<Rule> rules = Rule.parseRules(
//            "[r1: (?x s ?z), (?z s ?x) <- (?x p ?y) (?y r ?z) ]"
//        );
//        Reasoner reasoner = createReasoner(rules);
//        InfGraph infgraph = reasoner.bind(data);
//        TestUtil.assertIteratorValues(this,
//            infgraph.find(null, s, null),
//            new Object[] {
//                Triple.create(a, s, c),
//                Triple.create(c, s, a)
//            } );
//    }

    /**
     * Test rebind operation
     */
    public void testRebind() {
        List<Rule> rules = Rule.parseRules("[r1: (?a r ?c) <- (?a p ?b),(?b p ?c)]");
        Graph data = createGraphForTest();
        data.add(Triple.create(a, p, b));
        data.add(Triple.create(b, p, c));
        data.add(Triple.create(b, p, d));
        Reasoner reasoner = createReasoner(rules);
        InfGraph infgraph = reasoner.bind(data);
        TestUtil.assertIteratorValues(this,
            infgraph.find(null, r, null),
            new Object[] {
                Triple.create(a, r, c),
                Triple.create(a, r, d)
            } );
        // Rebinding to fresh data must fully discard results derived
        // from the previous graph.
        Graph ndata = createGraphForTest();
        ndata.add(Triple.create(a, p, d));
        ndata.add(Triple.create(d, p, b));
        infgraph.rebind(ndata);
        TestUtil.assertIteratorValues(this,
            infgraph.find(null, r, null),
            new Object[] {
                Triple.create(a, r, b)
            } );
    }

    /**
     * Test troublesome rdfs rules
     */
    public void testRDFSProblemsb() {
        // rdfs7 introduces reflexive subClassOf triples; rdfs8 closes the
        // hierarchy transitively — a known-awkward interaction.
        Graph data = createGraphForTest();
        data.add(Triple.create(C1, sC, C2));
        data.add(Triple.create(C2, sC, C3));
        data.add(Triple.create(C1, ty, RDFS.Class.asNode()));
        data.add(Triple.create(C2, ty, RDFS.Class.asNode()));
        data.add(Triple.create(C3, ty, RDFS.Class.asNode()));
        List<Rule> rules = Rule.parseRules(
            "[rdfs8: (?a rdfs:subClassOf ?b), (?b rdfs:subClassOf ?c) -> (?a rdfs:subClassOf ?c)]" +
            "[rdfs7: (?a rdf:type rdfs:Class) -> (?a rdfs:subClassOf ?a)]"
        );
        Reasoner reasoner = createReasoner(rules);
        InfGraph infgraph = reasoner.bind(data);
        TestUtil.assertIteratorValues(this,
            infgraph.find(null, sC, null),
            new Object[] {
                Triple.create(C1, sC, C2),
                Triple.create(C1, sC, C3),
                Triple.create(C1, sC, C1),
                Triple.create(C2, sC, C3),
                Triple.create(C2, sC, C2),
                Triple.create(C3, sC, C3),
            } );
    }

    /**
     * Test troublesome rdfs rules
     */
    public void testRDFSProblems() {
        Graph data = createGraphForTest();
        data.add(Triple.create(p, sP, q));
        data.add(Triple.create(q, sP, r));
        data.add(Triple.create(C1, sC, C2));
        data.add(Triple.create(C2, sC, C3));
        data.add(Triple.create(a, ty, C1));
        List<Rule> rules = Rule.parseRules(
            "[rdfs8: (?a rdfs:subClassOf ?b), (?b rdfs:subClassOf ?c) -> (?a rdfs:subClassOf ?c)]" +
            "[rdfs9: (?x rdfs:subClassOf ?y), (?a rdf:type ?x) -> (?a rdf:type ?y)]" +
//            "[-> (rdf:type rdfs:range rdfs:Class)]" +
            "[rdfs3: (?x ?p ?y), (?p rdfs:range ?c) -> (?y rdf:type ?c)]" +
            "[rdfs7: (?a rdf:type rdfs:Class) -> (?a rdfs:subClassOf ?a)]"
        );
        Reasoner reasoner = createReasoner(rules);
        InfGraph infgraph = reasoner.bind(data);
        TestUtil.assertIteratorValues(this,
            infgraph.find(a, ty, null),
            new Object[] {
                Triple.create(a, ty, C1),
                Triple.create(a, ty, C2),
                Triple.create(a, ty, C3)
            } );
        // No spurious subClassOf triples with a as object.
        TestUtil.assertIteratorValues(this,
            infgraph.find(C1, sC, a),
            new Object[] {
            } );
    }

    /**
     * Test complex rule head unification
     */
    public void testHeadUnify() {
        // Case 1: goal f(?x b) cannot unify with derivable head f(a ?y) — no results.
        Graph data = createGraphForTest();
        data.add(Triple.create(c, q, d));
        List<Rule> rules = Rule.parseRules(
            "[r1: (c r ?x) <- (?x p f(?x b))]" +
            "[r2: (?y p f(a ?y)) <- (c q ?y)]"
        );
        Reasoner reasoner = createReasoner(rules);
        InfGraph infgraph = reasoner.bind(data);
        TestUtil.assertIteratorValues(this,
            infgraph.find(c, r, null), new Object[] { } );
        // Case 2: f(?x a) unifies with f(a ?y) only when ?x = ?y = a.
        data.add(Triple.create(c, q, a));
        rules = Rule.parseRules(
            "[r1: (c r ?x) <- (?x p f(?x a))]" +
            "[r2: (?y p f(a ?y)) <- (c q ?y)]"
        );
        reasoner = createReasoner(rules);
        infgraph = reasoner.bind(data);
        TestUtil.assertIteratorValues(this,
            infgraph.find(c, r, null),
            new Object[] {
                Triple.create(c, r, a)
            } );
        // Case 3: self-match (?x p ?x) against a derived cross product.
        data = createGraphForTest();
        data.add(Triple.create(a, q, a));
        data.add(Triple.create(a, q, b));
        data.add(Triple.create(a, q, c));
        data.add(Triple.create(b, q, d));
        data.add(Triple.create(b, q, b));
        rules = Rule.parseRules(
            "[r1: (c r ?x) <- (?x p ?x)]" +
            "[r2: (?x p ?y) <- (a q ?x), (b q ?y)]"
        );
        reasoner = createReasoner(rules);
        infgraph = reasoner.bind(data);
        TestUtil.assertIteratorValues(this,
            infgraph.find(c, r, null),
            new Object[] {
                Triple.create(c, r, b)
            } );
        // Case 4: self-match where the derived head has a ground subject.
        rules = Rule.parseRules(
            "[r1: (c r ?x) <- (?x p ?x)]" +
            "[r2: (a p ?x) <- (a q ?x)]"
        );
        reasoner = createReasoner(rules);
        infgraph = reasoner.bind(data);
        TestUtil.assertIteratorValues(this,
            infgraph.find(c, r, null),
            new Object[] {
                Triple.create(c, r, a)
            } );
    }

    /**
     * Test restriction example
     */
    public void testRestriction1() {
        // owl:allValuesFrom encoded as the functor all(?P, ?D): a is in a
        // subclass of the restriction, so its p-value b must be of type c.
        Graph data = createGraphForTest();
        data.add(Triple.create(a, ty, r));
        data.add(Triple.create(a, p, b));
        data.add(Triple.create(r, sC, C1));
        data.add(Triple.create(C1, OWL.onProperty.asNode(), p));
        data.add(Triple.create(C1, OWL.allValuesFrom.asNode(), c));
        List<Rule> rules = Rule.parseRules(
            "[rdfs9: (?x rdfs:subClassOf ?y) (?a rdf:type ?x) -> (?a rdf:type ?y)]" +
            "[restriction2: (?C owl:onProperty ?P), (?C owl:allValuesFrom ?D) -> (?C owl:equivalentClass all(?P, ?D))]" +
            "[rs2: (?D owl:equivalentClass all(?P,?C)), (?X rdf:type ?D) -> (?X rdf:type all(?P,?C))]" +
            "[rp4: (?X rdf:type all(?P, ?C)), (?X ?P ?Y) -> (?Y rdf:type ?C)]"
        );
        Reasoner reasoner = createReasoner(rules);
        InfGraph infgraph = reasoner.bind(data);
        TestUtil.assertIteratorValues(this,
            infgraph.find(b, ty, c),
            new Object[] {
                Triple.create(b, ty, c)
            } );
    }

    /**
     * Test restriction example. The rules are more than the minimum required
     * to solve the query and they interact to give run-away searches if there
     * is a problem.
*/
    public void testRestriction2() {
        // owl:maxCardinality encoded as functor max(?P, ?X); a functional
        // property on an owl:Thing must place a in the max(p, 1) class C1,
        // and equivalently in c. The extra "noise" rules probe termination.
        Graph data = createGraphForTest();
        data.add(Triple.create(a, ty, OWL.Thing.asNode()));
        data.add(Triple.create(p, ty, OWL.FunctionalProperty.asNode()));
        data.add(Triple.create(c, OWL.equivalentClass.asNode(), C1));
        data.add(Triple.create(C1, ty, OWL.Restriction.asNode()));
        data.add(Triple.create(C1, OWL.onProperty.asNode(), p));
        data.add(Triple.create(C1, OWL.maxCardinality.asNode(), Util.makeIntNode(1)));
        List<Rule> rules = Rule.parseRules(
            // these ones are required for the inference.
            "[rdfs9: bound(?y) (?x rdfs:subClassOf ?y) (?a rdf:type ?x) -> (?a rdf:type ?y)]" +
            "[restriction4: (?C rdf:type owl:Restriction), (?C owl:onProperty ?P), (?C owl:maxCardinality ?X) -> (?C owl:equivalentClass max(?P, ?X))]" +
            "[restrictionProc11: (?P rdf:type owl:FunctionalProperty), (?X rdf:type owl:Thing) -> (?X rdf:type max(?P, 1))]" +
//            "[equivalentClass1: (?P owl:equivalentClass ?Q) -> (?P rdfs:subClassOf ?Q), (?Q rdfs:subClassOf ?P) ]" +
            "[equivalentClass1: (?P owl:equivalentClass ?Q) -> (?P rdfs:subClassOf ?Q) ]" +
            "[equivalentClass1: (?P owl:equivalentClass ?Q) -> (?Q rdfs:subClassOf ?P) ]" +
            "[restrictionSubclass1: bound(?D) (?D owl:equivalentClass ?R), isFunctor(?R) (?X rdf:type ?R)-> (?X rdf:type ?D)]" +
            // these ones are noise which can cause run aways or failures if there are bugs
            "[rdfs8: unbound(?c) (?a rdfs:subClassOf ?b) (?b rdfs:subClassOf ?c) -> (?a rdfs:subClassOf ?c)]" +
            "[rdfs8: bound(?c) (?b rdfs:subClassOf ?c) (?a rdfs:subClassOf ?b) -> (?a rdfs:subClassOf ?c)]" +
            "[rdfs9: unbound(?y) (?a rdf:type ?x) (?x rdfs:subClassOf ?y) -> (?a rdf:type ?y)]" +
            "[-> (rdf:type rdfs:range rdfs:Class)]" +
            "[rdfs3: bound(?c) (?p rdfs:range ?c) (?x ?p ?y) -> (?y rdf:type ?c)]" +
            "[rdfs7: (?a rdf:type rdfs:Class) -> (?a rdfs:subClassOf ?a)]" +
            "[restrictionProc13: (owl:Thing rdfs:subClassOf all(?P, ?C)) -> (?P rdfs:range ?C)]" +
            "[restrictionSubclass1: unbound(?D) (?X rdf:type ?R), isFunctor(?R) (?D owl:equivalentClass ?R) -> (?X rdf:type ?D)]" +
            "[restrictionSubclass2: bound(?R), isFunctor(?R), (?D owl:equivalentClass ?R),(?X rdf:type ?D) -> (?X rdf:type ?R)]" +
            "[restrictionSubclass2: unbound(?R), (?X rdf:type ?D), (?D owl:equivalentClass ?R) isFunctor(?R) -> (?X rdf:type ?R)]" +
            ""
        );
        Reasoner reasoner = createReasoner(rules);
        InfGraph infgraph = reasoner.bind(data);
        TestUtil.assertIteratorValues(this,
            infgraph.find(a, ty, C1),
            new Object[] {
                Triple.create(a, ty, C1)
            } );
        TestUtil.assertIteratorValues(this,
            infgraph.find(a, ty, c),
            new Object[] {
                Triple.create(a, ty, c)
            } );
    }

    /**
     * Test restriction example
     */
    public void testRestriction3() {
        // Negative case: the allValuesFrom machinery is incomplete here (no
        // restriction2/rs2 rules), so nothing may be typed as c.
        Graph data = createGraphForTest();
        data.add(Triple.create(a, ty, r));
        data.add(Triple.create(r, sC, C1));
        data.add(Triple.create(C1, ty, OWL.Restriction.asNode()));
        data.add(Triple.create(C1, OWL.onProperty.asNode(), p));
        data.add(Triple.create(C1, OWL.allValuesFrom.asNode(), c));
        List<Rule> rules = Rule.parseRules(
            "[-> (rdfs:subClassOf rdfs:range rdfs:Class)]" +
//            "[-> (owl:Class rdfs:subClassOf rdfs:Class)]" +
            "[rdfs3: bound(?c) (?p rdfs:range ?c) (?x ?p ?y) -> (?y rdf:type ?c)]" +
            "[rdfs3: unbound(?c) (?x ?p ?y), (?p rdfs:range ?c) -> (?y rdf:type ?c)]" +
            "[rdfs7: (?a rdf:type rdfs:Class) -> (?a rdfs:subClassOf ?a)]" +
            "[rdfs8: (?a rdfs:subClassOf ?b) (?b rdfs:subClassOf ?c) -> (?a rdfs:subClassOf ?c)]" +
            "[restrictionProc4b: bound(?Y) (?X ?P ?Y), notEqual(?P, rdf:type), (?X rdf:type all(?P, ?C)),-> (?Y rdf:type ?C)]" +
            "[restrictionProc4b: unbound(?Y), (?X rdf:type all(?P, ?C)), (?X ?P ?Y), notEqual(?P, rdf:type),-> (?Y rdf:type ?C)]" +
            ""
        );
        Reasoner reasoner = createReasoner(rules);
        InfGraph infgraph = reasoner.bind(data);
        TestUtil.assertIteratorValues(this,
            infgraph.find(null, ty, c),
            new Object[] {
            } );
    }

    /**
     * Test close and halt operation.
*/
    public void testClose() {
        Graph data = createGraphForTest();
        data.add(Triple.create(p, sP, q));
        data.add(Triple.create(q, sP, r));
        data.add(Triple.create(C1, sC, C2));
        data.add(Triple.create(C2, sC, C3));
        data.add(Triple.create(a, ty, C1));
        data.add(Triple.create(ty, RDFS.range.asNode(), RDFS.Class.asNode()));
        List<Rule> rules = Rule.parseRules(
            "[rdfs8: (?a rdfs:subClassOf ?b), (?b rdfs:subClassOf ?c) -> (?a rdfs:subClassOf ?c)]" +
            "[rdfs9: (?x rdfs:subClassOf ?y), (?a rdf:type ?x) -> (?a rdf:type ?y)]" +
//            "[-> (rdf:type rdfs:range rdfs:Class)]" +
            "[rdfs3: (?x ?p ?y), (?p rdfs:range ?c) -> (?y rdf:type ?c)]" +
            "[rdfs7: (?a rdf:type rdfs:Class) -> (?a rdfs:subClassOf ?a)]"
        );
        Reasoner reasoner = createReasoner(rules);
        InfGraph infgraph = reasoner.bind(data);
        // Get just one result, then close the iterator early — the engine
        // must halt the search cleanly without poisoning later queries.
        ExtendedIterator<Triple> it = infgraph.find(a, ty, null);
        Triple result = it.next();
        assertEquals(result.getSubject(), a);
        assertEquals(result.getPredicate(), ty);
        it.close();
        // Make sure if we start again we get the full listing.
        TestUtil.assertIteratorValues(this,
            infgraph.find(a, ty, null),
            new Object[] {
                Triple.create(a, ty, C1),
                Triple.create(a, ty, C2),
                Triple.create(a, ty, C3)
            } );
    }

    /**
     * Test problematic rdfs case
     */
    public void testBug1() {
        Graph data = createGraphForTest();
        // Local nodes shadow the class-level p/r/C1 fields — deliberate: this
        // regression case uses the original full URIs from the bug report.
        // NOTE(review): local `r` is declared but never used in this method.
        Node p = NodeFactory.createURI("http://www.hpl.hp.com/semweb/2003/eg#p");
        Node r = NodeFactory.createURI("http://www.hpl.hp.com/semweb/2003/eg#r");
        Node C1 = NodeFactory.createURI("http://www.hpl.hp.com/semweb/2003/eg#C1");
        data.add(Triple.create(a, p, b));
        List<Rule> rules = Rule.parseRules(Util.loadRuleParserFromResourceFile("testing/reasoners/bugs/rdfs-error1.brules"));
        Reasoner reasoner = createReasoner(rules);
        InfGraph infgraph = reasoner.bind(data);
        TestUtil.assertIteratorValues(this,
            infgraph.find(b, ty, C1),
            new Object[] {
                Triple.create(b, ty, C1)
            } );
    }

}
googleapis/google-cloud-java
36,473
java-retail/proto-google-cloud-retail-v2alpha/src/main/java/com/google/cloud/retail/v2alpha/PurgeProductsMetadata.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/retail/v2alpha/purge_config.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.retail.v2alpha; /** * * * <pre> * Metadata related to the progress of the PurgeProducts operation. * This will be returned by the google.longrunning.Operation.metadata field. * </pre> * * Protobuf type {@code google.cloud.retail.v2alpha.PurgeProductsMetadata} */ public final class PurgeProductsMetadata extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.retail.v2alpha.PurgeProductsMetadata) PurgeProductsMetadataOrBuilder { private static final long serialVersionUID = 0L; // Use PurgeProductsMetadata.newBuilder() to construct. 
private PurgeProductsMetadata(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private PurgeProductsMetadata() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new PurgeProductsMetadata(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.retail.v2alpha.PurgeConfigProto .internal_static_google_cloud_retail_v2alpha_PurgeProductsMetadata_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.retail.v2alpha.PurgeConfigProto .internal_static_google_cloud_retail_v2alpha_PurgeProductsMetadata_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.retail.v2alpha.PurgeProductsMetadata.class, com.google.cloud.retail.v2alpha.PurgeProductsMetadata.Builder.class); } private int bitField0_; public static final int CREATE_TIME_FIELD_NUMBER = 1; private com.google.protobuf.Timestamp createTime_; /** * * * <pre> * Operation create time. * </pre> * * <code>.google.protobuf.Timestamp create_time = 1;</code> * * @return Whether the createTime field is set. */ @java.lang.Override public boolean hasCreateTime() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Operation create time. * </pre> * * <code>.google.protobuf.Timestamp create_time = 1;</code> * * @return The createTime. */ @java.lang.Override public com.google.protobuf.Timestamp getCreateTime() { return createTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : createTime_; } /** * * * <pre> * Operation create time. * </pre> * * <code>.google.protobuf.Timestamp create_time = 1;</code> */ @java.lang.Override public com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder() { return createTime_ == null ? 
com.google.protobuf.Timestamp.getDefaultInstance() : createTime_; } public static final int UPDATE_TIME_FIELD_NUMBER = 2; private com.google.protobuf.Timestamp updateTime_; /** * * * <pre> * Operation last update time. If the operation is done, this is also the * finish time. * </pre> * * <code>.google.protobuf.Timestamp update_time = 2;</code> * * @return Whether the updateTime field is set. */ @java.lang.Override public boolean hasUpdateTime() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Operation last update time. If the operation is done, this is also the * finish time. * </pre> * * <code>.google.protobuf.Timestamp update_time = 2;</code> * * @return The updateTime. */ @java.lang.Override public com.google.protobuf.Timestamp getUpdateTime() { return updateTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : updateTime_; } /** * * * <pre> * Operation last update time. If the operation is done, this is also the * finish time. * </pre> * * <code>.google.protobuf.Timestamp update_time = 2;</code> */ @java.lang.Override public com.google.protobuf.TimestampOrBuilder getUpdateTimeOrBuilder() { return updateTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : updateTime_; } public static final int SUCCESS_COUNT_FIELD_NUMBER = 3; private long successCount_ = 0L; /** * * * <pre> * Count of entries that were deleted successfully. * </pre> * * <code>int64 success_count = 3;</code> * * @return The successCount. */ @java.lang.Override public long getSuccessCount() { return successCount_; } public static final int FAILURE_COUNT_FIELD_NUMBER = 4; private long failureCount_ = 0L; /** * * * <pre> * Count of entries that encountered errors while processing. * </pre> * * <code>int64 failure_count = 4;</code> * * @return The failureCount. 
*/ @java.lang.Override public long getFailureCount() { return failureCount_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getCreateTime()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateTime()); } if (successCount_ != 0L) { output.writeInt64(3, successCount_); } if (failureCount_ != 0L) { output.writeInt64(4, failureCount_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getCreateTime()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateTime()); } if (successCount_ != 0L) { size += com.google.protobuf.CodedOutputStream.computeInt64Size(3, successCount_); } if (failureCount_ != 0L) { size += com.google.protobuf.CodedOutputStream.computeInt64Size(4, failureCount_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.retail.v2alpha.PurgeProductsMetadata)) { return super.equals(obj); } com.google.cloud.retail.v2alpha.PurgeProductsMetadata other = (com.google.cloud.retail.v2alpha.PurgeProductsMetadata) obj; if (hasCreateTime() != other.hasCreateTime()) return false; if (hasCreateTime()) { if (!getCreateTime().equals(other.getCreateTime())) return false; } if (hasUpdateTime() != 
other.hasUpdateTime()) return false; if (hasUpdateTime()) { if (!getUpdateTime().equals(other.getUpdateTime())) return false; } if (getSuccessCount() != other.getSuccessCount()) return false; if (getFailureCount() != other.getFailureCount()) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasCreateTime()) { hash = (37 * hash) + CREATE_TIME_FIELD_NUMBER; hash = (53 * hash) + getCreateTime().hashCode(); } if (hasUpdateTime()) { hash = (37 * hash) + UPDATE_TIME_FIELD_NUMBER; hash = (53 * hash) + getUpdateTime().hashCode(); } hash = (37 * hash) + SUCCESS_COUNT_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getSuccessCount()); hash = (37 * hash) + FAILURE_COUNT_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getFailureCount()); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.retail.v2alpha.PurgeProductsMetadata parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.retail.v2alpha.PurgeProductsMetadata parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.retail.v2alpha.PurgeProductsMetadata parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.retail.v2alpha.PurgeProductsMetadata parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.retail.v2alpha.PurgeProductsMetadata parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.retail.v2alpha.PurgeProductsMetadata parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.retail.v2alpha.PurgeProductsMetadata parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.retail.v2alpha.PurgeProductsMetadata parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.retail.v2alpha.PurgeProductsMetadata parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.retail.v2alpha.PurgeProductsMetadata parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.retail.v2alpha.PurgeProductsMetadata parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.retail.v2alpha.PurgeProductsMetadata parseFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.retail.v2alpha.PurgeProductsMetadata prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Metadata related to the progress of the PurgeProducts operation. * This will be returned by the google.longrunning.Operation.metadata field. * </pre> * * Protobuf type {@code google.cloud.retail.v2alpha.PurgeProductsMetadata} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.retail.v2alpha.PurgeProductsMetadata) com.google.cloud.retail.v2alpha.PurgeProductsMetadataOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.retail.v2alpha.PurgeConfigProto .internal_static_google_cloud_retail_v2alpha_PurgeProductsMetadata_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.retail.v2alpha.PurgeConfigProto .internal_static_google_cloud_retail_v2alpha_PurgeProductsMetadata_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.retail.v2alpha.PurgeProductsMetadata.class, com.google.cloud.retail.v2alpha.PurgeProductsMetadata.Builder.class); } // 
Construct using com.google.cloud.retail.v2alpha.PurgeProductsMetadata.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getCreateTimeFieldBuilder(); getUpdateTimeFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; createTime_ = null; if (createTimeBuilder_ != null) { createTimeBuilder_.dispose(); createTimeBuilder_ = null; } updateTime_ = null; if (updateTimeBuilder_ != null) { updateTimeBuilder_.dispose(); updateTimeBuilder_ = null; } successCount_ = 0L; failureCount_ = 0L; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.retail.v2alpha.PurgeConfigProto .internal_static_google_cloud_retail_v2alpha_PurgeProductsMetadata_descriptor; } @java.lang.Override public com.google.cloud.retail.v2alpha.PurgeProductsMetadata getDefaultInstanceForType() { return com.google.cloud.retail.v2alpha.PurgeProductsMetadata.getDefaultInstance(); } @java.lang.Override public com.google.cloud.retail.v2alpha.PurgeProductsMetadata build() { com.google.cloud.retail.v2alpha.PurgeProductsMetadata result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.retail.v2alpha.PurgeProductsMetadata buildPartial() { com.google.cloud.retail.v2alpha.PurgeProductsMetadata result = new com.google.cloud.retail.v2alpha.PurgeProductsMetadata(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.retail.v2alpha.PurgeProductsMetadata result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { 
result.createTime_ = createTimeBuilder_ == null ? createTime_ : createTimeBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateTime_ = updateTimeBuilder_ == null ? updateTime_ : updateTimeBuilder_.build(); to_bitField0_ |= 0x00000002; } if (((from_bitField0_ & 0x00000004) != 0)) { result.successCount_ = successCount_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.failureCount_ = failureCount_; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.retail.v2alpha.PurgeProductsMetadata) { return mergeFrom((com.google.cloud.retail.v2alpha.PurgeProductsMetadata) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.retail.v2alpha.PurgeProductsMetadata other) { if (other == com.google.cloud.retail.v2alpha.PurgeProductsMetadata.getDefaultInstance()) return this; if (other.hasCreateTime()) { mergeCreateTime(other.getCreateTime()); } if (other.hasUpdateTime()) { 
mergeUpdateTime(other.getUpdateTime()); } if (other.getSuccessCount() != 0L) { setSuccessCount(other.getSuccessCount()); } if (other.getFailureCount() != 0L) { setFailureCount(other.getFailureCount()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getCreateTimeFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateTimeFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 case 24: { successCount_ = input.readInt64(); bitField0_ |= 0x00000004; break; } // case 24 case 32: { failureCount_ = input.readInt64(); bitField0_ |= 0x00000008; break; } // case 32 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.protobuf.Timestamp createTime_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> createTimeBuilder_; /** * * * <pre> * Operation create time. * </pre> * * <code>.google.protobuf.Timestamp create_time = 1;</code> * * @return Whether the createTime field is set. 
*/ public boolean hasCreateTime() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Operation create time. * </pre> * * <code>.google.protobuf.Timestamp create_time = 1;</code> * * @return The createTime. */ public com.google.protobuf.Timestamp getCreateTime() { if (createTimeBuilder_ == null) { return createTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : createTime_; } else { return createTimeBuilder_.getMessage(); } } /** * * * <pre> * Operation create time. * </pre> * * <code>.google.protobuf.Timestamp create_time = 1;</code> */ public Builder setCreateTime(com.google.protobuf.Timestamp value) { if (createTimeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } createTime_ = value; } else { createTimeBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Operation create time. * </pre> * * <code>.google.protobuf.Timestamp create_time = 1;</code> */ public Builder setCreateTime(com.google.protobuf.Timestamp.Builder builderForValue) { if (createTimeBuilder_ == null) { createTime_ = builderForValue.build(); } else { createTimeBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Operation create time. * </pre> * * <code>.google.protobuf.Timestamp create_time = 1;</code> */ public Builder mergeCreateTime(com.google.protobuf.Timestamp value) { if (createTimeBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && createTime_ != null && createTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) { getCreateTimeBuilder().mergeFrom(value); } else { createTime_ = value; } } else { createTimeBuilder_.mergeFrom(value); } if (createTime_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Operation create time. 
* </pre> * * <code>.google.protobuf.Timestamp create_time = 1;</code> */ public Builder clearCreateTime() { bitField0_ = (bitField0_ & ~0x00000001); createTime_ = null; if (createTimeBuilder_ != null) { createTimeBuilder_.dispose(); createTimeBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Operation create time. * </pre> * * <code>.google.protobuf.Timestamp create_time = 1;</code> */ public com.google.protobuf.Timestamp.Builder getCreateTimeBuilder() { bitField0_ |= 0x00000001; onChanged(); return getCreateTimeFieldBuilder().getBuilder(); } /** * * * <pre> * Operation create time. * </pre> * * <code>.google.protobuf.Timestamp create_time = 1;</code> */ public com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder() { if (createTimeBuilder_ != null) { return createTimeBuilder_.getMessageOrBuilder(); } else { return createTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : createTime_; } } /** * * * <pre> * Operation create time. * </pre> * * <code>.google.protobuf.Timestamp create_time = 1;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> getCreateTimeFieldBuilder() { if (createTimeBuilder_ == null) { createTimeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder>( getCreateTime(), getParentForChildren(), isClean()); createTime_ = null; } return createTimeBuilder_; } private com.google.protobuf.Timestamp updateTime_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> updateTimeBuilder_; /** * * * <pre> * Operation last update time. If the operation is done, this is also the * finish time. 
* </pre> * * <code>.google.protobuf.Timestamp update_time = 2;</code> * * @return Whether the updateTime field is set. */ public boolean hasUpdateTime() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Operation last update time. If the operation is done, this is also the * finish time. * </pre> * * <code>.google.protobuf.Timestamp update_time = 2;</code> * * @return The updateTime. */ public com.google.protobuf.Timestamp getUpdateTime() { if (updateTimeBuilder_ == null) { return updateTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : updateTime_; } else { return updateTimeBuilder_.getMessage(); } } /** * * * <pre> * Operation last update time. If the operation is done, this is also the * finish time. * </pre> * * <code>.google.protobuf.Timestamp update_time = 2;</code> */ public Builder setUpdateTime(com.google.protobuf.Timestamp value) { if (updateTimeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateTime_ = value; } else { updateTimeBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Operation last update time. If the operation is done, this is also the * finish time. * </pre> * * <code>.google.protobuf.Timestamp update_time = 2;</code> */ public Builder setUpdateTime(com.google.protobuf.Timestamp.Builder builderForValue) { if (updateTimeBuilder_ == null) { updateTime_ = builderForValue.build(); } else { updateTimeBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Operation last update time. If the operation is done, this is also the * finish time. 
* </pre> * * <code>.google.protobuf.Timestamp update_time = 2;</code> */ public Builder mergeUpdateTime(com.google.protobuf.Timestamp value) { if (updateTimeBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateTime_ != null && updateTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) { getUpdateTimeBuilder().mergeFrom(value); } else { updateTime_ = value; } } else { updateTimeBuilder_.mergeFrom(value); } if (updateTime_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Operation last update time. If the operation is done, this is also the * finish time. * </pre> * * <code>.google.protobuf.Timestamp update_time = 2;</code> */ public Builder clearUpdateTime() { bitField0_ = (bitField0_ & ~0x00000002); updateTime_ = null; if (updateTimeBuilder_ != null) { updateTimeBuilder_.dispose(); updateTimeBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Operation last update time. If the operation is done, this is also the * finish time. * </pre> * * <code>.google.protobuf.Timestamp update_time = 2;</code> */ public com.google.protobuf.Timestamp.Builder getUpdateTimeBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateTimeFieldBuilder().getBuilder(); } /** * * * <pre> * Operation last update time. If the operation is done, this is also the * finish time. * </pre> * * <code>.google.protobuf.Timestamp update_time = 2;</code> */ public com.google.protobuf.TimestampOrBuilder getUpdateTimeOrBuilder() { if (updateTimeBuilder_ != null) { return updateTimeBuilder_.getMessageOrBuilder(); } else { return updateTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : updateTime_; } } /** * * * <pre> * Operation last update time. If the operation is done, this is also the * finish time. 
* </pre> * * <code>.google.protobuf.Timestamp update_time = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> getUpdateTimeFieldBuilder() { if (updateTimeBuilder_ == null) { updateTimeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder>( getUpdateTime(), getParentForChildren(), isClean()); updateTime_ = null; } return updateTimeBuilder_; } private long successCount_; /** * * * <pre> * Count of entries that were deleted successfully. * </pre> * * <code>int64 success_count = 3;</code> * * @return The successCount. */ @java.lang.Override public long getSuccessCount() { return successCount_; } /** * * * <pre> * Count of entries that were deleted successfully. * </pre> * * <code>int64 success_count = 3;</code> * * @param value The successCount to set. * @return This builder for chaining. */ public Builder setSuccessCount(long value) { successCount_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Count of entries that were deleted successfully. * </pre> * * <code>int64 success_count = 3;</code> * * @return This builder for chaining. */ public Builder clearSuccessCount() { bitField0_ = (bitField0_ & ~0x00000004); successCount_ = 0L; onChanged(); return this; } private long failureCount_; /** * * * <pre> * Count of entries that encountered errors while processing. * </pre> * * <code>int64 failure_count = 4;</code> * * @return The failureCount. */ @java.lang.Override public long getFailureCount() { return failureCount_; } /** * * * <pre> * Count of entries that encountered errors while processing. * </pre> * * <code>int64 failure_count = 4;</code> * * @param value The failureCount to set. * @return This builder for chaining. 
*/ public Builder setFailureCount(long value) { failureCount_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * Count of entries that encountered errors while processing. * </pre> * * <code>int64 failure_count = 4;</code> * * @return This builder for chaining. */ public Builder clearFailureCount() { bitField0_ = (bitField0_ & ~0x00000008); failureCount_ = 0L; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.retail.v2alpha.PurgeProductsMetadata) } // @@protoc_insertion_point(class_scope:google.cloud.retail.v2alpha.PurgeProductsMetadata) private static final com.google.cloud.retail.v2alpha.PurgeProductsMetadata DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.retail.v2alpha.PurgeProductsMetadata(); } public static com.google.cloud.retail.v2alpha.PurgeProductsMetadata getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<PurgeProductsMetadata> PARSER = new com.google.protobuf.AbstractParser<PurgeProductsMetadata>() { @java.lang.Override public PurgeProductsMetadata parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch 
(java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<PurgeProductsMetadata> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<PurgeProductsMetadata> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.retail.v2alpha.PurgeProductsMetadata getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
36,557
java-talent/proto-google-cloud-talent-v4/src/main/java/com/google/cloud/talent/v4/HistogramQueryResult.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/talent/v4/histogram.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.talent.v4; /** * * * <pre> * Histogram result that matches * [HistogramQuery][google.cloud.talent.v4.HistogramQuery] specified in * searches. * </pre> * * Protobuf type {@code google.cloud.talent.v4.HistogramQueryResult} */ public final class HistogramQueryResult extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.talent.v4.HistogramQueryResult) HistogramQueryResultOrBuilder { private static final long serialVersionUID = 0L; // Use HistogramQueryResult.newBuilder() to construct. 
private HistogramQueryResult(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private HistogramQueryResult() { histogramQuery_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new HistogramQueryResult(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.talent.v4.HistogramProto .internal_static_google_cloud_talent_v4_HistogramQueryResult_descriptor; } @SuppressWarnings({"rawtypes"}) @java.lang.Override protected com.google.protobuf.MapFieldReflectionAccessor internalGetMapFieldReflection( int number) { switch (number) { case 2: return internalGetHistogram(); default: throw new RuntimeException("Invalid map field number: " + number); } } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.talent.v4.HistogramProto .internal_static_google_cloud_talent_v4_HistogramQueryResult_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.talent.v4.HistogramQueryResult.class, com.google.cloud.talent.v4.HistogramQueryResult.Builder.class); } public static final int HISTOGRAM_QUERY_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object histogramQuery_ = ""; /** * * * <pre> * Requested histogram expression. * </pre> * * <code>string histogram_query = 1;</code> * * @return The histogramQuery. */ @java.lang.Override public java.lang.String getHistogramQuery() { java.lang.Object ref = histogramQuery_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); histogramQuery_ = s; return s; } } /** * * * <pre> * Requested histogram expression. * </pre> * * <code>string histogram_query = 1;</code> * * @return The bytes for histogramQuery. 
*/ @java.lang.Override public com.google.protobuf.ByteString getHistogramQueryBytes() { java.lang.Object ref = histogramQuery_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); histogramQuery_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int HISTOGRAM_FIELD_NUMBER = 2; private static final class HistogramDefaultEntryHolder { static final com.google.protobuf.MapEntry<java.lang.String, java.lang.Long> defaultEntry = com.google.protobuf.MapEntry.<java.lang.String, java.lang.Long>newDefaultInstance( com.google.cloud.talent.v4.HistogramProto .internal_static_google_cloud_talent_v4_HistogramQueryResult_HistogramEntry_descriptor, com.google.protobuf.WireFormat.FieldType.STRING, "", com.google.protobuf.WireFormat.FieldType.INT64, 0L); } @SuppressWarnings("serial") private com.google.protobuf.MapField<java.lang.String, java.lang.Long> histogram_; private com.google.protobuf.MapField<java.lang.String, java.lang.Long> internalGetHistogram() { if (histogram_ == null) { return com.google.protobuf.MapField.emptyMapField(HistogramDefaultEntryHolder.defaultEntry); } return histogram_; } public int getHistogramCount() { return internalGetHistogram().getMap().size(); } /** * * * <pre> * A map from the values of the facet associated with distinct values to the * number of matching entries with corresponding value. * * The key format is: * * * (for string histogram) string values stored in the field. * * (for named numeric bucket) name specified in `bucket()` function, like * for `bucket(0, MAX, "non-negative")`, the key will be `non-negative`. * * (for anonymous numeric bucket) range formatted as `&lt;low&gt;-&lt;high&gt;`, for * example, `0-1000`, `MIN-0`, and `0-MAX`. 
* </pre> * * <code>map&lt;string, int64&gt; histogram = 2;</code> */ @java.lang.Override public boolean containsHistogram(java.lang.String key) { if (key == null) { throw new NullPointerException("map key"); } return internalGetHistogram().getMap().containsKey(key); } /** Use {@link #getHistogramMap()} instead. */ @java.lang.Override @java.lang.Deprecated public java.util.Map<java.lang.String, java.lang.Long> getHistogram() { return getHistogramMap(); } /** * * * <pre> * A map from the values of the facet associated with distinct values to the * number of matching entries with corresponding value. * * The key format is: * * * (for string histogram) string values stored in the field. * * (for named numeric bucket) name specified in `bucket()` function, like * for `bucket(0, MAX, "non-negative")`, the key will be `non-negative`. * * (for anonymous numeric bucket) range formatted as `&lt;low&gt;-&lt;high&gt;`, for * example, `0-1000`, `MIN-0`, and `0-MAX`. * </pre> * * <code>map&lt;string, int64&gt; histogram = 2;</code> */ @java.lang.Override public java.util.Map<java.lang.String, java.lang.Long> getHistogramMap() { return internalGetHistogram().getMap(); } /** * * * <pre> * A map from the values of the facet associated with distinct values to the * number of matching entries with corresponding value. * * The key format is: * * * (for string histogram) string values stored in the field. * * (for named numeric bucket) name specified in `bucket()` function, like * for `bucket(0, MAX, "non-negative")`, the key will be `non-negative`. * * (for anonymous numeric bucket) range formatted as `&lt;low&gt;-&lt;high&gt;`, for * example, `0-1000`, `MIN-0`, and `0-MAX`. 
* </pre> * * <code>map&lt;string, int64&gt; histogram = 2;</code> */ @java.lang.Override public long getHistogramOrDefault(java.lang.String key, long defaultValue) { if (key == null) { throw new NullPointerException("map key"); } java.util.Map<java.lang.String, java.lang.Long> map = internalGetHistogram().getMap(); return map.containsKey(key) ? map.get(key) : defaultValue; } /** * * * <pre> * A map from the values of the facet associated with distinct values to the * number of matching entries with corresponding value. * * The key format is: * * * (for string histogram) string values stored in the field. * * (for named numeric bucket) name specified in `bucket()` function, like * for `bucket(0, MAX, "non-negative")`, the key will be `non-negative`. * * (for anonymous numeric bucket) range formatted as `&lt;low&gt;-&lt;high&gt;`, for * example, `0-1000`, `MIN-0`, and `0-MAX`. * </pre> * * <code>map&lt;string, int64&gt; histogram = 2;</code> */ @java.lang.Override public long getHistogramOrThrow(java.lang.String key) { if (key == null) { throw new NullPointerException("map key"); } java.util.Map<java.lang.String, java.lang.Long> map = internalGetHistogram().getMap(); if (!map.containsKey(key)) { throw new java.lang.IllegalArgumentException(); } return map.get(key); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(histogramQuery_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, histogramQuery_); } com.google.protobuf.GeneratedMessageV3.serializeStringMapTo( output, internalGetHistogram(), HistogramDefaultEntryHolder.defaultEntry, 2); getUnknownFields().writeTo(output); } 
  /**
   * Computes (once) and memoizes the serialized byte size; mirrors the field order used by
   * {@code writeTo}.
   */
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(histogramQuery_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, histogramQuery_);
    }
    // Map fields are sized by materializing each entry as a MapEntry message.
    for (java.util.Map.Entry<java.lang.String, java.lang.Long> entry :
        internalGetHistogram().getMap().entrySet()) {
      com.google.protobuf.MapEntry<java.lang.String, java.lang.Long> histogram__ =
          HistogramDefaultEntryHolder.defaultEntry
              .newBuilderForType()
              .setKey(entry.getKey())
              .setValue(entry.getValue())
              .build();
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, histogram__);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  /** Value equality over histogram_query, the histogram map, and unknown fields. */
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.talent.v4.HistogramQueryResult)) {
      return super.equals(obj);
    }
    com.google.cloud.talent.v4.HistogramQueryResult other =
        (com.google.cloud.talent.v4.HistogramQueryResult) obj;

    if (!getHistogramQuery().equals(other.getHistogramQuery())) return false;
    if (!internalGetHistogram().equals(other.internalGetHistogram())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  /** Memoized hash mixing the descriptor, field numbers, and field value hashes. */
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + HISTOGRAM_QUERY_FIELD_NUMBER;
    hash = (53 * hash) + getHistogramQuery().hashCode();
    // The map term is skipped when empty so that {} and an unset map hash identically.
    if (!internalGetHistogram().getMap().isEmpty()) {
      hash = (37 * hash) + HISTOGRAM_FIELD_NUMBER;
      hash = (53 * hash) + internalGetHistogram().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // Static parse entry points; all delegate to PARSER (optionally via the
  // GeneratedMessageV3 helpers that translate stream errors to IOException).
  public static com.google.cloud.talent.v4.HistogramQueryResult parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.talent.v4.HistogramQueryResult parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.talent.v4.HistogramQueryResult parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.talent.v4.HistogramQueryResult parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.talent.v4.HistogramQueryResult parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.talent.v4.HistogramQueryResult parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.talent.v4.HistogramQueryResult parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.talent.v4.HistogramQueryResult parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.talent.v4.HistogramQueryResult parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static
      com.google.cloud.talent.v4.HistogramQueryResult parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.talent.v4.HistogramQueryResult parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.talent.v4.HistogramQueryResult parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.cloud.talent.v4.HistogramQueryResult prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields a fresh empty Builder; otherwise seed it from this message.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   *
   *
   * <pre>
   * Histogram result that matches
   * [HistogramQuery][google.cloud.talent.v4.HistogramQuery] specified in
   * searches.
   * </pre>
   *
   * Protobuf type {@code google.cloud.talent.v4.HistogramQueryResult}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.talent.v4.HistogramQueryResult)
      com.google.cloud.talent.v4.HistogramQueryResultOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.talent.v4.HistogramProto
          .internal_static_google_cloud_talent_v4_HistogramQueryResult_descriptor;
    }

    // Reflection hook: map field number 2 (histogram) is the only map field on this message.
    @SuppressWarnings({"rawtypes"})
    protected com.google.protobuf.MapFieldReflectionAccessor internalGetMapFieldReflection(
        int number) {
      switch (number) {
        case 2:
          return internalGetHistogram();
        default:
          throw new RuntimeException("Invalid map field number: " + number);
      }
    }

    @SuppressWarnings({"rawtypes"})
    protected com.google.protobuf.MapFieldReflectionAccessor internalGetMutableMapFieldReflection(
        int number) {
      switch (number) {
        case 2:
          return internalGetMutableHistogram();
        default:
          throw new RuntimeException("Invalid map field number: " + number);
      }
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.talent.v4.HistogramProto
          .internal_static_google_cloud_talent_v4_HistogramQueryResult_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.talent.v4.HistogramQueryResult.class,
              com.google.cloud.talent.v4.HistogramQueryResult.Builder.class);
    }

    // Construct using com.google.cloud.talent.v4.HistogramQueryResult.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    /** Resets both fields and clears the presence bits. */
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      histogramQuery_ = "";
      internalGetMutableHistogram().clear();
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return
          com.google.cloud.talent.v4.HistogramProto
              .internal_static_google_cloud_talent_v4_HistogramQueryResult_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.talent.v4.HistogramQueryResult getDefaultInstanceForType() {
      return com.google.cloud.talent.v4.HistogramQueryResult.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.talent.v4.HistogramQueryResult build() {
      com.google.cloud.talent.v4.HistogramQueryResult result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.talent.v4.HistogramQueryResult buildPartial() {
      com.google.cloud.talent.v4.HistogramQueryResult result =
          new com.google.cloud.talent.v4.HistogramQueryResult(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies only the fields whose presence bits are set:
    // bit 0x1 = histogram_query, bit 0x2 = histogram map (frozen on copy).
    private void buildPartial0(com.google.cloud.talent.v4.HistogramQueryResult result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.histogramQuery_ = histogramQuery_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.histogram_ = internalGetHistogram();
        result.histogram_.makeImmutable();
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.talent.v4.HistogramQueryResult) {
        return mergeFrom((com.google.cloud.talent.v4.HistogramQueryResult) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    /** Merges {@code other}: non-empty query overwrites; map entries are merged in. */
    public Builder mergeFrom(com.google.cloud.talent.v4.HistogramQueryResult other) {
      if (other == com.google.cloud.talent.v4.HistogramQueryResult.getDefaultInstance())
        return this;
      if (!other.getHistogramQuery().isEmpty()) {
        histogramQuery_ = other.histogramQuery_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      internalGetMutableHistogram().mergeFrom(other.internalGetHistogram());
      bitField0_ |= 0x00000002;
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    /**
     * Streaming merge from the wire: tag 10 = field 1 (UTF-8 string), tag 18 = field 2 (one map
     * entry message per occurrence); unrecognized tags are preserved as unknown fields.
     */
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                histogramQuery_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                com.google.protobuf.MapEntry<java.lang.String, java.lang.Long> histogram__ =
                    input.readMessage(
                        HistogramDefaultEntryHolder.defaultEntry.getParserForType(),
                        extensionRegistry);
                internalGetMutableHistogram()
                    .getMutableMap()
                    .put(histogram__.getKey(), histogram__.getValue());
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Presence bits: 0x1 = histogram_query set, 0x2 = histogram map touched.
    private int bitField0_;

    // Holds either a String or a lazily-decoded ByteString (see the getters below).
    private java.lang.Object histogramQuery_ = "";

    /**
     * Returns the requested histogram expression, decoding and caching the String form if the
     * field currently holds a ByteString.
     *
     * <code>string histogram_query = 1;</code>
     */
    public java.lang.String getHistogramQuery() {
      java.lang.Object ref = histogramQuery_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        histogramQuery_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     * Returns the histogram expression as UTF-8 bytes, encoding and caching the ByteString form
     * if the field currently holds a String.
     */
    public com.google.protobuf.ByteString getHistogramQueryBytes() {
      java.lang.Object ref = histogramQuery_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        histogramQuery_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /** Sets histogram_query; {@code value} must be non-null. */
    public Builder setHistogramQuery(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      histogramQuery_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /** Resets histogram_query to its default ("") and clears its presence bit. */
    public Builder clearHistogramQuery() {
      histogramQuery_ = getDefaultInstance().getHistogramQuery();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }

    /** Sets histogram_query from raw bytes after validating they are well-formed UTF-8. */
    public Builder setHistogramQueryBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      histogramQuery_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    // Backing storage for map field 2; null until first mutation.
    private com.google.protobuf.MapField<java.lang.String, java.lang.Long> histogram_;

    private com.google.protobuf.MapField<java.lang.String, java.lang.Long> internalGetHistogram() {
      if (histogram_ == null) {
        return com.google.protobuf.MapField.emptyMapField(HistogramDefaultEntryHolder.defaultEntry);
      }
      return histogram_;
    }

    // Lazily creates the map and copies it when immutable (copy-on-write style per MapField).
    private com.google.protobuf.MapField<java.lang.String, java.lang.Long>
        internalGetMutableHistogram() {
      if (histogram_ == null) {
        histogram_ =
            com.google.protobuf.MapField.newMapField(HistogramDefaultEntryHolder.defaultEntry);
      }
      if (!histogram_.isMutable()) {
        histogram_ = histogram_.copy();
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return histogram_;
    }

    public int getHistogramCount() {
      return internalGetHistogram().getMap().size();
    }

    /**
     * Whether {@code key} is present in the histogram map.
     *
     * <code>map&lt;string, int64&gt; histogram = 2;</code>
     */
    @java.lang.Override
    public boolean containsHistogram(java.lang.String key) {
      if (key == null) {
        throw new NullPointerException("map key");
      }
      return internalGetHistogram().getMap().containsKey(key);
    }

    /** Use {@link #getHistogramMap()} instead. */
    @java.lang.Override
    @java.lang.Deprecated
    public java.util.Map<java.lang.String, java.lang.Long> getHistogram() {
      return getHistogramMap();
    }

    /**
     * Read-only view of the histogram map (facet value / bucket name to matching-entry count).
     *
     * <code>map&lt;string, int64&gt; histogram = 2;</code>
     */
    @java.lang.Override
    public java.util.Map<java.lang.String, java.lang.Long> getHistogramMap() {
      return internalGetHistogram().getMap();
    }

    /** Returns the count for {@code key}, or {@code defaultValue} when absent. */
    @java.lang.Override
    public long getHistogramOrDefault(java.lang.String key, long defaultValue) {
      if (key == null) {
        throw new NullPointerException("map key");
      }
      java.util.Map<java.lang.String, java.lang.Long> map = internalGetHistogram().getMap();
      return map.containsKey(key) ? map.get(key) : defaultValue;
    }

    /**
     * Returns the count for {@code key}.
     *
     * @throws java.lang.IllegalArgumentException if {@code key} is not present in the map
     */
    @java.lang.Override
    public long getHistogramOrThrow(java.lang.String key) {
      if (key == null) {
        throw new NullPointerException("map key");
      }
      java.util.Map<java.lang.String, java.lang.Long> map = internalGetHistogram().getMap();
      if (!map.containsKey(key)) {
        throw new java.lang.IllegalArgumentException();
      }
      return map.get(key);
    }

    /** Empties the histogram map and clears its presence bit. */
    public Builder clearHistogram() {
      bitField0_ = (bitField0_ & ~0x00000002);
      internalGetMutableHistogram().getMutableMap().clear();
      return this;
    }

    /** Removes one entry from the histogram map; no-op when {@code key} is absent. */
    public Builder removeHistogram(java.lang.String key) {
      if (key == null) {
        throw new NullPointerException("map key");
      }
      internalGetMutableHistogram().getMutableMap().remove(key);
      return this;
    }

    /** Use alternate mutation accessors instead. */
    @java.lang.Deprecated
    public java.util.Map<java.lang.String, java.lang.Long> getMutableHistogram() {
      bitField0_ |= 0x00000002;
      return internalGetMutableHistogram().getMutableMap();
    }

    /** Puts or overwrites a single histogram entry. */
    public Builder putHistogram(java.lang.String key, long value) {
      if (key == null) {
        throw new NullPointerException("map key");
      }
      internalGetMutableHistogram().getMutableMap().put(key, value);
      bitField0_ |= 0x00000002;
      return this;
    }

    /** Puts all entries from {@code values} into the histogram map. */
    public Builder putAllHistogram(java.util.Map<java.lang.String, java.lang.Long> values) {
      internalGetMutableHistogram().getMutableMap().putAll(values);
      bitField0_ |= 0x00000002;
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.talent.v4.HistogramQueryResult)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.talent.v4.HistogramQueryResult)
  private static final com.google.cloud.talent.v4.HistogramQueryResult DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.talent.v4.HistogramQueryResult();
  }

  public static com.google.cloud.talent.v4.HistogramQueryResult getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Wire parser; delegates to Builder.mergeFrom and attaches the partially-built message to any
  // parse failure so callers can inspect what was decoded before the error.
  private static final com.google.protobuf.Parser<HistogramQueryResult> PARSER =
      new com.google.protobuf.AbstractParser<HistogramQueryResult>() {
        @java.lang.Override
        public HistogramQueryResult parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static
      com.google.protobuf.Parser<HistogramQueryResult> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<HistogramQueryResult> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.talent.v4.HistogramQueryResult getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
apache/dubbo
36,810
dubbo-config/dubbo-config-api/src/main/java/org/apache/dubbo/config/utils/ConfigValidationUtils.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dubbo.config.utils; import org.apache.dubbo.common.URL; import org.apache.dubbo.common.URLBuilder; import org.apache.dubbo.common.config.ConfigurationUtils; import org.apache.dubbo.common.config.PropertiesConfiguration; import org.apache.dubbo.common.logger.ErrorTypeAwareLogger; import org.apache.dubbo.common.logger.LoggerFactory; import org.apache.dubbo.common.serialize.Serialization; import org.apache.dubbo.common.status.StatusChecker; import org.apache.dubbo.common.status.reporter.FrameworkStatusReportService; import org.apache.dubbo.common.threadpool.ThreadPool; import org.apache.dubbo.common.utils.ClassUtils; import org.apache.dubbo.common.utils.CollectionUtils; import org.apache.dubbo.common.utils.ConfigUtils; import org.apache.dubbo.common.utils.NetUtils; import org.apache.dubbo.common.utils.StringUtils; import org.apache.dubbo.common.utils.SystemPropertyConfigUtils; import org.apache.dubbo.common.utils.UrlUtils; import org.apache.dubbo.config.AbstractConfig; import org.apache.dubbo.config.AbstractInterfaceConfig; import org.apache.dubbo.config.ApplicationConfig; import org.apache.dubbo.config.ConfigCenterConfig; import org.apache.dubbo.config.ConsumerConfig; import 
org.apache.dubbo.config.MetadataReportConfig; import org.apache.dubbo.config.MethodConfig; import org.apache.dubbo.config.MetricsConfig; import org.apache.dubbo.config.ModuleConfig; import org.apache.dubbo.config.MonitorConfig; import org.apache.dubbo.config.ProtocolConfig; import org.apache.dubbo.config.ProviderConfig; import org.apache.dubbo.config.ReferenceConfig; import org.apache.dubbo.config.RegistryConfig; import org.apache.dubbo.config.ServiceConfig; import org.apache.dubbo.config.SslConfig; import org.apache.dubbo.config.TracingConfig; import org.apache.dubbo.monitor.MonitorFactory; import org.apache.dubbo.monitor.MonitorService; import org.apache.dubbo.registry.RegistryService; import org.apache.dubbo.remoting.Codec2; import org.apache.dubbo.remoting.Dispatcher; import org.apache.dubbo.remoting.Transporter; import org.apache.dubbo.remoting.exchange.Exchanger; import org.apache.dubbo.remoting.telnet.TelnetHandler; import org.apache.dubbo.rpc.ExporterListener; import org.apache.dubbo.rpc.Filter; import org.apache.dubbo.rpc.InvokerListener; import org.apache.dubbo.rpc.ProxyFactory; import org.apache.dubbo.rpc.cluster.Cluster; import org.apache.dubbo.rpc.cluster.LoadBalance; import org.apache.dubbo.rpc.cluster.filter.ClusterFilter; import org.apache.dubbo.rpc.model.ScopeModel; import org.apache.dubbo.rpc.model.ScopeModelUtil; import java.net.InetAddress; import java.net.UnknownHostException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; import static org.apache.dubbo.common.constants.CommonConstants.ANYHOST_VALUE; import static org.apache.dubbo.common.constants.CommonConstants.CLUSTER_KEY; import static org.apache.dubbo.common.constants.CommonConstants.CONFIG_NAMESPACE_KEY; import static org.apache.dubbo.common.constants.CommonConstants.DEFAULT_KEY; 
import static org.apache.dubbo.common.constants.CommonConstants.DUBBO_PROTOCOL; import static org.apache.dubbo.common.constants.CommonConstants.DubboProperty.DUBBO_IP_TO_REGISTRY; import static org.apache.dubbo.common.constants.CommonConstants.DubboProperty.DUBBO_MONITOR_ADDRESS; import static org.apache.dubbo.common.constants.CommonConstants.FILE_KEY; import static org.apache.dubbo.common.constants.CommonConstants.FILTER_KEY; import static org.apache.dubbo.common.constants.CommonConstants.GROUP_KEY; import static org.apache.dubbo.common.constants.CommonConstants.HOST_KEY; import static org.apache.dubbo.common.constants.CommonConstants.INTERFACE_KEY; import static org.apache.dubbo.common.constants.CommonConstants.LOADBALANCE_KEY; import static org.apache.dubbo.common.constants.CommonConstants.LOCALHOST_VALUE; import static org.apache.dubbo.common.constants.CommonConstants.PASSWORD_KEY; import static org.apache.dubbo.common.constants.CommonConstants.PATH_KEY; import static org.apache.dubbo.common.constants.CommonConstants.PROTOCOL_KEY; import static org.apache.dubbo.common.constants.CommonConstants.REMOVE_VALUE_PREFIX; import static org.apache.dubbo.common.constants.CommonConstants.SHUTDOWN_WAIT_KEY; import static org.apache.dubbo.common.constants.CommonConstants.SHUTDOWN_WAIT_SECONDS_KEY; import static org.apache.dubbo.common.constants.CommonConstants.THREADPOOL_KEY; import static org.apache.dubbo.common.constants.CommonConstants.USERNAME_KEY; import static org.apache.dubbo.common.constants.CommonConstants.VERSION_KEY; import static org.apache.dubbo.common.constants.LoggerCodeConstants.CONFIG_PARAMETER_FORMAT_ERROR; import static org.apache.dubbo.common.constants.RegistryConstants.DEFAULT_REGISTER_MODE_ALL; import static org.apache.dubbo.common.constants.RegistryConstants.DEFAULT_REGISTER_MODE_INSTANCE; import static org.apache.dubbo.common.constants.RegistryConstants.DEFAULT_REGISTER_MODE_INTERFACE; import static 
org.apache.dubbo.common.constants.RegistryConstants.DUBBO_REGISTER_MODE_DEFAULT_KEY; import static org.apache.dubbo.common.constants.RegistryConstants.REGISTER_MODE_KEY; import static org.apache.dubbo.common.constants.RegistryConstants.REGISTRY_CLUSTER_KEY; import static org.apache.dubbo.common.constants.RegistryConstants.REGISTRY_KEY; import static org.apache.dubbo.common.constants.RegistryConstants.REGISTRY_PROTOCOL; import static org.apache.dubbo.common.constants.RegistryConstants.REGISTRY_PROTOCOL_TYPE; import static org.apache.dubbo.common.constants.RegistryConstants.REGISTRY_TYPE_KEY; import static org.apache.dubbo.common.constants.RegistryConstants.SERVICE_REGISTRY_PROTOCOL; import static org.apache.dubbo.common.constants.RemotingConstants.BACKUP_KEY; import static org.apache.dubbo.common.utils.StringUtils.isEmpty; import static org.apache.dubbo.common.utils.StringUtils.isNotEmpty; import static org.apache.dubbo.config.Constants.ARCHITECTURE; import static org.apache.dubbo.config.Constants.CONTEXTPATH_KEY; import static org.apache.dubbo.config.Constants.ENVIRONMENT; import static org.apache.dubbo.config.Constants.IGNORE_CHECK_KEYS; import static org.apache.dubbo.config.Constants.LAYER_KEY; import static org.apache.dubbo.config.Constants.NAME; import static org.apache.dubbo.config.Constants.ORGANIZATION; import static org.apache.dubbo.config.Constants.OWNER; import static org.apache.dubbo.config.Constants.REGISTER_KEY; import static org.apache.dubbo.config.Constants.STATUS_KEY; import static org.apache.dubbo.monitor.Constants.LOGSTAT_PROTOCOL; import static org.apache.dubbo.registry.Constants.REGISTER_IP_KEY; import static org.apache.dubbo.registry.Constants.SUBSCRIBE_KEY; import static org.apache.dubbo.remoting.Constants.CLIENT_KEY; import static org.apache.dubbo.remoting.Constants.CODEC_KEY; import static org.apache.dubbo.remoting.Constants.DISPATCHER_KEY; import static org.apache.dubbo.remoting.Constants.EXCHANGER_KEY; import static 
org.apache.dubbo.remoting.Constants.PREFER_SERIALIZATION_KEY;
import static org.apache.dubbo.remoting.Constants.SERIALIZATION_KEY;
import static org.apache.dubbo.remoting.Constants.SERVER_KEY;
import static org.apache.dubbo.remoting.Constants.TELNET_KEY;
import static org.apache.dubbo.remoting.Constants.TRANSPORTER_KEY;
import static org.apache.dubbo.rpc.Constants.FAIL_PREFIX;
import static org.apache.dubbo.rpc.Constants.FORCE_PREFIX;
import static org.apache.dubbo.rpc.Constants.LOCAL_KEY;
import static org.apache.dubbo.rpc.Constants.MOCK_KEY;
import static org.apache.dubbo.rpc.Constants.PROXY_KEY;
import static org.apache.dubbo.rpc.Constants.RETURN_PREFIX;
import static org.apache.dubbo.rpc.Constants.THROW_PREFIX;
import static org.apache.dubbo.rpc.Constants.TOKEN_KEY;
import static org.apache.dubbo.rpc.cluster.Constants.REFER_KEY;

/**
 * Static validation helpers for Dubbo config objects.
 *
 * <p>Two families of methods live here:
 * <ul>
 *   <li>{@code loadRegistries}/{@code loadMonitor} — build registry/monitor {@link URL}s
 *       from the corresponding config objects;</li>
 *   <li>{@code validate*}/{@code check*} — validate field values of the various
 *       {@code *Config} classes against length limits and character patterns.</li>
 * </ul>
 *
 * <p>Note: {@link #checkProperty(String, String, int, Pattern)} logs violations via
 * {@code logger.error} rather than throwing, whereas the extension checks
 * ({@code checkExtension}/{@code checkMultiExtension}) throw {@link IllegalStateException}.
 */
public class ConfigValidationUtils {
    // NOTE(review): consider making this field final; it is only assigned here.
    private static ErrorTypeAwareLogger logger = LoggerFactory.getErrorTypeAwareLogger(ConfigValidationUtils.class);

    /**
     * The maximum length of a <b>parameter's value</b>
     */
    private static final int MAX_LENGTH = 200;

    /**
     * The maximum length of a <b>path</b>
     */
    private static final int MAX_PATH_LENGTH = 200;

    /**
     * The rule qualification for a <b>name</b>: digits, letters, '-', '_' and '.'
     */
    private static final Pattern PATTERN_NAME = Pattern.compile("[\\-._0-9a-zA-Z]+");

    /**
     * The rule qualification for a <b>comma-separated list of names</b>
     */
    private static final Pattern PATTERN_MULTI_NAME = Pattern.compile("[,\\-._0-9a-zA-Z]+");

    /**
     * The rule qualification for <b>method names</b>: a letter followed by letters/digits
     */
    private static final Pattern PATTERN_METHOD_NAME = Pattern.compile("[a-zA-Z][0-9a-zA-Z]*");

    /**
     * The rule qualification for a <b>path</b>: name characters plus '/' and '$'
     */
    private static final Pattern PATTERN_PATH = Pattern.compile("[/\\-$._0-9a-zA-Z]+");

    /**
     * The pattern for a value that may contain symbols (':', '*', ',', whitespace, '/')
     */
    private static final Pattern PATTERN_NAME_HAS_SYMBOL = Pattern.compile("[:*,\\s/\\-._0-9a-zA-Z]+");

    /**
     * The pattern for a property key (name characters plus '*' and ',')
     */
    private static final Pattern PATTERN_KEY = Pattern.compile("[*,\\-._0-9a-zA-Z]+");

    // Bracket markers used by checkHost to recognize a bracketed IPv6 literal.
    public static final String IPV6_START_MARK = "[";
    public static final String IPV6_END_MARK = "]";

    /**
     * Builds the list of registry URLs for the given interface config.
     *
     * <p>Each configured registry is refreshed (if not already), its parameters are merged with
     * the application's, and the resulting addresses are parsed into URLs. For providers only
     * URLs with {@code register=true} are kept; for consumers only URLs with
     * {@code subscribe=true}. The result is post-processed by
     * {@link #genCompatibleRegistries(ScopeModel, List, boolean)}.
     *
     * @param interfaceConfig the service/reference config whose registries are loaded
     * @param provider        true when loading for a provider, false for a consumer
     * @return the (possibly empty) list of registry URLs, never null
     */
    public static List<URL> loadRegistries(AbstractInterfaceConfig interfaceConfig, boolean provider) {
        // check && override if necessary
        List<URL> registryList = new ArrayList<>();
        ApplicationConfig application = interfaceConfig.getApplication();
        List<RegistryConfig> registries = interfaceConfig.getRegistries();
        if (CollectionUtils.isNotEmpty(registries)) {
            for (RegistryConfig config : registries) {
                // try to refresh registry in case it is set directly by user using config.setRegistries()
                if (!config.isRefreshed()) {
                    config.refresh();
                }
                String address = config.getAddress();
                if (StringUtils.isEmpty(address)) {
                    // no explicit address means "any host"
                    address = ANYHOST_VALUE;
                }
                if (!RegistryConfig.NO_AVAILABLE.equalsIgnoreCase(address)) {
                    Map<String, String> map = new HashMap<>();
                    AbstractConfig.appendParameters(map, application);
                    AbstractConfig.appendParameters(map, config);
                    map.put(PATH_KEY, RegistryService.class.getName());
                    AbstractInterfaceConfig.appendRuntimeParameters(map);
                    if (!map.containsKey(PROTOCOL_KEY)) {
                        map.put(PROTOCOL_KEY, DUBBO_PROTOCOL);
                    }
                    // registry cluster id defaults to the config id, optionally suffixed
                    // with the config namespace
                    String registryCluster = config.getId();
                    if (isEmpty(registryCluster)) {
                        registryCluster = DEFAULT_KEY;
                    }
                    if (map.containsKey(CONFIG_NAMESPACE_KEY)) {
                        registryCluster += ":" + map.get(CONFIG_NAMESPACE_KEY);
                    }
                    map.put(REGISTRY_CLUSTER_KEY, registryCluster);
                    List<URL> urls = UrlUtils.parseURLs(address, map);

                    for (URL url : urls) {
                        // move the original protocol into the "registry" parameter and
                        // replace it with the registry protocol type
                        url = URLBuilder.from(url)
                                .addParameter(REGISTRY_KEY, url.getProtocol())
                                .setProtocol(extractRegistryType(url))
                                .setScopeModel(interfaceConfig.getScopeModel())
                                .build();
                        // provider delay register state will be checked in RegistryProtocol#export
                        if (provider && url.getParameter(REGISTER_KEY, true)) {
                            registryList.add(url);
                        }
                        if (!provider && url.getParameter(SUBSCRIBE_KEY, true)) {
                            registryList.add(url);
                        }
                    }
                }
            }
        }
        return genCompatibleRegistries(interfaceConfig.getScopeModel(), registryList, provider);
    }

    /**
     * Expands the registry list with interface-level / service-discovery compatible URLs
     * according to the configured register mode.
     *
     * <p>For providers, a service-discovery registry URL in "all" mode additionally gets an
     * interface-compatible ({@code registry://}) counterpart; an interface registry URL in
     * "instance"/"all" mode additionally gets a service-discovery counterpart. The chosen
     * register mode is reported via {@link FrameworkStatusReportService}. Consumer URLs are
     * passed through unchanged.
     *
     * @param scopeModel   scope used to resolve dynamic properties and the report service
     * @param registryList URLs produced by {@link #loadRegistries}
     * @param provider     true when generating for a provider
     * @return the expanded list of registry URLs
     */
    private static List<URL> genCompatibleRegistries(ScopeModel scopeModel, List<URL> registryList, boolean provider) {
        List<URL> result = new ArrayList<>(registryList.size());
        registryList.forEach(registryURL -> {
            if (provider) {
                // for registries enabled service discovery, automatically register interface compatible addresses.
                String registerMode;
                if (SERVICE_REGISTRY_PROTOCOL.equals(registryURL.getProtocol())) {
                    registerMode = registryURL.getParameter(
                            REGISTER_MODE_KEY,
                            ConfigurationUtils.getCachedDynamicProperty(
                                    scopeModel, DUBBO_REGISTER_MODE_DEFAULT_KEY, DEFAULT_REGISTER_MODE_INSTANCE));
                    if (!isValidRegisterMode(registerMode)) {
                        registerMode = DEFAULT_REGISTER_MODE_INSTANCE;
                    }
                    result.add(registryURL);
                    if (DEFAULT_REGISTER_MODE_ALL.equalsIgnoreCase(registerMode)
                            && registryNotExists(registryURL, registryList, REGISTRY_PROTOCOL)) {
                        URL interfaceCompatibleRegistryURL = URLBuilder.from(registryURL)
                                .setProtocol(REGISTRY_PROTOCOL)
                                .removeParameter(REGISTRY_TYPE_KEY)
                                .build();
                        result.add(interfaceCompatibleRegistryURL);
                    }
                } else {
                    // interface-level registry: default register mode is "all"
                    registerMode = registryURL.getParameter(
                            REGISTER_MODE_KEY,
                            ConfigurationUtils.getCachedDynamicProperty(
                                    scopeModel, DUBBO_REGISTER_MODE_DEFAULT_KEY, DEFAULT_REGISTER_MODE_ALL));
                    if (!isValidRegisterMode(registerMode)) {
                        registerMode = DEFAULT_REGISTER_MODE_INTERFACE;
                    }
                    if ((DEFAULT_REGISTER_MODE_INSTANCE.equalsIgnoreCase(registerMode)
                                    || DEFAULT_REGISTER_MODE_ALL.equalsIgnoreCase(registerMode))
                            && registryNotExists(registryURL, registryList, SERVICE_REGISTRY_PROTOCOL)) {
                        URL serviceDiscoveryRegistryURL = URLBuilder.from(registryURL)
                                .setProtocol(SERVICE_REGISTRY_PROTOCOL)
                                .removeParameter(REGISTRY_TYPE_KEY)
                                .build();
                        result.add(serviceDiscoveryRegistryURL);
                    }
                    if (DEFAULT_REGISTER_MODE_INTERFACE.equalsIgnoreCase(registerMode)
                            || DEFAULT_REGISTER_MODE_ALL.equalsIgnoreCase(registerMode)) {
                        result.add(registryURL);
                    }
                }
                // report the effective register mode for observability
                FrameworkStatusReportService reportService = ScopeModelUtil.getApplicationModel(scopeModel)
                        .getBeanFactory()
                        .getBean(FrameworkStatusReportService.class);
                reportService.reportRegistrationStatus(reportService.createRegistrationReport(registerMode));
            } else {
                result.add(registryURL);
            }
        });
        return result;
    }

    /**
     * @return true if {@code mode} is one of the three supported register modes
     *         (interface / instance / all), case-insensitively
     */
    private static boolean isValidRegisterMode(String mode) {
        return isNotEmpty(mode)
                && (DEFAULT_REGISTER_MODE_INTERFACE.equalsIgnoreCase(mode)
                        || DEFAULT_REGISTER_MODE_INSTANCE.equalsIgnoreCase(mode)
                        || DEFAULT_REGISTER_MODE_ALL.equalsIgnoreCase(mode));
    }

    /**
     * @return true if no URL in {@code registryList} has protocol {@code registryType}
     *         AND the same backup address as {@code registryURL}
     */
    private static boolean registryNotExists(URL registryURL, List<URL> registryList, String registryType) {
        return registryList.stream()
                .noneMatch(url -> registryType.equals(url.getProtocol())
                        && registryURL.getBackupAddress().equals(url.getBackupAddress()));
    }

    /**
     * Builds the monitor URL for the given interface config, or null if no monitor is configured.
     *
     * <p>Address resolution order: the {@code dubbo.monitor.address} system property, then
     * {@code monitor.getAddress()}. If the monitor protocol is "registry"/"service-discovery-registry"
     * the monitor is discovered through the given registry URL; otherwise the address (defaulting
     * to localhost) is parsed directly.
     *
     * @param interfaceConfig config carrying the monitor/application settings
     * @param registryURL     registry URL used when the monitor is registry-discovered; may be null
     * @return the monitor URL, or null when neither address nor protocol is configured
     * @throws IllegalArgumentException if the registry ip system property holds an invalid local host
     */
    public static URL loadMonitor(AbstractInterfaceConfig interfaceConfig, URL registryURL) {
        Map<String, String> map = new HashMap<>();
        map.put(INTERFACE_KEY, MonitorService.class.getName());
        AbstractInterfaceConfig.appendRuntimeParameters(map);

        // set ip
        String hostToRegistry = ConfigUtils.getSystemProperty(DUBBO_IP_TO_REGISTRY);
        if (StringUtils.isEmpty(hostToRegistry)) {
            hostToRegistry = NetUtils.getLocalHost();
        } else if (NetUtils.isInvalidLocalHost(hostToRegistry)) {
            throw new IllegalArgumentException("Specified invalid registry ip from property:" + DUBBO_IP_TO_REGISTRY
                    + ", value:" + hostToRegistry);
        }
        map.put(REGISTER_IP_KEY, hostToRegistry);

        MonitorConfig monitor = interfaceConfig.getMonitor();
        ApplicationConfig application = interfaceConfig.getApplication();
        AbstractConfig.appendParameters(map, monitor);
        AbstractConfig.appendParameters(map, application);
        String address = null;
        // system property takes precedence over the configured monitor address
        String sysAddress = SystemPropertyConfigUtils.getSystemProperty(DUBBO_MONITOR_ADDRESS);
        if (sysAddress != null && sysAddress.length() > 0) {
            address = sysAddress;
        } else if (monitor != null) {
            address = monitor.getAddress();
        }
        String protocol = monitor == null ? null : monitor.getProtocol();
        if (monitor != null
                && (REGISTRY_PROTOCOL.equals(protocol) || SERVICE_REGISTRY_PROTOCOL.equals(protocol))
                && registryURL != null) {
            // monitor is looked up through the registry; the refer map is attached as an attribute
            return URLBuilder.from(registryURL)
                    .setProtocol(DUBBO_PROTOCOL)
                    .addParameter(PROTOCOL_KEY, protocol)
                    .putAttribute(REFER_KEY, map)
                    .build();
        } else if (ConfigUtils.isNotEmpty(address) || ConfigUtils.isNotEmpty(protocol)) {
            if (!map.containsKey(PROTOCOL_KEY)) {
                // prefer the logstat protocol when its extension is available
                if (interfaceConfig
                        .getScopeModel()
                        .getExtensionLoader(MonitorFactory.class)
                        .hasExtension(LOGSTAT_PROTOCOL)) {
                    map.put(PROTOCOL_KEY, LOGSTAT_PROTOCOL);
                } else if (ConfigUtils.isNotEmpty(protocol)) {
                    map.put(PROTOCOL_KEY, protocol);
                } else {
                    map.put(PROTOCOL_KEY, DUBBO_PROTOCOL);
                }
            }
            if (ConfigUtils.isEmpty(address)) {
                address = LOCALHOST_VALUE;
            }
            return UrlUtils.parseURL(address, map);
        }
        return null;
    }

    /**
     * Validates the fields shared by service and reference configs: local/stub/owner names,
     * proxy/cluster/filter extensions, the layer value, and every {@link MethodConfig}.
     *
     * @param config the interface-level config to validate
     */
    public static void validateAbstractInterfaceConfig(AbstractInterfaceConfig config) {
        checkName(LOCAL_KEY, config.getLocal());
        checkName("stub", config.getStub());
        checkMultiName("owner", config.getOwner());

        checkExtension(config.getScopeModel(), ProxyFactory.class, PROXY_KEY, config.getProxy());
        checkExtension(config.getScopeModel(), Cluster.class, CLUSTER_KEY, config.getCluster());
        // a filter value may name either a Filter or a ClusterFilter extension
        checkMultiExtension(
                config.getScopeModel(),
                Arrays.asList(Filter.class, ClusterFilter.class),
                FILTER_KEY,
                config.getFilter());
        checkNameHasSymbol(LAYER_KEY, config.getLayer());

        List<MethodConfig> methods = config.getMethods();
        if (CollectionUtils.isNotEmpty(methods)) {
            methods.forEach(ConfigValidationUtils::validateMethodConfig);
        }
    }

    /**
     * Validates a provider-side {@link ServiceConfig}: version/group/token/path fields,
     * exporter listeners, the shared interface-level fields, and the attached registry,
     * protocol and provider configs.
     *
     * @param config the service config to validate
     */
    public static void validateServiceConfig(ServiceConfig config) {
        checkKey(VERSION_KEY, config.getVersion());
        checkKey(GROUP_KEY, config.getGroup());
        checkName(TOKEN_KEY, config.getToken());
        checkPathName(PATH_KEY, config.getPath());

        checkMultiExtension(config.getScopeModel(), ExporterListener.class, "listener", config.getListener());

        validateAbstractInterfaceConfig(config);

        List<RegistryConfig> registries = config.getRegistries();
        if (registries != null) {
            for (RegistryConfig registry : registries) {
                validateRegistryConfig(registry);
            }
        }

        List<ProtocolConfig> protocols = config.getProtocols();
        if (protocols != null) {
            for (ProtocolConfig protocol : protocols) {
                validateProtocolConfig(protocol);
            }
        }

        ProviderConfig providerConfig = config.getProvider();
        if (providerConfig != null) {
            validateProviderConfig(providerConfig);
        }
    }

    /**
     * Validates a consumer-side {@link ReferenceConfig}: invoker listeners,
     * version/group/client fields, the shared interface-level fields, and the attached
     * registry and consumer configs.
     *
     * @param config the reference config to validate
     */
    public static void validateReferenceConfig(ReferenceConfig config) {
        checkMultiExtension(config.getScopeModel(), InvokerListener.class, "listener", config.getListener());
        checkKey(VERSION_KEY, config.getVersion());
        checkKey(GROUP_KEY, config.getGroup());
        checkName(CLIENT_KEY, config.getClient());

        validateAbstractInterfaceConfig(config);

        List<RegistryConfig> registries = config.getRegistries();
        if (registries != null) {
            for (RegistryConfig registry : registries) {
                validateRegistryConfig(registry);
            }
        }

        ConsumerConfig consumerConfig = config.getConsumer();
        if (consumerConfig != null) {
            validateConsumerConfig(consumerConfig);
        }
    }

    /**
     * Validates the parameter names of a {@link ConfigCenterConfig}; a null config is ignored.
     */
    public static void validateConfigCenterConfig(ConfigCenterConfig config) {
        if (config != null) {
            checkParameterName(config.getParameters());
        }
    }

    /**
     * Validates the {@link ApplicationConfig}: requires a valid config, mirrors the shutdown-wait
     * properties into system properties for backward compatibility, checks the name-like fields
     * and parameters, and verifies the QoS dependency is on the classpath.
     *
     * @param config the application config; a null config is ignored
     * @throws IllegalStateException if the config is present but not valid
     */
    public static void validateApplicationConfig(ApplicationConfig config) {
        if (config == null) {
            return;
        }

        if (!config.isValid()) {
            throw new IllegalStateException("No application config found or it's not a valid config! "
                    + "Please add <dubbo:application name=\"...\" /> to your spring config.");
        }

        // backward compatibility
        ScopeModel scopeModel = ScopeModelUtil.getOrDefaultApplicationModel(config.getScopeModel());
        PropertiesConfiguration configuration = scopeModel.modelEnvironment().getPropertiesConfiguration();
        String wait = configuration.getProperty(SHUTDOWN_WAIT_KEY);
        if (wait != null && wait.trim().length() > 0) {
            System.setProperty(SHUTDOWN_WAIT_KEY, wait.trim());
        } else {
            // fall back to the seconds-based key if the millisecond key is absent
            wait = configuration.getProperty(SHUTDOWN_WAIT_SECONDS_KEY);
            if (wait != null && wait.trim().length() > 0) {
                System.setProperty(SHUTDOWN_WAIT_SECONDS_KEY, wait.trim());
            }
        }

        checkName(NAME, config.getName());
        checkMultiName(OWNER, config.getOwner());
        checkName(ORGANIZATION, config.getOrganization());
        checkName(ARCHITECTURE, config.getArchitecture());
        checkName(ENVIRONMENT, config.getEnvironment());
        checkParameterName(config.getParameters());
        checkQosDependency(config);
    }

    /**
     * Logs an informational message when QoS is enabled (or unset) but the
     * {@code dubbo-qos} wrapper class is missing from the classpath.
     */
    private static void checkQosDependency(ApplicationConfig config) {
        if (!Boolean.FALSE.equals(config.getQosEnable())) {
            try {
                ClassUtils.forName("org.apache.dubbo.qos.protocol.QosProtocolWrapper");
            } catch (ClassNotFoundException e) {
                // deliberately informational, not an error: QoS is optional
                logger.info(
                        "QosProtocolWrapper not found, qos will not be enabled, please check if 'dubbo-qos' dependency was imported correctly.");
            }
        }
    }

    /**
     * Validates the name/owner/organization fields of a {@link ModuleConfig};
     * a null config is ignored.
     */
    public static void validateModuleConfig(ModuleConfig config) {
        if (config != null) {
            checkName(NAME, config.getName());
            checkName(OWNER, config.getOwner());
            checkName(ORGANIZATION, config.getOrganization());
        }
    }

    /**
     * @return true if the metadata report config is non-null, has at least one of
     *         metadata/definition reporting enabled (i.e. not both explicitly false),
     *         and has a non-empty address
     */
    public static boolean isValidMetadataConfig(MetadataReportConfig metadataReportConfig) {
        if (metadataReportConfig == null) {
            return false;
        }
        if (Boolean.FALSE.equals(metadataReportConfig.getReportMetadata())
                && Boolean.FALSE.equals(metadataReportConfig.getReportDefinition())) {
            return false;
        }
        return !isEmpty(metadataReportConfig.getAddress());
    }

    /**
     * Validates a {@link MetadataReportConfig}: when the config is in use, either the address
     * must carry an explicit protocol ("scheme://...") or a protocol must be set separately.
     *
     * @throws IllegalArgumentException if neither a usable protocol nor address is present
     */
    public static void validateMetadataConfig(MetadataReportConfig metadataReportConfig) {
        if (!isValidMetadataConfig(metadataReportConfig)) {
            return;
        }
        String address = metadataReportConfig.getAddress();
        String protocol = metadataReportConfig.getProtocol();
        if ((isEmpty(address) || !address.contains("://")) && isEmpty(protocol)) {
            throw new IllegalArgumentException(
                    "Please specify valid protocol or address for metadata report " + address);
        }
    }

    /**
     * Placeholder validation for {@link MetricsConfig}; currently performs no checks.
     */
    public static void validateMetricsConfig(MetricsConfig metricsConfig) {
        if (metricsConfig == null) {
            return;
        }
    }

    /**
     * Placeholder validation for {@link TracingConfig}; currently performs no checks.
     */
    public static void validateTracingConfig(TracingConfig tracingConfig) {
        if (tracingConfig == null) {
            return;
        }
    }

    /**
     * Placeholder validation for {@link SslConfig}; currently performs no checks.
     */
    public static void validateSslConfig(SslConfig sslConfig) {
        if (sslConfig == null) {
            return;
        }
    }

    /**
     * Validates a {@link MonitorConfig}: logs an informational message when the config is
     * present but invalid, and checks the parameter names. A null config is ignored.
     */
    public static void validateMonitorConfig(MonitorConfig config) {
        if (config != null) {
            if (!config.isValid()) {
                logger.info("There's no valid monitor config found, if you want to open monitor statistics for Dubbo, "
                        + "please make sure your monitor is configured properly.");
            }
            checkParameterName(config.getParameters());
        }
    }

    /**
     * Validates a {@link ProtocolConfig}: name/host/contextpath fields, the dubbo-protocol-only
     * codec/serialization/transporter extensions, and the common telnet/status/transporter/
     * exchanger/dispatcher/threadpool extensions. A null config is ignored.
     *
     * @param config the protocol config to validate
     */
    public static void validateProtocolConfig(ProtocolConfig config) {
        if (config != null) {
            String name = config.getName();
            checkName("name", name);
            checkHost(HOST_KEY, config.getHost());
            checkPathName("contextpath", config.getContextpath());

            // these extension points only apply to the dubbo protocol
            if (DUBBO_PROTOCOL.equals(name)) {
                checkMultiExtension(config.getScopeModel(), Codec2.class, CODEC_KEY, config.getCodec());
                checkMultiExtension(
                        config.getScopeModel(), Serialization.class, SERIALIZATION_KEY, config.getSerialization());
                checkMultiExtension(
                        config.getScopeModel(),
                        Serialization.class,
                        PREFER_SERIALIZATION_KEY,
                        config.getPreferSerialization());
                checkMultiExtension(config.getScopeModel(), Transporter.class, SERVER_KEY, config.getServer());
                checkMultiExtension(config.getScopeModel(), Transporter.class, CLIENT_KEY, config.getClient());
            }

            checkMultiExtension(config.getScopeModel(), TelnetHandler.class, TELNET_KEY, config.getTelnet());
            checkMultiExtension(config.getScopeModel(), StatusChecker.class, "status", config.getStatus());
            checkExtension(config.getScopeModel(), Transporter.class, TRANSPORTER_KEY, config.getTransporter());
            checkExtension(config.getScopeModel(), Exchanger.class, EXCHANGER_KEY, config.getExchanger());
            checkExtension(config.getScopeModel(), Dispatcher.class, DISPATCHER_KEY, config.getDispatcher());
            // NOTE(review): "dispather" appears to be a legacy misspelled key checked in
            // addition to DISPATCHER_KEY — confirm it is intentionally kept for
            // backward compatibility before removing.
            checkExtension(config.getScopeModel(), Dispatcher.class, "dispather", config.getDispatcher());
            checkExtension(config.getScopeModel(), ThreadPool.class, THREADPOOL_KEY, config.getThreadpool());
        }
    }

    /**
     * Validates a {@link ProviderConfig}: contextpath plus the threadpool/telnet/status/
     * transporter/exchanger/serialization extensions.
     *
     * @param config the provider config to validate (must not be null)
     */
    public static void validateProviderConfig(ProviderConfig config) {
        checkPathName(CONTEXTPATH_KEY, config.getContextpath());
        checkExtension(config.getScopeModel(), ThreadPool.class, THREADPOOL_KEY, config.getThreadpool());
        checkMultiExtension(config.getScopeModel(), TelnetHandler.class, TELNET_KEY, config.getTelnet());
        checkMultiExtension(config.getScopeModel(), StatusChecker.class, STATUS_KEY, config.getStatus());
        checkExtension(config.getScopeModel(), Transporter.class, TRANSPORTER_KEY, config.getTransporter());
        checkExtension(config.getScopeModel(), Exchanger.class, EXCHANGER_KEY, config.getExchanger());
        checkMultiExtension(config.getScopeModel(), Serialization.class, SERIALIZATION_KEY, config.getSerialization());
        checkMultiExtension(
                config.getScopeModel(),
                Serialization.class,
                PREFER_SERIALIZATION_KEY,
                config.getPreferSerialization());
    }

    /**
     * Placeholder validation for {@link ConsumerConfig}; currently performs no checks.
     */
    public static void validateConsumerConfig(ConsumerConfig config) {
        if (config == null) {
            return;
        }
    }

    /**
     * Validates a {@link RegistryConfig}: protocol/username/transporter/server/client names,
     * password and file length limits, and the parameter names.
     *
     * @param config the registry config to validate (must not be null)
     */
    public static void validateRegistryConfig(RegistryConfig config) {
        checkName(PROTOCOL_KEY, config.getProtocol());
        checkName(USERNAME_KEY, config.getUsername());
        checkLength(PASSWORD_KEY, config.getPassword());
        checkPathLength(FILE_KEY, config.getFile());
        checkName(TRANSPORTER_KEY, config.getTransporter());
        checkName(SERVER_KEY, config.getServer());
        checkName(CLIENT_KEY, config.getClient());
        checkParameterName(config.getParameters());
    }

    /**
     * Validates a {@link MethodConfig}: loadbalance extension, parameter names, the method name,
     * and the mock value. The pattern applied to the mock value depends on its prefix:
     * return/throw values are only length-checked, fail/force values may contain symbols,
     * anything else must be a plain name.
     *
     * @param config the method config to validate (must not be null)
     */
    public static void validateMethodConfig(MethodConfig config) {
        checkExtension(config.getScopeModel(), LoadBalance.class, LOADBALANCE_KEY, config.getLoadbalance());
        checkParameterName(config.getParameters());
        checkMethodName("name", config.getName());

        String mock = config.getMock();
        if (isNotEmpty(mock)) {
            if (mock.startsWith(RETURN_PREFIX) || mock.startsWith(THROW_PREFIX + " ")) {
                checkLength(MOCK_KEY, mock);
            } else if (mock.startsWith(FAIL_PREFIX) || mock.startsWith(FORCE_PREFIX)) {
                checkNameHasSymbol(MOCK_KEY, mock);
            } else {
                checkName(MOCK_KEY, mock);
            }
        }
    }

    /**
     * @return the service-discovery registry protocol when the URL carries the
     *         service-discovery registry type key, otherwise the plain registry protocol type
     */
    private static String extractRegistryType(URL url) {
        return UrlUtils.hasServiceDiscoveryRegistryTypeKey(url) ? SERVICE_REGISTRY_PROTOCOL : getRegistryProtocolType(url);
    }

    /**
     * @return the value of the registry-protocol-type parameter, or the default
     *         registry protocol when the parameter is absent/empty
     */
    private static String getRegistryProtocolType(URL url) {
        String registryProtocol = url.getParameter(REGISTRY_PROTOCOL_TYPE);
        return isNotEmpty(registryProtocol) ? registryProtocol : REGISTRY_PROTOCOL;
    }

    /**
     * Checks that {@code value} is a valid name and, if non-empty, that an extension of
     * {@code type} with that name exists in the given scope.
     *
     * @throws IllegalStateException if no such extension exists
     */
    public static void checkExtension(ScopeModel scopeModel, Class<?> type, String property, String value) {
        checkName(property, value);
        if (isNotEmpty(value) && !scopeModel.getExtensionLoader(type).hasExtension(value)) {
            throw new IllegalStateException("No such extension " + value + " for " + property + "/" + type.getName());
        }
    }

    /**
     * Check whether there is a <code>Extension</code> who's name (property) is <code>value</code> (special treatment is
     * required)
     *
     * @param type     The Extension type
     * @param property The extension key
     * @param value    The Extension name
     */
    public static void checkMultiExtension(ScopeModel scopeModel, Class<?> type, String property, String value) {
        checkMultiExtension(scopeModel, Collections.singletonList(type), property, value);
    }

    /**
     * Checks a comma-separated list of extension names against several candidate extension
     * types. Entries prefixed with the remove marker and the "default" entry are skipped;
     * every remaining entry must be resolvable by at least one of the given types.
     *
     * @throws IllegalStateException if an entry matches none of the types
     */
    public static void checkMultiExtension(ScopeModel scopeModel, List<Class<?>> types, String property, String value) {
        checkMultiName(property, value);
        if (isNotEmpty(value)) {
            String[] values = value.split("\\s*[,]+\\s*");
            for (String v : values) {
                v = StringUtils.trim(v);
                if (v.startsWith(REMOVE_VALUE_PREFIX)) {
                    continue;
                }
                if (DEFAULT_KEY.equals(v)) {
                    continue;
                }
                boolean match = false;
                for (Class<?> type : types) {
                    if (scopeModel.getExtensionLoader(type).hasExtension(v)) {
                        match = true;
                    }
                }
                if (!match) {
                    throw new IllegalStateException("No such extension " + v + " for " + property + "/"
                            + types.stream().map(Class::getName).collect(Collectors.joining(",")));
                }
            }
        }
    }

    /** Length-only check against {@link #MAX_LENGTH}. */
    public static void checkLength(String property, String value) {
        checkProperty(property, value, MAX_LENGTH, null);
    }

    /** Length-only check against {@link #MAX_PATH_LENGTH}. */
    public static void checkPathLength(String property, String value) {
        checkProperty(property, value, MAX_PATH_LENGTH, null);
    }

    /** Checks {@code value} against {@link #PATTERN_NAME} and {@link #MAX_LENGTH}. */
    public static void checkName(String property, String value) {
        checkProperty(property, value, MAX_LENGTH, PATTERN_NAME);
    }

    /**
     * Checks a host value. A bracketed value is first tried as an IPv6 literal via
     * {@link InetAddress#getByName(String)}; anything else (or a failed IPv6 parse)
     * falls through to the plain name check.
     */
    public static void checkHost(String property, String value) {
        if (StringUtils.isEmpty(value)) {
            return;
        }
        if (value.startsWith(IPV6_START_MARK) && value.endsWith(IPV6_END_MARK)) {
            // if the value starts with "[" and ends with "]", check whether it is an IPv6 literal
            try {
                InetAddress.getByName(value);
                return;
            } catch (UnknownHostException e) {
                // not an IPv6 string; do nothing and fall through to checkName
            }
        }
        checkName(property, value);
    }

    /** Checks {@code value} against {@link #PATTERN_NAME_HAS_SYMBOL} and {@link #MAX_LENGTH}. */
    public static void checkNameHasSymbol(String property, String value) {
        checkProperty(property, value, MAX_LENGTH, PATTERN_NAME_HAS_SYMBOL);
    }

    /** Checks {@code value} against {@link #PATTERN_KEY} and {@link #MAX_LENGTH}. */
    public static void checkKey(String property, String value) {
        checkProperty(property, value, MAX_LENGTH, PATTERN_KEY);
    }

    /** Checks {@code value} against {@link #PATTERN_MULTI_NAME} and {@link #MAX_LENGTH}. */
    public static void checkMultiName(String property, String value) {
        checkProperty(property, value, MAX_LENGTH, PATTERN_MULTI_NAME);
    }

    /** Checks {@code value} against {@link #PATTERN_PATH} and {@link #MAX_PATH_LENGTH}. */
    public static void checkPathName(String property, String value) {
        checkProperty(property, value, MAX_PATH_LENGTH, PATTERN_PATH);
    }

    /** Checks {@code value} against {@link #PATTERN_METHOD_NAME} and {@link #MAX_LENGTH}. */
    public static void checkMethodName(String property, String value) {
        checkProperty(property, value, MAX_LENGTH, PATTERN_METHOD_NAME);
    }

    /**
     * Checks every parameter entry with {@link #checkNameHasSymbol(String, String)}.
     * Keys listed in the backup key plus the comma-separated value of the
     * {@code IGNORE_CHECK_KEYS} parameter are exempt.
     *
     * @param parameters the parameter map; an empty/null map is ignored
     */
    public static void checkParameterName(Map<String, String> parameters) {
        if (CollectionUtils.isEmptyMap(parameters)) {
            return;
        }
        List<String> ignoreCheckKeys = new ArrayList<>();
        ignoreCheckKeys.add(BACKUP_KEY);
        String ignoreCheckKeysStr = parameters.get(IGNORE_CHECK_KEYS);
        if (!StringUtils.isBlank(ignoreCheckKeysStr)) {
            ignoreCheckKeys.addAll(Arrays.asList(ignoreCheckKeysStr.split(",")));
        }
        for (Map.Entry<String, String> entry : parameters.entrySet()) {
            if (!ignoreCheckKeys.contains(entry.getKey())) {
                checkNameHasSymbol(entry.getKey(), entry.getValue());
            }
        }
    }

    /**
     * Core property check: logs (does not throw) when {@code value} exceeds
     * {@code maxlength} or fails to match {@code pattern}.
     *
     * @param property  property name, used only in the log message
     * @param value     value to check; empty/null values pass silently
     * @param maxlength maximum accepted length
     * @param pattern   pattern the whole value must match; null skips the pattern check
     */
    public static void checkProperty(String property, String value, int maxlength, Pattern pattern) {
        if (StringUtils.isEmpty(value)) {
            return;
        }
        if (value.length() > maxlength) {
            logger.error(
                    CONFIG_PARAMETER_FORMAT_ERROR,
                    "the value content is too long",
                    "",
                    "Parameter value format error. Invalid " + property + "=\"" + value + "\" is longer than "
                            + maxlength);
        }
        if (pattern != null) {
            Matcher matcher = pattern.matcher(value);
            if (!matcher.matches()) {
                logger.error(
                        CONFIG_PARAMETER_FORMAT_ERROR,
                        "the value content is illegal character",
                        "",
                        "Parameter value format error. Invalid " + property + "=\"" + value + "\" contains illegal "
                                + "character, only digit, letter, '-', '_' or '.' is legal.");
            }
        }
    }
}
// NOTE(review): the three lines below are dataset-concatenation metadata (repository id,
// file size, file path) accidentally embedded between two unrelated source files; they are
// not valid Java. Preserved here as a comment pending removal/re-split of the files.
// apache/incubator-hugegraph
// 36,710
// hugegraph-store/hg-store-client/src/main/java/org/apache/hugegraph/store/client/NodeTxSessionProxy.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hugegraph.store.client; import static java.util.stream.Collectors.groupingBy; import static org.apache.hugegraph.store.client.util.HgAssert.isArgumentNotNull; import static org.apache.hugegraph.store.client.util.HgAssert.isArgumentValid; import static org.apache.hugegraph.store.client.util.HgAssert.isFalse; import static org.apache.hugegraph.store.client.util.HgStoreClientConst.EMPTY_STRING; import static org.apache.hugegraph.store.client.util.HgStoreClientUtil.err; import static org.apache.hugegraph.store.client.util.HgStoreClientUtil.toStr; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.function.BiFunction; import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.IntStream; import java.util.stream.Stream; import javax.annotation.concurrent.NotThreadSafe; import org.apache.hugegraph.store.HgKvEntry; import org.apache.hugegraph.store.HgKvIterator; import org.apache.hugegraph.store.HgKvOrderedIterator; import org.apache.hugegraph.store.HgOwnerKey; import org.apache.hugegraph.store.HgScanQuery; import 
org.apache.hugegraph.store.HgStoreSession; import org.apache.hugegraph.store.client.grpc.KvBatchScanner; import org.apache.hugegraph.store.client.grpc.KvCloseableIterator; import org.apache.hugegraph.store.client.util.HgAssert; import org.apache.hugegraph.store.client.util.HgStoreClientConst; import org.apache.hugegraph.store.client.util.HgStoreClientUtil; import org.apache.hugegraph.store.grpc.stream.ScanStreamReq.Builder; import org.apache.hugegraph.store.term.HgPair; import org.apache.hugegraph.store.term.HgTriple; import lombok.extern.slf4j.Slf4j; /** * created on 2022/01/19 * * @version 0.6.0 added batch scan on 2022/03/03 */ @Slf4j @NotThreadSafe class NodeTxSessionProxy implements HgStoreSession { private final HgStoreNodeManager nodeManager; private final HgStoreNodePartitioner nodePartitioner; private final String graphName; private final NodeTxExecutor txExecutor; NodeTxSessionProxy(String graphName, HgStoreNodeManager nodeManager) { this.nodeManager = nodeManager; this.graphName = graphName; this.nodePartitioner = this.nodeManager.getNodePartitioner(); this.txExecutor = NodeTxExecutor.graphOf(this.graphName, this); isFalse(this.nodePartitioner == null, "Failed to retrieve the node-partitioner from node-manager."); } @Override public void beginTx() { this.txExecutor.setTx(true); } @Override public void commit() { this.txExecutor.commitTx(); } @Override public void rollback() { this.txExecutor.rollbackTx(); } @Override public boolean isTx() { return this.txExecutor.isTx(); } @Override public boolean put(String table, HgOwnerKey ownerKey, byte[] value) { // isArgumentValid(table, "table"); // isArgumentNotNull(ownerKey, "ownerKey"); // log.info("put -> graph: {}, table: {}, key: {}, value: {}", // graphName, table, ownerKey, toByteStr(value)); // return this.txExecutor.prepareTx( // () -> getNodeStream(table, ownerKey), // e -> e.session.put(table, e.data.getKey(), value) // ); return this.txExecutor.prepareTx(new HgTriple(table, ownerKey, null), e -> 
e.getSession().put(table, e.getKey(), value)); } @Override public boolean directPut(String table, int partitionId, HgOwnerKey ownerKey, byte[] value) { isArgumentValid(table, "table"); isArgumentNotNull(ownerKey, "ownerKey"); return this.txExecutor.prepareTx( new HgTriple(table, ownerKey, partitionId), e -> e.getSession().put(table, e.getKey(), value) ); } @Override public boolean delete(String table, HgOwnerKey ownerKey) { HgAssert.isFalse(HgAssert.isInvalid(table), "The argument is invalid: table"); HgAssert.isFalse(ownerKey == null, "The argument is invalid: ownerKey"); if (log.isDebugEnabled()) { log.debug("delete -> graph: {}, table: {}, key: {}" , graphName, table, toStr(ownerKey)); } return this.txExecutor .prepareTx( new HgTriple(table, ownerKey, null), e -> e.getSession().delete(table, e.getKey()) ); } @Override public boolean deleteSingle(String table, HgOwnerKey ownerKey) { HgAssert.isFalse(HgAssert.isInvalid(table), "The argument is invalid: table"); HgAssert.isFalse(ownerKey == null, "The argument is invalid: ownerKey"); if (log.isDebugEnabled()) { log.debug("deleteSingle -> graph: {}, table: {}, key: {}" , graphName, table, toStr(ownerKey)); } return this.txExecutor .prepareTx( new HgTriple(table, ownerKey, null), e -> e.getSession().deleteSingle(table, e.getKey()) ); } @Override public boolean deletePrefix(String table, HgOwnerKey prefix) { HgAssert.isFalse(HgAssert.isInvalid(table), "The argument is invalid: table"); HgAssert.isFalse(prefix == null, "The argument is invalid: prefix"); if (log.isDebugEnabled()) { log.debug("deletePrefix -> graph: {}, table: {}, prefix: {}" , graphName, table, toStr(prefix)); } return this.txExecutor .prepareTx( new HgTriple(table, prefix, null), e -> e.getSession().deletePrefix(table, e.getKey()) ); } @Override public boolean deleteRange(String table, HgOwnerKey start, HgOwnerKey end) { HgAssert.isFalse(HgAssert.isInvalid(table), "The argument is invalid: table"); HgAssert.isFalse(start == null, "The argument is 
invalid: start"); HgAssert.isFalse(end == null, "The argument is invalid: end"); if (log.isDebugEnabled()) { log.debug("deleteRange -> graph: {}, table: {}, start: {}, end: {}" , graphName, table, toStr(start), toStr(end)); } return this.txExecutor .prepareTx( new HgTriple(table, start, end), e -> e.getSession().deleteRange(table, e.getKey(), e.getEndKey()) ); } @Override public boolean merge(String table, HgOwnerKey key, byte[] value) { HgAssert.isFalse(HgAssert.isInvalid(table), "The argument is invalid: table"); HgAssert.isFalse(key == null, "The argument is invalid: key"); HgAssert.isFalse(value == null, "The argument is invalid: value"); if (log.isDebugEnabled()) { log.debug("merge -> graph: {}, table: {}, key: {}, value: {}" , graphName, table, toStr(key), toStr(value)); } return this.txExecutor .prepareTx( new HgTriple(table, key, value), e -> e.getSession().merge(table, e.getKey(), value) ); } /*--- tx end ---*/ @Override public byte[] get(String table, HgOwnerKey ownerKey) { isArgumentValid(table, "table"); isArgumentNotNull(ownerKey, "ownerKey"); return this.txExecutor .limitOne( () -> this.getNodeStream(table, ownerKey), e -> e.session.get(table, e.data.getKey()), HgStoreClientConst.EMPTY_BYTES ); } @Override public boolean clean(int partId) { Collection<HgNodePartition> nodes = this.doPartition("", partId); return nodes.parallelStream() .map( e -> this.getStoreNode(e.getNodeId()).openSession(this.graphName) .clean(partId) ).findFirst().get(); } @Override @Deprecated public List<HgKvEntry> batchGetOwner(String table, List<HgOwnerKey> keyList) { HgAssert.isFalse(HgAssert.isInvalid(table), "The argument is invalid: table"); HgAssert.isFalse(HgAssert.isInvalid(keyList), "The argument is invalid: keyList"); return this.txExecutor .toList( (l) -> this.getStoreNode(l), keyList, key -> this.toNodeTkvList(table, key, key).stream(), e -> e.session.batchGetOwner(table, e.data) ); } @Override public HgKvIterator<HgKvEntry> batchPrefix(String table, List<HgOwnerKey> 
keyList) { HgAssert.isFalse(HgAssert.isInvalid(table), "The argument is invalid: table"); HgAssert.isFalse(HgAssert.isInvalid(keyList), "The argument is invalid: keyList"); return this.toHgKvIteratorProxy( this.txExecutor .toList( (l) -> this.getStoreNode(l), keyList, key -> this.toNodeTkvList(table, key, key).stream(), e -> Collections.singletonList(e.session.batchPrefix(table, e.data)) ) , Long.MAX_VALUE); } @Override public boolean truncate() { return this.txExecutor .isAllTrue( () -> this.getNodeStream(EMPTY_STRING), e -> e.session.truncate() ); } @Override public boolean existsTable(String table) { return this.txExecutor .ifAnyTrue( () -> this.getNodeStream(EMPTY_STRING), e -> e.session.existsTable(table) ); } @Override public boolean createTable(String table) { return this.txExecutor .isAllTrue( () -> this.getNodeStream(EMPTY_STRING), e -> e.session.createTable(table) ); } @Override public boolean deleteTable(String table) { return this.txExecutor .isAllTrue( () -> this.getNodeStream(EMPTY_STRING), e -> e.session.deleteTable(table) ); } @Override public boolean dropTable(String table) { return this.txExecutor .isAllTrue( () -> this.getNodeStream(table), e -> e.session.dropTable(table) ); } @Override public boolean deleteGraph(String graph) { return this.txExecutor .isAllTrue( () -> this.getNodeStream(EMPTY_STRING), e -> e.session.deleteGraph(graph) ); } @Override public HgKvIterator<HgKvEntry> scanIterator(String table) { return scanIterator(table, 0); } @Override public HgKvIterator<HgKvEntry> scanIterator(String table, byte[] query) { return scanIterator(table, 0, query); } @Override public HgKvIterator<HgKvEntry> scanIterator(String table, long limit) { HgAssert.isFalse(HgAssert.isInvalid(table), "The argument is invalid: table"); return this.toHgKvIteratorProxy( this.toNodeTkvList(table) .parallelStream() .map( e -> this.getStoreNode(e.getNodeId()).openSession(this.graphName) .scanIterator(e.getTable(), limit) ) .collect(Collectors.toList()) , limit); } 
@Override public HgKvIterator<HgKvEntry> scanIterator(String table, long limit, byte[] query) { HgAssert.isFalse(HgAssert.isInvalid(table), "The argument is invalid: table"); return this.toHgKvIteratorProxy( this.toNodeTkvList(table) .parallelStream() .map( e -> this.getStoreNode(e.getNodeId()).openSession(this.graphName) .scanIterator(e.getTable(), e.getKey(), limit, query) ) .collect(Collectors.toList()) , limit); } @Override public HgKvIterator<HgKvEntry> scanIterator(String table, HgOwnerKey keyPrefix) { return scanIterator(table, keyPrefix, 0); } @Override public HgKvIterator<HgKvEntry> scanIterator(String table, HgOwnerKey keyPrefix, long limit) { HgAssert.isFalse(HgAssert.isInvalid(table), "The argument is invalid: table"); HgAssert.isFalse(keyPrefix == null, "The argument is invalid: keyPrefix"); return this.toHgKvIteratorProxy( this.toNodeTkvList(table, keyPrefix) .parallelStream() .map( e -> this.getStoreNode(e.getNodeId()).openSession(this.graphName) .scanIterator(e.getTable(), e.getKey(), limit) ) .collect(Collectors.toList()) , limit); } @Override public HgKvIterator<HgKvEntry> scanIterator(String table, HgOwnerKey keyPrefix, long limit, byte[] query) { HgAssert.isFalse(HgAssert.isInvalid(table), "The argument is invalid: table"); HgAssert.isFalse(keyPrefix == null, "The argument is invalid: keyPrefix"); return this.toHgKvIteratorProxy( this.toNodeTkvList(table, keyPrefix) .parallelStream() .map( e -> this.getStoreNode(e.getNodeId()).openSession(this.graphName) .scanIterator(e.getTable(), e.getKey(), limit, query) ) .collect(Collectors.toList()) , limit); } @Override public HgKvIterator<HgKvEntry> scanIterator(String table, HgOwnerKey startKey, HgOwnerKey endKey) { return this.scanIterator(table, startKey, endKey, 0, null); } @Override public HgKvIterator<HgKvEntry> scanIterator(String table, HgOwnerKey startKey, HgOwnerKey endKey, long limit) { HgAssert.isFalse(HgAssert.isInvalid(table), "The argument is invalid: table"); HgAssert.isFalse(startKey == 
null, "The argument is invalid: startKey"); HgAssert.isFalse(endKey == null, "The argument is invalid: endKey"); return this.toHgKvIteratorProxy( this.toNodeTkvList(table, startKey, endKey) .parallelStream() .map( e -> this.getStoreNode(e.getNodeId()).openSession(this.graphName) .scanIterator(e.getTable(), e.getKey(), e.getEndKey(), limit) ) .collect(Collectors.toList()) , limit); } @Override public HgKvIterator<HgKvEntry> scanIterator(String table, HgOwnerKey startKey, HgOwnerKey endKey , long limit, byte[] query) { HgAssert.isFalse(HgAssert.isInvalid(table), "The argument is invalid: table"); HgAssert.isFalse(startKey == null, "The argument is invalid: startKey"); HgAssert.isFalse(endKey == null, "The argument is invalid: endKey"); return this.toHgKvIteratorProxy( this.toNodeTkvList(table, startKey, endKey) .parallelStream() .map( e -> this.getStoreNode(e.getNodeId()).openSession(this.graphName) .scanIterator(e.getTable(), e.getKey(), e.getEndKey(), limit, query) ) .collect(Collectors.toList()) , limit); } @Override public HgKvIterator<HgKvEntry> scanIterator(String table, HgOwnerKey startKey, HgOwnerKey endKey , long limit, int scanType, byte[] query) { HgAssert.isFalse(HgAssert.isInvalid(table), "The argument is invalid: table"); HgAssert.isFalse(startKey == null, "The argument is invalid: startKey"); HgAssert.isFalse(endKey == null, "The argument is invalid: endKey"); return this.toHgKvIteratorProxy( this.toNodeTkvList(table, startKey, endKey) .parallelStream() .map( e -> this.getStoreNode(e.getNodeId()).openSession(this.graphName) .scanIterator(e.getTable(), e.getKey(), e.getEndKey(), limit, scanType, query) ) .collect(Collectors.toList()) , limit); } @Override public HgKvIterator<HgKvEntry> scanIterator(String table, int codeFrom, int codeTo, int scanType, byte[] query) { if (log.isDebugEnabled()) { log.debug("graph: {}, table: {}, codeFrom: {}, codeTo: {}, scanType: {}, query: {}" , graphName, table, codeFrom, codeTo, scanType, 
HgStoreClientUtil.toStr(query)); } HgAssert.isFalse(HgAssert.isInvalid(table), "The argument is invalid: table"); return this.toHgKvIteratorProxy( this.toNodeTkvList(table, codeFrom, codeTo) .parallelStream() .map( e -> this.getStoreNode(e.getNodeId()).openSession(this.graphName) .scanIterator(e.getTable() , e.getKey().getKeyCode() , e.getEndKey().getKeyCode(), scanType, query) ) .collect(Collectors.toList()) , 0); } @Override public HgKvIterator<HgKvEntry> scanIterator(Builder scanReqBuilder) { List<NodeTkv> nodeTKvs = this.toNodeTkvList(scanReqBuilder); Function<NodeTkv, HgKvIterator<HgKvEntry>> hgKvIteratorFunction = e -> { HgStoreSession session = this.getStoreNode(e.getNodeId()) .openSession(this.graphName); return session.scanIterator(scanReqBuilder); }; List<HgKvIterator> iterators = nodeTKvs.parallelStream() .map(hgKvIteratorFunction) .collect(Collectors.toList()); return this.toHgKvIteratorProxy(iterators, scanReqBuilder.getLimit()); } @Override public long count(String table) { return this.toNodeTkvList(table) .parallelStream() .map( e -> this.getStoreNode(e.getNodeId()).openSession(this.graphName) .count(e.getTable()) ) .collect(Collectors.summingLong(l -> l)); } @Override public List<HgKvIterator<HgKvEntry>> scanBatch(HgScanQuery scanQuery) { HgAssert.isArgumentNotNull(scanQuery, "scanQuery"); return this.toTkvMapFunc(scanQuery.getScanMethod()) .apply(scanQuery) .entrySet() .stream() .map(e -> this.getStoreNode(e.getKey()) .openSession(this.graphName) .scanBatch(toScanQueryFunc(scanQuery.getScanMethod()) .apply(scanQuery.getTable(), e.getValue()) .setQuery(scanQuery.getQuery()) .setLimit(scanQuery.getLimit()) .setPerKeyLimit(scanQuery.getPerKeyLimit()) .setPerKeyMax((scanQuery.getPerKeyMax())) .setScanType(scanQuery.getScanType()) .build() ) ) //.peek(e->log.info("{}",e)) .flatMap(List::stream) .collect(Collectors.toList()); } @Override public KvCloseableIterator<HgKvIterator<HgKvEntry>> scanBatch2(HgScanQuery scanQuery) { return scanBatch3(scanQuery, 
null); } @Override public KvCloseableIterator<HgKvIterator<HgKvEntry>> scanBatch3(HgScanQuery scanQuery, KvCloseableIterator iterator) { KvCloseableIterator notifierWrap = KvBatchScanner.ofMerger(scanQuery, (query, notifier) -> { Map<Long, List<NodeTkv>> nodeTkvs = this.toTkvMapFunc(scanQuery.getScanMethod()) .apply(query); nodeTkvs.forEach((storeId, tkvs) -> { this.getStoreNode(storeId) .openSession(this.graphName) .scanBatch3(toScanQueryFunc(scanQuery.getScanMethod()) .apply(scanQuery.getTable(), tkvs) .setQuery(scanQuery.getQuery()) .setLimit(scanQuery.getLimit()) .setSkipDegree(scanQuery.getSkipDegree()) .setPerKeyLimit(scanQuery.getPerKeyLimit()) .setPerKeyMax((scanQuery.getPerKeyMax())) .setScanType(scanQuery.getScanType()) .setOrderType(scanQuery.getOrderType()) .build(), notifier ); }); return true; }); return notifierWrap; } private Function<HgScanQuery, Map<Long, List<NodeTkv>>> toTkvMapFunc( HgScanQuery.ScanMethod scanMethod) { switch (scanMethod) { case RANGE: return scanQuery -> { List<HgOwnerKey> starts = scanQuery.getStartList(); List<HgOwnerKey> ends = scanQuery.getEndList(); int size = starts.size(); return IntStream.range(0, size) .boxed() .map(i -> this.toNodeTkvList(scanQuery.getTable(), starts.get(i), ends.get(i))) .flatMap(List::stream) .collect(groupingBy(NodeTkv::getNodeId)); }; case PREFIX: return scanQuery -> scanQuery.getPrefixList() .stream() .map(keyPrefix -> this.toNodeTkvList(scanQuery.getTable(), keyPrefix)) .flatMap(List::stream) .collect(groupingBy(NodeTkv::getNodeId)); default: return scanQuery -> this.toNodeTkvList(scanQuery.getTable()) .stream() .collect(groupingBy(NodeTkv::getNodeId)); } } private BiFunction<String, List<NodeTkv>, HgScanQuery.ScanBuilder> toScanQueryFunc( HgScanQuery.ScanMethod scanMethod) { switch (scanMethod) { case RANGE: return (table, tkvList) -> { List<HgOwnerKey> startList = new LinkedList(); List<HgOwnerKey> endList = new LinkedList(); tkvList.stream().forEach(e -> { startList.add(e.getKey()); 
endList.add(e.getEndKey()); }); return HgScanQuery.ScanBuilder.rangeOf(table, startList, endList); }; case PREFIX: return (table, tkvList) -> HgScanQuery.ScanBuilder.prefixOf(table, tkvList.stream() .map(e -> e.getKey()) .collect(Collectors.toList()) ); default: return (table, tkvList) -> HgScanQuery.ScanBuilder.tableOf(table); } } /*-- common --*/ private HgKvIterator toHgKvIteratorProxy(List<HgKvIterator> iteratorList, long limit) { boolean isAllOrderedLimiter = iteratorList.stream() .allMatch( e -> e instanceof HgKvOrderedIterator); HgKvIterator<HgKvEntry> iterator; if (isAllOrderedLimiter) { iterator = new SequencedIterator(iteratorList.stream() .map(e -> (HgKvOrderedIterator) e) .collect(Collectors.toList()), limit); } else { iterator = new TopWorkIteratorProxy(iteratorList, limit); } return iterator; } HgStoreNode getStoreNode(Long nodeId) { HgStoreNode res = this.nodeManager.applyNode(this.graphName, nodeId); if (res == null) { throw err("Failed to apply for an instance of HgStoreNode from node-manager."); } return res; } public boolean doAction(String table, HgOwnerKey startKey, HgOwnerKey endKey, Function<NodeTkv, Boolean> action) { Collection<HgNodePartition> partitions = doPartition(table, startKey.getOwner(), endKey.getOwner()); for (HgNodePartition partition : partitions) { HgStoreNode storeNode = this.getStoreNode(partition.getNodeId()); HgStoreSession session = this.txExecutor.openNodeSession(storeNode); NodeTkv data = new NodeTkv(partition, table, startKey, endKey); data.setSession(session); if (!action.apply(data)) { return false; } } return true; } public boolean doAction(String table, HgOwnerKey startKey, Integer code, Function<NodeTkv, Boolean> action) { Collection<HgNodePartition> partitions = this.doPartition(table, code); for (HgNodePartition partition : partitions) { HgStoreNode storeNode = this.getStoreNode(partition.getNodeId()); HgStoreSession session = this.txExecutor.openNodeSession(storeNode); NodeTkv data = new NodeTkv(partition, 
table, startKey, code); data.setSession(session); if (!action.apply(data)) { return false; } } return true; } private List<NodeTkv> toNodeTkvList(Builder scanReqBuilder) { // TODO: use builder to get owner String table = scanReqBuilder.getTable(); HgOwnerKey ownerKey = HgStoreClientConst.ALL_PARTITION_OWNER_KEY; byte[] allOwner = ownerKey.getOwner(); Collection<HgNodePartition> partitions = doPartition(table, allOwner, allOwner); List<NodeTkv> nodeTkvs = new ArrayList<>(partitions.size()); for (HgNodePartition partition : partitions) { nodeTkvs.add(new NodeTkv(partition, table, ownerKey, ownerKey)); } return nodeTkvs; } private List<NodeTkv> toNodeTkvList(String table) { Collection<HgNodePartition> partitions = doPartition(table, HgStoreClientConst.ALL_PARTITION_OWNER_KEY.getOwner(), HgStoreClientConst.ALL_PARTITION_OWNER_KEY.getOwner()); ArrayList<NodeTkv> nodeTkvs = new ArrayList<>(partitions.size()); for (HgNodePartition partition : partitions) { nodeTkvs.add(new NodeTkv(partition, table, HgStoreClientConst.ALL_PARTITION_OWNER_KEY, HgStoreClientConst.ALL_PARTITION_OWNER_KEY)); } return nodeTkvs; } private List<NodeTkv> toNodeTkvList(String table, HgOwnerKey ownerKey) { Collection<HgNodePartition> partitions = doPartition(table, ownerKey.getOwner(), ownerKey.getOwner()); ArrayList<NodeTkv> nodeTkvs = new ArrayList<>(partitions.size()); for (HgNodePartition partition : partitions) { nodeTkvs.add(new NodeTkv(partition, table, ownerKey, ownerKey)); } return nodeTkvs; } private List<NodeTkv> toNodeTkvList(String table, HgOwnerKey startKey, HgOwnerKey endKey) { Collection<HgNodePartition> partitions = doPartition(table, startKey.getOwner(), endKey.getOwner()); ArrayList<NodeTkv> nodeTkvs = new ArrayList<>(partitions.size()); for (HgNodePartition partition : partitions) { nodeTkvs.add(new NodeTkv(partition, table, startKey, endKey)); } return nodeTkvs; } private List<NodeTkv> toNodeTkvList(String table, int startCode, int endCode) { Collection<HgNodePartition> 
partitions = this.doPartition(table, startCode, endCode); ArrayList<NodeTkv> nodeTkvs = new ArrayList<>(partitions.size()); for (HgNodePartition partition : partitions) { nodeTkvs.add( new NodeTkv(partition, table, HgOwnerKey.codeOf(startCode), HgOwnerKey.codeOf(endCode))); } return nodeTkvs; } /** * @return not null */ private Collection<HgNodePartition> doPartition(String table, byte[] startKey, byte[] endKey) { HgNodePartitionerBuilder partitionerBuilder = HgNodePartitionerBuilder.resetAndGet(); int status = this.nodePartitioner.partition(partitionerBuilder, this.graphName, startKey, endKey); if (status != 0) { throw err("The node-partitioner is not work."); } Collection<HgNodePartition> partitions = partitionerBuilder.getPartitions(); if (partitions.isEmpty()) { throw err("Failed to get the collection of HgNodePartition from node-partitioner."); } return partitions; } /** * @return @return not null */ private Collection<HgNodePartition> doPartition(String table, int startCode, int endCode) { HgNodePartitionerBuilder partitionerBuilder = HgNodePartitionerBuilder.resetAndGet(); int status = this.nodePartitioner.partition(partitionerBuilder, this.graphName, startCode, endCode); if (status != 0) { throw err("The node-partitioner is not work."); } Collection<HgNodePartition> partitions = partitionerBuilder.getPartitions(); if (partitions.isEmpty()) { throw err("Failed to get the collection of HgNodePartition from node-partitioner."); } return partitions; } Collection<HgNodePartition> doPartition(String table, int partitionId) { HgNodePartitionerBuilder partitionerBuilder = HgNodePartitionerBuilder.resetAndGet(); int status = this.nodePartitioner.partition(partitionerBuilder, this.graphName, partitionId); if (status != 0) { throw err("The node-partitioner is not work."); } Collection<HgNodePartition> partitions = partitionerBuilder.getPartitions(); if (partitions.isEmpty()) { throw err("Failed to get the collection of HgNodePartition from node-partitioner."); } 
return partitions; } private Stream<HgPair<HgStoreNode, NodeTkv>> getNodeStream(String table) { return this.toNodeTkvList(table) .stream() .map( e -> new HgPair<>(this.getStoreNode(e.getNodeId()), e) ); } Stream<HgPair<HgStoreNode, NodeTkv>> getNodeStream(String table, HgOwnerKey ownerKey) { return this.toNodeTkvList(table, ownerKey) .stream() .map( e -> new HgPair<>(this.getStoreNode(e.getNodeId()), e) ); } Stream<HgPair<HgStoreNode, NodeTkv>> getNodeStream(String table, HgOwnerKey startKey, HgOwnerKey endKey) { return this.toNodeTkvList(table, startKey, endKey) .stream() .map( e -> new HgPair<>(this.getStoreNode(e.getNodeId()), e) ); } // private List<HgPair<HgStoreNode, NodeTkv>> getNode(String table) { // List<NodeTkv> nodeTkvList = this.toNodeTkvList(table); // return nodeTkv2Node(nodeTkvList); // } List<HgPair<HgStoreNode, NodeTkv>> getNode(String table, HgOwnerKey ownerKey) { List<NodeTkv> nodeTkvList = this.toNodeTkvList(table, ownerKey); return nodeTkv2Node(nodeTkvList); } List<HgPair<HgStoreNode, NodeTkv>> getNode(String table, HgOwnerKey startKey, HgOwnerKey endKey) { List<NodeTkv> nodeTkvList = this.toNodeTkvList(table, startKey, endKey); return nodeTkv2Node(nodeTkvList); } // //boolean doAction(String table, HgOwnerKey startKey, HgOwnerKey endKey, // Function<NodeTkv, Boolean> action) { // return this.doAction(table, startKey, endKey, action); // //} // List<HgPair<HgStoreNode, NodeTkv>> getNode(String table, Integer endKey) { // .stream() // .map(e -> new NodeTkv(e, nodeParams.getX(), nodeParams.getY(), nodeParams.getY // ().getKeyCode())) // .map( // e -> new HgPair<>(this.proxy.getStoreNode(e.getNodeId()), e) // ); // Collection<HgNodePartition> nodePartitions = this.doPartition(table, endKey); // for (HgNodePartition nodePartition: nodePartitions) { // // } // return nodeTkv2Node(nodeTkvList); // // } private List<HgPair<HgStoreNode, NodeTkv>> nodeTkv2Node(Collection<NodeTkv> nodeTkvList) { ArrayList<HgPair<HgStoreNode, NodeTkv>> hgPairs = new 
ArrayList<>(nodeTkvList.size()); for (NodeTkv nodeTkv : nodeTkvList) { hgPairs.add(new HgPair<>(this.getStoreNode(nodeTkv.getNodeId()), nodeTkv)); } return hgPairs; } }
googleapis/google-cloud-java
36,507
java-aiplatform/proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/ListRagFilesResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/aiplatform/v1/vertex_rag_data_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.aiplatform.v1; /** * * * <pre> * Response message for * [VertexRagDataService.ListRagFiles][google.cloud.aiplatform.v1.VertexRagDataService.ListRagFiles]. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1.ListRagFilesResponse} */ public final class ListRagFilesResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.ListRagFilesResponse) ListRagFilesResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListRagFilesResponse.newBuilder() to construct. 
private ListRagFilesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListRagFilesResponse() { ragFiles_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListRagFilesResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1.VertexRagDataServiceProto .internal_static_google_cloud_aiplatform_v1_ListRagFilesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1.VertexRagDataServiceProto .internal_static_google_cloud_aiplatform_v1_ListRagFilesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1.ListRagFilesResponse.class, com.google.cloud.aiplatform.v1.ListRagFilesResponse.Builder.class); } public static final int RAG_FILES_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.aiplatform.v1.RagFile> ragFiles_; /** * * * <pre> * List of RagFiles in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.RagFile rag_files = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.aiplatform.v1.RagFile> getRagFilesList() { return ragFiles_; } /** * * * <pre> * List of RagFiles in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.RagFile rag_files = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.aiplatform.v1.RagFileOrBuilder> getRagFilesOrBuilderList() { return ragFiles_; } /** * * * <pre> * List of RagFiles in the requested page. 
* </pre> * * <code>repeated .google.cloud.aiplatform.v1.RagFile rag_files = 1;</code> */ @java.lang.Override public int getRagFilesCount() { return ragFiles_.size(); } /** * * * <pre> * List of RagFiles in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.RagFile rag_files = 1;</code> */ @java.lang.Override public com.google.cloud.aiplatform.v1.RagFile getRagFiles(int index) { return ragFiles_.get(index); } /** * * * <pre> * List of RagFiles in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.RagFile rag_files = 1;</code> */ @java.lang.Override public com.google.cloud.aiplatform.v1.RagFileOrBuilder getRagFilesOrBuilder(int index) { return ragFiles_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token to retrieve the next page of results. * Pass to * [ListRagFilesRequest.page_token][google.cloud.aiplatform.v1.ListRagFilesRequest.page_token] * to obtain that page. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * A token to retrieve the next page of results. * Pass to * [ListRagFilesRequest.page_token][google.cloud.aiplatform.v1.ListRagFilesRequest.page_token] * to obtain that page. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < ragFiles_.size(); i++) { output.writeMessage(1, ragFiles_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < ragFiles_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, ragFiles_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.aiplatform.v1.ListRagFilesResponse)) { return super.equals(obj); } com.google.cloud.aiplatform.v1.ListRagFilesResponse other = (com.google.cloud.aiplatform.v1.ListRagFilesResponse) obj; if (!getRagFilesList().equals(other.getRagFilesList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getRagFilesCount() > 0) { hash = (37 * hash) + RAG_FILES_FIELD_NUMBER; hash = (53 * hash) + getRagFilesList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.aiplatform.v1.ListRagFilesResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.ListRagFilesResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1.ListRagFilesResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.ListRagFilesResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1.ListRagFilesResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.ListRagFilesResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.cloud.aiplatform.v1.ListRagFilesResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.ListRagFilesResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1.ListRagFilesResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.ListRagFilesResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1.ListRagFilesResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.ListRagFilesResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.aiplatform.v1.ListRagFilesResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == 
DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response message for * [VertexRagDataService.ListRagFiles][google.cloud.aiplatform.v1.VertexRagDataService.ListRagFiles]. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1.ListRagFilesResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.ListRagFilesResponse) com.google.cloud.aiplatform.v1.ListRagFilesResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1.VertexRagDataServiceProto .internal_static_google_cloud_aiplatform_v1_ListRagFilesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1.VertexRagDataServiceProto .internal_static_google_cloud_aiplatform_v1_ListRagFilesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1.ListRagFilesResponse.class, com.google.cloud.aiplatform.v1.ListRagFilesResponse.Builder.class); } // Construct using com.google.cloud.aiplatform.v1.ListRagFilesResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (ragFilesBuilder_ == null) { ragFiles_ = java.util.Collections.emptyList(); } else { ragFiles_ = null; ragFilesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
com.google.cloud.aiplatform.v1.VertexRagDataServiceProto .internal_static_google_cloud_aiplatform_v1_ListRagFilesResponse_descriptor; } @java.lang.Override public com.google.cloud.aiplatform.v1.ListRagFilesResponse getDefaultInstanceForType() { return com.google.cloud.aiplatform.v1.ListRagFilesResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.aiplatform.v1.ListRagFilesResponse build() { com.google.cloud.aiplatform.v1.ListRagFilesResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.aiplatform.v1.ListRagFilesResponse buildPartial() { com.google.cloud.aiplatform.v1.ListRagFilesResponse result = new com.google.cloud.aiplatform.v1.ListRagFilesResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.aiplatform.v1.ListRagFilesResponse result) { if (ragFilesBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { ragFiles_ = java.util.Collections.unmodifiableList(ragFiles_); bitField0_ = (bitField0_ & ~0x00000001); } result.ragFiles_ = ragFiles_; } else { result.ragFiles_ = ragFilesBuilder_.build(); } } private void buildPartial0(com.google.cloud.aiplatform.v1.ListRagFilesResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return 
super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.aiplatform.v1.ListRagFilesResponse) { return mergeFrom((com.google.cloud.aiplatform.v1.ListRagFilesResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.aiplatform.v1.ListRagFilesResponse other) { if (other == com.google.cloud.aiplatform.v1.ListRagFilesResponse.getDefaultInstance()) return this; if (ragFilesBuilder_ == null) { if (!other.ragFiles_.isEmpty()) { if (ragFiles_.isEmpty()) { ragFiles_ = other.ragFiles_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureRagFilesIsMutable(); ragFiles_.addAll(other.ragFiles_); } onChanged(); } } else { if (!other.ragFiles_.isEmpty()) { if (ragFilesBuilder_.isEmpty()) { ragFilesBuilder_.dispose(); ragFilesBuilder_ = null; ragFiles_ = other.ragFiles_; bitField0_ = (bitField0_ & ~0x00000001); ragFilesBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getRagFilesFieldBuilder() : null; } else { ragFilesBuilder_.addAllMessages(other.ragFiles_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.aiplatform.v1.RagFile m = input.readMessage( com.google.cloud.aiplatform.v1.RagFile.parser(), extensionRegistry); if (ragFilesBuilder_ == null) { ensureRagFilesIsMutable(); ragFiles_.add(m); } else { ragFilesBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.aiplatform.v1.RagFile> ragFiles_ = java.util.Collections.emptyList(); private void ensureRagFilesIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { ragFiles_ = new java.util.ArrayList<com.google.cloud.aiplatform.v1.RagFile>(ragFiles_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.aiplatform.v1.RagFile, com.google.cloud.aiplatform.v1.RagFile.Builder, com.google.cloud.aiplatform.v1.RagFileOrBuilder> ragFilesBuilder_; /** * 
* * <pre> * List of RagFiles in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.RagFile rag_files = 1;</code> */ public java.util.List<com.google.cloud.aiplatform.v1.RagFile> getRagFilesList() { if (ragFilesBuilder_ == null) { return java.util.Collections.unmodifiableList(ragFiles_); } else { return ragFilesBuilder_.getMessageList(); } } /** * * * <pre> * List of RagFiles in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.RagFile rag_files = 1;</code> */ public int getRagFilesCount() { if (ragFilesBuilder_ == null) { return ragFiles_.size(); } else { return ragFilesBuilder_.getCount(); } } /** * * * <pre> * List of RagFiles in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.RagFile rag_files = 1;</code> */ public com.google.cloud.aiplatform.v1.RagFile getRagFiles(int index) { if (ragFilesBuilder_ == null) { return ragFiles_.get(index); } else { return ragFilesBuilder_.getMessage(index); } } /** * * * <pre> * List of RagFiles in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.RagFile rag_files = 1;</code> */ public Builder setRagFiles(int index, com.google.cloud.aiplatform.v1.RagFile value) { if (ragFilesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureRagFilesIsMutable(); ragFiles_.set(index, value); onChanged(); } else { ragFilesBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * List of RagFiles in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.RagFile rag_files = 1;</code> */ public Builder setRagFiles( int index, com.google.cloud.aiplatform.v1.RagFile.Builder builderForValue) { if (ragFilesBuilder_ == null) { ensureRagFilesIsMutable(); ragFiles_.set(index, builderForValue.build()); onChanged(); } else { ragFilesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * List of RagFiles in the requested page. 
* </pre> * * <code>repeated .google.cloud.aiplatform.v1.RagFile rag_files = 1;</code> */ public Builder addRagFiles(com.google.cloud.aiplatform.v1.RagFile value) { if (ragFilesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureRagFilesIsMutable(); ragFiles_.add(value); onChanged(); } else { ragFilesBuilder_.addMessage(value); } return this; } /** * * * <pre> * List of RagFiles in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.RagFile rag_files = 1;</code> */ public Builder addRagFiles(int index, com.google.cloud.aiplatform.v1.RagFile value) { if (ragFilesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureRagFilesIsMutable(); ragFiles_.add(index, value); onChanged(); } else { ragFilesBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * List of RagFiles in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.RagFile rag_files = 1;</code> */ public Builder addRagFiles(com.google.cloud.aiplatform.v1.RagFile.Builder builderForValue) { if (ragFilesBuilder_ == null) { ensureRagFilesIsMutable(); ragFiles_.add(builderForValue.build()); onChanged(); } else { ragFilesBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * List of RagFiles in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.RagFile rag_files = 1;</code> */ public Builder addRagFiles( int index, com.google.cloud.aiplatform.v1.RagFile.Builder builderForValue) { if (ragFilesBuilder_ == null) { ensureRagFilesIsMutable(); ragFiles_.add(index, builderForValue.build()); onChanged(); } else { ragFilesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * List of RagFiles in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.RagFile rag_files = 1;</code> */ public Builder addAllRagFiles( java.lang.Iterable<? 
extends com.google.cloud.aiplatform.v1.RagFile> values) { if (ragFilesBuilder_ == null) { ensureRagFilesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, ragFiles_); onChanged(); } else { ragFilesBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * List of RagFiles in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.RagFile rag_files = 1;</code> */ public Builder clearRagFiles() { if (ragFilesBuilder_ == null) { ragFiles_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { ragFilesBuilder_.clear(); } return this; } /** * * * <pre> * List of RagFiles in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.RagFile rag_files = 1;</code> */ public Builder removeRagFiles(int index) { if (ragFilesBuilder_ == null) { ensureRagFilesIsMutable(); ragFiles_.remove(index); onChanged(); } else { ragFilesBuilder_.remove(index); } return this; } /** * * * <pre> * List of RagFiles in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.RagFile rag_files = 1;</code> */ public com.google.cloud.aiplatform.v1.RagFile.Builder getRagFilesBuilder(int index) { return getRagFilesFieldBuilder().getBuilder(index); } /** * * * <pre> * List of RagFiles in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.RagFile rag_files = 1;</code> */ public com.google.cloud.aiplatform.v1.RagFileOrBuilder getRagFilesOrBuilder(int index) { if (ragFilesBuilder_ == null) { return ragFiles_.get(index); } else { return ragFilesBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * List of RagFiles in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.RagFile rag_files = 1;</code> */ public java.util.List<? 
extends com.google.cloud.aiplatform.v1.RagFileOrBuilder> getRagFilesOrBuilderList() { if (ragFilesBuilder_ != null) { return ragFilesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(ragFiles_); } } /** * * * <pre> * List of RagFiles in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.RagFile rag_files = 1;</code> */ public com.google.cloud.aiplatform.v1.RagFile.Builder addRagFilesBuilder() { return getRagFilesFieldBuilder() .addBuilder(com.google.cloud.aiplatform.v1.RagFile.getDefaultInstance()); } /** * * * <pre> * List of RagFiles in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.RagFile rag_files = 1;</code> */ public com.google.cloud.aiplatform.v1.RagFile.Builder addRagFilesBuilder(int index) { return getRagFilesFieldBuilder() .addBuilder(index, com.google.cloud.aiplatform.v1.RagFile.getDefaultInstance()); } /** * * * <pre> * List of RagFiles in the requested page. * </pre> * * <code>repeated .google.cloud.aiplatform.v1.RagFile rag_files = 1;</code> */ public java.util.List<com.google.cloud.aiplatform.v1.RagFile.Builder> getRagFilesBuilderList() { return getRagFilesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.aiplatform.v1.RagFile, com.google.cloud.aiplatform.v1.RagFile.Builder, com.google.cloud.aiplatform.v1.RagFileOrBuilder> getRagFilesFieldBuilder() { if (ragFilesBuilder_ == null) { ragFilesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.aiplatform.v1.RagFile, com.google.cloud.aiplatform.v1.RagFile.Builder, com.google.cloud.aiplatform.v1.RagFileOrBuilder>( ragFiles_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); ragFiles_ = null; } return ragFilesBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token to retrieve the next page of results. 
* Pass to * [ListRagFilesRequest.page_token][google.cloud.aiplatform.v1.ListRagFilesRequest.page_token] * to obtain that page. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A token to retrieve the next page of results. * Pass to * [ListRagFilesRequest.page_token][google.cloud.aiplatform.v1.ListRagFilesRequest.page_token] * to obtain that page. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A token to retrieve the next page of results. * Pass to * [ListRagFilesRequest.page_token][google.cloud.aiplatform.v1.ListRagFilesRequest.page_token] * to obtain that page. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A token to retrieve the next page of results. * Pass to * [ListRagFilesRequest.page_token][google.cloud.aiplatform.v1.ListRagFilesRequest.page_token] * to obtain that page. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. 
*/ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * A token to retrieve the next page of results. * Pass to * [ListRagFilesRequest.page_token][google.cloud.aiplatform.v1.ListRagFilesRequest.page_token] * to obtain that page. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.ListRagFilesResponse) } // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.ListRagFilesResponse) private static final com.google.cloud.aiplatform.v1.ListRagFilesResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.ListRagFilesResponse(); } public static com.google.cloud.aiplatform.v1.ListRagFilesResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListRagFilesResponse> PARSER = new com.google.protobuf.AbstractParser<ListRagFilesResponse>() { @java.lang.Override public ListRagFilesResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, 
extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListRagFilesResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListRagFilesResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.aiplatform.v1.ListRagFilesResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
36,573
java-dialogflow-cx/proto-google-cloud-dialogflow-cx-v3beta1/src/main/java/com/google/cloud/dialogflow/cx/v3beta1/UpdateGenerativeSettingsRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/dialogflow/cx/v3beta1/agent.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.dialogflow.cx.v3beta1; /** * * * <pre> * Request for * [UpdateGenerativeSettings][google.cloud.dialogflow.cx.v3beta1.Agents.UpdateGenerativeSettings] * RPC. * </pre> * * Protobuf type {@code google.cloud.dialogflow.cx.v3beta1.UpdateGenerativeSettingsRequest} */ public final class UpdateGenerativeSettingsRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.dialogflow.cx.v3beta1.UpdateGenerativeSettingsRequest) UpdateGenerativeSettingsRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateGenerativeSettingsRequest.newBuilder() to construct. 
private UpdateGenerativeSettingsRequest( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateGenerativeSettingsRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateGenerativeSettingsRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dialogflow.cx.v3beta1.AgentProto .internal_static_google_cloud_dialogflow_cx_v3beta1_UpdateGenerativeSettingsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dialogflow.cx.v3beta1.AgentProto .internal_static_google_cloud_dialogflow_cx_v3beta1_UpdateGenerativeSettingsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dialogflow.cx.v3beta1.UpdateGenerativeSettingsRequest.class, com.google.cloud.dialogflow.cx.v3beta1.UpdateGenerativeSettingsRequest.Builder.class); } private int bitField0_; public static final int GENERATIVE_SETTINGS_FIELD_NUMBER = 1; private com.google.cloud.dialogflow.cx.v3beta1.GenerativeSettings generativeSettings_; /** * * * <pre> * Required. Generative settings to update. * </pre> * * <code> * .google.cloud.dialogflow.cx.v3beta1.GenerativeSettings generative_settings = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the generativeSettings field is set. */ @java.lang.Override public boolean hasGenerativeSettings() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. Generative settings to update. * </pre> * * <code> * .google.cloud.dialogflow.cx.v3beta1.GenerativeSettings generative_settings = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The generativeSettings. 
*/ @java.lang.Override public com.google.cloud.dialogflow.cx.v3beta1.GenerativeSettings getGenerativeSettings() { return generativeSettings_ == null ? com.google.cloud.dialogflow.cx.v3beta1.GenerativeSettings.getDefaultInstance() : generativeSettings_; } /** * * * <pre> * Required. Generative settings to update. * </pre> * * <code> * .google.cloud.dialogflow.cx.v3beta1.GenerativeSettings generative_settings = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.dialogflow.cx.v3beta1.GenerativeSettingsOrBuilder getGenerativeSettingsOrBuilder() { return generativeSettings_ == null ? com.google.cloud.dialogflow.cx.v3beta1.GenerativeSettings.getDefaultInstance() : generativeSettings_; } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Optional. The mask to control which fields get updated. If the mask is not * present, all fields will be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Optional. The mask to control which fields get updated. If the mask is not * present, all fields will be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Optional. The mask to control which fields get updated. If the mask is not * present, all fields will be updated. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getGenerativeSettings()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getGenerativeSettings()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.dialogflow.cx.v3beta1.UpdateGenerativeSettingsRequest)) { return super.equals(obj); } com.google.cloud.dialogflow.cx.v3beta1.UpdateGenerativeSettingsRequest other = (com.google.cloud.dialogflow.cx.v3beta1.UpdateGenerativeSettingsRequest) obj; if (hasGenerativeSettings() != other.hasGenerativeSettings()) return false; if (hasGenerativeSettings()) { if (!getGenerativeSettings().equals(other.getGenerativeSettings())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; 
if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasGenerativeSettings()) { hash = (37 * hash) + GENERATIVE_SETTINGS_FIELD_NUMBER; hash = (53 * hash) + getGenerativeSettings().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.dialogflow.cx.v3beta1.UpdateGenerativeSettingsRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.cx.v3beta1.UpdateGenerativeSettingsRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3beta1.UpdateGenerativeSettingsRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.cx.v3beta1.UpdateGenerativeSettingsRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3beta1.UpdateGenerativeSettingsRequest parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
com.google.cloud.dialogflow.cx.v3beta1.UpdateGenerativeSettingsRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3beta1.UpdateGenerativeSettingsRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.cx.v3beta1.UpdateGenerativeSettingsRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3beta1.UpdateGenerativeSettingsRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.cx.v3beta1.UpdateGenerativeSettingsRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3beta1.UpdateGenerativeSettingsRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.cx.v3beta1.UpdateGenerativeSettingsRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); 
} @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.dialogflow.cx.v3beta1.UpdateGenerativeSettingsRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request for * [UpdateGenerativeSettings][google.cloud.dialogflow.cx.v3beta1.Agents.UpdateGenerativeSettings] * RPC. * </pre> * * Protobuf type {@code google.cloud.dialogflow.cx.v3beta1.UpdateGenerativeSettingsRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.cx.v3beta1.UpdateGenerativeSettingsRequest) com.google.cloud.dialogflow.cx.v3beta1.UpdateGenerativeSettingsRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dialogflow.cx.v3beta1.AgentProto .internal_static_google_cloud_dialogflow_cx_v3beta1_UpdateGenerativeSettingsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dialogflow.cx.v3beta1.AgentProto .internal_static_google_cloud_dialogflow_cx_v3beta1_UpdateGenerativeSettingsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dialogflow.cx.v3beta1.UpdateGenerativeSettingsRequest.class, com.google.cloud.dialogflow.cx.v3beta1.UpdateGenerativeSettingsRequest.Builder.class); } // Construct using // 
com.google.cloud.dialogflow.cx.v3beta1.UpdateGenerativeSettingsRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getGenerativeSettingsFieldBuilder(); getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; generativeSettings_ = null; if (generativeSettingsBuilder_ != null) { generativeSettingsBuilder_.dispose(); generativeSettingsBuilder_ = null; } updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.dialogflow.cx.v3beta1.AgentProto .internal_static_google_cloud_dialogflow_cx_v3beta1_UpdateGenerativeSettingsRequest_descriptor; } @java.lang.Override public com.google.cloud.dialogflow.cx.v3beta1.UpdateGenerativeSettingsRequest getDefaultInstanceForType() { return com.google.cloud.dialogflow.cx.v3beta1.UpdateGenerativeSettingsRequest .getDefaultInstance(); } @java.lang.Override public com.google.cloud.dialogflow.cx.v3beta1.UpdateGenerativeSettingsRequest build() { com.google.cloud.dialogflow.cx.v3beta1.UpdateGenerativeSettingsRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.dialogflow.cx.v3beta1.UpdateGenerativeSettingsRequest buildPartial() { com.google.cloud.dialogflow.cx.v3beta1.UpdateGenerativeSettingsRequest result = new com.google.cloud.dialogflow.cx.v3beta1.UpdateGenerativeSettingsRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( 
com.google.cloud.dialogflow.cx.v3beta1.UpdateGenerativeSettingsRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.generativeSettings_ = generativeSettingsBuilder_ == null ? generativeSettings_ : generativeSettingsBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.dialogflow.cx.v3beta1.UpdateGenerativeSettingsRequest) { return mergeFrom( (com.google.cloud.dialogflow.cx.v3beta1.UpdateGenerativeSettingsRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.dialogflow.cx.v3beta1.UpdateGenerativeSettingsRequest other) { if (other == com.google.cloud.dialogflow.cx.v3beta1.UpdateGenerativeSettingsRequest .getDefaultInstance()) return this; if 
(other.hasGenerativeSettings()) { mergeGenerativeSettings(other.getGenerativeSettings()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage( getGenerativeSettingsFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloud.dialogflow.cx.v3beta1.GenerativeSettings generativeSettings_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dialogflow.cx.v3beta1.GenerativeSettings, com.google.cloud.dialogflow.cx.v3beta1.GenerativeSettings.Builder, com.google.cloud.dialogflow.cx.v3beta1.GenerativeSettingsOrBuilder> generativeSettingsBuilder_; /** * * * <pre> * Required. Generative settings to update. * </pre> * * <code> * .google.cloud.dialogflow.cx.v3beta1.GenerativeSettings generative_settings = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the generativeSettings field is set. 
*/ public boolean hasGenerativeSettings() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. Generative settings to update. * </pre> * * <code> * .google.cloud.dialogflow.cx.v3beta1.GenerativeSettings generative_settings = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The generativeSettings. */ public com.google.cloud.dialogflow.cx.v3beta1.GenerativeSettings getGenerativeSettings() { if (generativeSettingsBuilder_ == null) { return generativeSettings_ == null ? com.google.cloud.dialogflow.cx.v3beta1.GenerativeSettings.getDefaultInstance() : generativeSettings_; } else { return generativeSettingsBuilder_.getMessage(); } } /** * * * <pre> * Required. Generative settings to update. * </pre> * * <code> * .google.cloud.dialogflow.cx.v3beta1.GenerativeSettings generative_settings = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setGenerativeSettings( com.google.cloud.dialogflow.cx.v3beta1.GenerativeSettings value) { if (generativeSettingsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } generativeSettings_ = value; } else { generativeSettingsBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. Generative settings to update. * </pre> * * <code> * .google.cloud.dialogflow.cx.v3beta1.GenerativeSettings generative_settings = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setGenerativeSettings( com.google.cloud.dialogflow.cx.v3beta1.GenerativeSettings.Builder builderForValue) { if (generativeSettingsBuilder_ == null) { generativeSettings_ = builderForValue.build(); } else { generativeSettingsBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. Generative settings to update. 
* </pre> * * <code> * .google.cloud.dialogflow.cx.v3beta1.GenerativeSettings generative_settings = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeGenerativeSettings( com.google.cloud.dialogflow.cx.v3beta1.GenerativeSettings value) { if (generativeSettingsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && generativeSettings_ != null && generativeSettings_ != com.google.cloud.dialogflow.cx.v3beta1.GenerativeSettings.getDefaultInstance()) { getGenerativeSettingsBuilder().mergeFrom(value); } else { generativeSettings_ = value; } } else { generativeSettingsBuilder_.mergeFrom(value); } if (generativeSettings_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. Generative settings to update. * </pre> * * <code> * .google.cloud.dialogflow.cx.v3beta1.GenerativeSettings generative_settings = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearGenerativeSettings() { bitField0_ = (bitField0_ & ~0x00000001); generativeSettings_ = null; if (generativeSettingsBuilder_ != null) { generativeSettingsBuilder_.dispose(); generativeSettingsBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. Generative settings to update. * </pre> * * <code> * .google.cloud.dialogflow.cx.v3beta1.GenerativeSettings generative_settings = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.dialogflow.cx.v3beta1.GenerativeSettings.Builder getGenerativeSettingsBuilder() { bitField0_ |= 0x00000001; onChanged(); return getGenerativeSettingsFieldBuilder().getBuilder(); } /** * * * <pre> * Required. Generative settings to update. 
* </pre> * * <code> * .google.cloud.dialogflow.cx.v3beta1.GenerativeSettings generative_settings = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.dialogflow.cx.v3beta1.GenerativeSettingsOrBuilder getGenerativeSettingsOrBuilder() { if (generativeSettingsBuilder_ != null) { return generativeSettingsBuilder_.getMessageOrBuilder(); } else { return generativeSettings_ == null ? com.google.cloud.dialogflow.cx.v3beta1.GenerativeSettings.getDefaultInstance() : generativeSettings_; } } /** * * * <pre> * Required. Generative settings to update. * </pre> * * <code> * .google.cloud.dialogflow.cx.v3beta1.GenerativeSettings generative_settings = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dialogflow.cx.v3beta1.GenerativeSettings, com.google.cloud.dialogflow.cx.v3beta1.GenerativeSettings.Builder, com.google.cloud.dialogflow.cx.v3beta1.GenerativeSettingsOrBuilder> getGenerativeSettingsFieldBuilder() { if (generativeSettingsBuilder_ == null) { generativeSettingsBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dialogflow.cx.v3beta1.GenerativeSettings, com.google.cloud.dialogflow.cx.v3beta1.GenerativeSettings.Builder, com.google.cloud.dialogflow.cx.v3beta1.GenerativeSettingsOrBuilder>( getGenerativeSettings(), getParentForChildren(), isClean()); generativeSettings_ = null; } return generativeSettingsBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Optional. The mask to control which fields get updated. If the mask is not * present, all fields will be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the updateMask field is set. 
*/ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Optional. The mask to control which fields get updated. If the mask is not * present, all fields will be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Optional. The mask to control which fields get updated. If the mask is not * present, all fields will be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. The mask to control which fields get updated. If the mask is not * present, all fields will be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. The mask to control which fields get updated. If the mask is not * present, all fields will be updated. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Optional. The mask to control which fields get updated. If the mask is not * present, all fields will be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000002); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Optional. The mask to control which fields get updated. If the mask is not * present, all fields will be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Optional. The mask to control which fields get updated. If the mask is not * present, all fields will be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Optional. 
The mask to control which fields get updated. If the mask is not * present, all fields will be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.cx.v3beta1.UpdateGenerativeSettingsRequest) } // @@protoc_insertion_point(class_scope:google.cloud.dialogflow.cx.v3beta1.UpdateGenerativeSettingsRequest) private static final com.google.cloud.dialogflow.cx.v3beta1.UpdateGenerativeSettingsRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.dialogflow.cx.v3beta1.UpdateGenerativeSettingsRequest(); } public static com.google.cloud.dialogflow.cx.v3beta1.UpdateGenerativeSettingsRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateGenerativeSettingsRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateGenerativeSettingsRequest>() { @java.lang.Override public UpdateGenerativeSettingsRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateGenerativeSettingsRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateGenerativeSettingsRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.dialogflow.cx.v3beta1.UpdateGenerativeSettingsRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
36,810
java-compute/google-cloud-compute/src/main/java/com/google/cloud/compute/v1/stub/HttpJsonHealthChecksStub.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.compute.v1.stub; import static com.google.cloud.compute.v1.HealthChecksClient.AggregatedListPagedResponse; import static com.google.cloud.compute.v1.HealthChecksClient.ListPagedResponse; import com.google.api.core.InternalApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.core.BackgroundResourceAggregation; import com.google.api.gax.httpjson.ApiMethodDescriptor; import com.google.api.gax.httpjson.HttpJsonCallSettings; import com.google.api.gax.httpjson.HttpJsonOperationSnapshot; import com.google.api.gax.httpjson.HttpJsonStubCallableFactory; import com.google.api.gax.httpjson.ProtoMessageRequestFormatter; import com.google.api.gax.httpjson.ProtoMessageResponseParser; import com.google.api.gax.httpjson.ProtoRestSerializer; import com.google.api.gax.rpc.ClientContext; import com.google.api.gax.rpc.OperationCallable; import com.google.api.gax.rpc.RequestParamsBuilder; import com.google.api.gax.rpc.UnaryCallable; import com.google.cloud.compute.v1.AggregatedListHealthChecksRequest; import com.google.cloud.compute.v1.DeleteHealthCheckRequest; import com.google.cloud.compute.v1.GetHealthCheckRequest; import com.google.cloud.compute.v1.HealthCheck; import com.google.cloud.compute.v1.HealthCheckList; import com.google.cloud.compute.v1.HealthChecksAggregatedList; import com.google.cloud.compute.v1.InsertHealthCheckRequest; import 
com.google.cloud.compute.v1.ListHealthChecksRequest; import com.google.cloud.compute.v1.Operation; import com.google.cloud.compute.v1.Operation.Status; import com.google.cloud.compute.v1.PatchHealthCheckRequest; import com.google.cloud.compute.v1.UpdateHealthCheckRequest; import com.google.protobuf.TypeRegistry; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; import javax.annotation.Generated; // AUTO-GENERATED DOCUMENTATION AND CLASS. /** * REST stub implementation for the HealthChecks service API. * * <p>This class is for advanced usage and reflects the underlying API directly. */ @Generated("by gapic-generator-java") public class HttpJsonHealthChecksStub extends HealthChecksStub { private static final TypeRegistry typeRegistry = TypeRegistry.newBuilder().add(Operation.getDescriptor()).build(); private static final ApiMethodDescriptor< AggregatedListHealthChecksRequest, HealthChecksAggregatedList> aggregatedListMethodDescriptor = ApiMethodDescriptor .<AggregatedListHealthChecksRequest, HealthChecksAggregatedList>newBuilder() .setFullMethodName("google.cloud.compute.v1.HealthChecks/AggregatedList") .setHttpMethod("GET") .setType(ApiMethodDescriptor.MethodType.UNARY) .setRequestFormatter( ProtoMessageRequestFormatter.<AggregatedListHealthChecksRequest>newBuilder() .setPath( "/compute/v1/projects/{project}/aggregated/healthChecks", request -> { Map<String, String> fields = new HashMap<>(); ProtoRestSerializer<AggregatedListHealthChecksRequest> serializer = ProtoRestSerializer.create(); serializer.putPathParam(fields, "project", request.getProject()); return fields; }) .setQueryParamsExtractor( request -> { Map<String, List<String>> fields = new HashMap<>(); ProtoRestSerializer<AggregatedListHealthChecksRequest> serializer = ProtoRestSerializer.create(); if (request.hasFilter()) { serializer.putQueryParam(fields, "filter", request.getFilter()); } if 
(request.hasIncludeAllScopes()) { serializer.putQueryParam( fields, "includeAllScopes", request.getIncludeAllScopes()); } if (request.hasMaxResults()) { serializer.putQueryParam( fields, "maxResults", request.getMaxResults()); } if (request.hasOrderBy()) { serializer.putQueryParam(fields, "orderBy", request.getOrderBy()); } if (request.hasPageToken()) { serializer.putQueryParam(fields, "pageToken", request.getPageToken()); } if (request.hasReturnPartialSuccess()) { serializer.putQueryParam( fields, "returnPartialSuccess", request.getReturnPartialSuccess()); } if (request.hasServiceProjectNumber()) { serializer.putQueryParam( fields, "serviceProjectNumber", request.getServiceProjectNumber()); } return fields; }) .setRequestBodyExtractor(request -> null) .build()) .setResponseParser( ProtoMessageResponseParser.<HealthChecksAggregatedList>newBuilder() .setDefaultInstance(HealthChecksAggregatedList.getDefaultInstance()) .setDefaultTypeRegistry(typeRegistry) .build()) .build(); private static final ApiMethodDescriptor<DeleteHealthCheckRequest, Operation> deleteMethodDescriptor = ApiMethodDescriptor.<DeleteHealthCheckRequest, Operation>newBuilder() .setFullMethodName("google.cloud.compute.v1.HealthChecks/Delete") .setHttpMethod("DELETE") .setType(ApiMethodDescriptor.MethodType.UNARY) .setRequestFormatter( ProtoMessageRequestFormatter.<DeleteHealthCheckRequest>newBuilder() .setPath( "/compute/v1/projects/{project}/global/healthChecks/{healthCheck}", request -> { Map<String, String> fields = new HashMap<>(); ProtoRestSerializer<DeleteHealthCheckRequest> serializer = ProtoRestSerializer.create(); serializer.putPathParam( fields, "healthCheck", request.getHealthCheck()); serializer.putPathParam(fields, "project", request.getProject()); return fields; }) .setQueryParamsExtractor( request -> { Map<String, List<String>> fields = new HashMap<>(); ProtoRestSerializer<DeleteHealthCheckRequest> serializer = ProtoRestSerializer.create(); if (request.hasRequestId()) { 
serializer.putQueryParam(fields, "requestId", request.getRequestId()); } return fields; }) .setRequestBodyExtractor(request -> null) .build()) .setResponseParser( ProtoMessageResponseParser.<Operation>newBuilder() .setDefaultInstance(Operation.getDefaultInstance()) .setDefaultTypeRegistry(typeRegistry) .build()) .setOperationSnapshotFactory( (DeleteHealthCheckRequest request, Operation response) -> { StringBuilder opName = new StringBuilder(response.getName()); opName.append(":").append(request.getProject()); return HttpJsonOperationSnapshot.newBuilder() .setName(opName.toString()) .setMetadata(response) .setDone(Status.DONE.equals(response.getStatus())) .setResponse(response) .setError(response.getHttpErrorStatusCode(), response.getHttpErrorMessage()) .build(); }) .build(); private static final ApiMethodDescriptor<GetHealthCheckRequest, HealthCheck> getMethodDescriptor = ApiMethodDescriptor.<GetHealthCheckRequest, HealthCheck>newBuilder() .setFullMethodName("google.cloud.compute.v1.HealthChecks/Get") .setHttpMethod("GET") .setType(ApiMethodDescriptor.MethodType.UNARY) .setRequestFormatter( ProtoMessageRequestFormatter.<GetHealthCheckRequest>newBuilder() .setPath( "/compute/v1/projects/{project}/global/healthChecks/{healthCheck}", request -> { Map<String, String> fields = new HashMap<>(); ProtoRestSerializer<GetHealthCheckRequest> serializer = ProtoRestSerializer.create(); serializer.putPathParam(fields, "healthCheck", request.getHealthCheck()); serializer.putPathParam(fields, "project", request.getProject()); return fields; }) .setQueryParamsExtractor( request -> { Map<String, List<String>> fields = new HashMap<>(); ProtoRestSerializer<GetHealthCheckRequest> serializer = ProtoRestSerializer.create(); return fields; }) .setRequestBodyExtractor(request -> null) .build()) .setResponseParser( ProtoMessageResponseParser.<HealthCheck>newBuilder() .setDefaultInstance(HealthCheck.getDefaultInstance()) .setDefaultTypeRegistry(typeRegistry) .build()) .build(); private 
static final ApiMethodDescriptor<InsertHealthCheckRequest, Operation> insertMethodDescriptor = ApiMethodDescriptor.<InsertHealthCheckRequest, Operation>newBuilder() .setFullMethodName("google.cloud.compute.v1.HealthChecks/Insert") .setHttpMethod("POST") .setType(ApiMethodDescriptor.MethodType.UNARY) .setRequestFormatter( ProtoMessageRequestFormatter.<InsertHealthCheckRequest>newBuilder() .setPath( "/compute/v1/projects/{project}/global/healthChecks", request -> { Map<String, String> fields = new HashMap<>(); ProtoRestSerializer<InsertHealthCheckRequest> serializer = ProtoRestSerializer.create(); serializer.putPathParam(fields, "project", request.getProject()); return fields; }) .setQueryParamsExtractor( request -> { Map<String, List<String>> fields = new HashMap<>(); ProtoRestSerializer<InsertHealthCheckRequest> serializer = ProtoRestSerializer.create(); if (request.hasRequestId()) { serializer.putQueryParam(fields, "requestId", request.getRequestId()); } return fields; }) .setRequestBodyExtractor( request -> ProtoRestSerializer.create() .toBody( "healthCheckResource", request.getHealthCheckResource(), false)) .build()) .setResponseParser( ProtoMessageResponseParser.<Operation>newBuilder() .setDefaultInstance(Operation.getDefaultInstance()) .setDefaultTypeRegistry(typeRegistry) .build()) .setOperationSnapshotFactory( (InsertHealthCheckRequest request, Operation response) -> { StringBuilder opName = new StringBuilder(response.getName()); opName.append(":").append(request.getProject()); return HttpJsonOperationSnapshot.newBuilder() .setName(opName.toString()) .setMetadata(response) .setDone(Status.DONE.equals(response.getStatus())) .setResponse(response) .setError(response.getHttpErrorStatusCode(), response.getHttpErrorMessage()) .build(); }) .build(); private static final ApiMethodDescriptor<ListHealthChecksRequest, HealthCheckList> listMethodDescriptor = ApiMethodDescriptor.<ListHealthChecksRequest, HealthCheckList>newBuilder() 
.setFullMethodName("google.cloud.compute.v1.HealthChecks/List") .setHttpMethod("GET") .setType(ApiMethodDescriptor.MethodType.UNARY) .setRequestFormatter( ProtoMessageRequestFormatter.<ListHealthChecksRequest>newBuilder() .setPath( "/compute/v1/projects/{project}/global/healthChecks", request -> { Map<String, String> fields = new HashMap<>(); ProtoRestSerializer<ListHealthChecksRequest> serializer = ProtoRestSerializer.create(); serializer.putPathParam(fields, "project", request.getProject()); return fields; }) .setQueryParamsExtractor( request -> { Map<String, List<String>> fields = new HashMap<>(); ProtoRestSerializer<ListHealthChecksRequest> serializer = ProtoRestSerializer.create(); if (request.hasFilter()) { serializer.putQueryParam(fields, "filter", request.getFilter()); } if (request.hasMaxResults()) { serializer.putQueryParam( fields, "maxResults", request.getMaxResults()); } if (request.hasOrderBy()) { serializer.putQueryParam(fields, "orderBy", request.getOrderBy()); } if (request.hasPageToken()) { serializer.putQueryParam(fields, "pageToken", request.getPageToken()); } if (request.hasReturnPartialSuccess()) { serializer.putQueryParam( fields, "returnPartialSuccess", request.getReturnPartialSuccess()); } return fields; }) .setRequestBodyExtractor(request -> null) .build()) .setResponseParser( ProtoMessageResponseParser.<HealthCheckList>newBuilder() .setDefaultInstance(HealthCheckList.getDefaultInstance()) .setDefaultTypeRegistry(typeRegistry) .build()) .build(); private static final ApiMethodDescriptor<PatchHealthCheckRequest, Operation> patchMethodDescriptor = ApiMethodDescriptor.<PatchHealthCheckRequest, Operation>newBuilder() .setFullMethodName("google.cloud.compute.v1.HealthChecks/Patch") .setHttpMethod("PATCH") .setType(ApiMethodDescriptor.MethodType.UNARY) .setRequestFormatter( ProtoMessageRequestFormatter.<PatchHealthCheckRequest>newBuilder() .setPath( "/compute/v1/projects/{project}/global/healthChecks/{healthCheck}", request -> { Map<String, 
String> fields = new HashMap<>(); ProtoRestSerializer<PatchHealthCheckRequest> serializer = ProtoRestSerializer.create(); serializer.putPathParam( fields, "healthCheck", request.getHealthCheck()); serializer.putPathParam(fields, "project", request.getProject()); return fields; }) .setQueryParamsExtractor( request -> { Map<String, List<String>> fields = new HashMap<>(); ProtoRestSerializer<PatchHealthCheckRequest> serializer = ProtoRestSerializer.create(); if (request.hasRequestId()) { serializer.putQueryParam(fields, "requestId", request.getRequestId()); } return fields; }) .setRequestBodyExtractor( request -> ProtoRestSerializer.create() .toBody( "healthCheckResource", request.getHealthCheckResource(), false)) .build()) .setResponseParser( ProtoMessageResponseParser.<Operation>newBuilder() .setDefaultInstance(Operation.getDefaultInstance()) .setDefaultTypeRegistry(typeRegistry) .build()) .setOperationSnapshotFactory( (PatchHealthCheckRequest request, Operation response) -> { StringBuilder opName = new StringBuilder(response.getName()); opName.append(":").append(request.getProject()); return HttpJsonOperationSnapshot.newBuilder() .setName(opName.toString()) .setMetadata(response) .setDone(Status.DONE.equals(response.getStatus())) .setResponse(response) .setError(response.getHttpErrorStatusCode(), response.getHttpErrorMessage()) .build(); }) .build(); private static final ApiMethodDescriptor<UpdateHealthCheckRequest, Operation> updateMethodDescriptor = ApiMethodDescriptor.<UpdateHealthCheckRequest, Operation>newBuilder() .setFullMethodName("google.cloud.compute.v1.HealthChecks/Update") .setHttpMethod("PUT") .setType(ApiMethodDescriptor.MethodType.UNARY) .setRequestFormatter( ProtoMessageRequestFormatter.<UpdateHealthCheckRequest>newBuilder() .setPath( "/compute/v1/projects/{project}/global/healthChecks/{healthCheck}", request -> { Map<String, String> fields = new HashMap<>(); ProtoRestSerializer<UpdateHealthCheckRequest> serializer = ProtoRestSerializer.create(); 
serializer.putPathParam( fields, "healthCheck", request.getHealthCheck()); serializer.putPathParam(fields, "project", request.getProject()); return fields; }) .setQueryParamsExtractor( request -> { Map<String, List<String>> fields = new HashMap<>(); ProtoRestSerializer<UpdateHealthCheckRequest> serializer = ProtoRestSerializer.create(); if (request.hasRequestId()) { serializer.putQueryParam(fields, "requestId", request.getRequestId()); } return fields; }) .setRequestBodyExtractor( request -> ProtoRestSerializer.create() .toBody( "healthCheckResource", request.getHealthCheckResource(), false)) .build()) .setResponseParser( ProtoMessageResponseParser.<Operation>newBuilder() .setDefaultInstance(Operation.getDefaultInstance()) .setDefaultTypeRegistry(typeRegistry) .build()) .setOperationSnapshotFactory( (UpdateHealthCheckRequest request, Operation response) -> { StringBuilder opName = new StringBuilder(response.getName()); opName.append(":").append(request.getProject()); return HttpJsonOperationSnapshot.newBuilder() .setName(opName.toString()) .setMetadata(response) .setDone(Status.DONE.equals(response.getStatus())) .setResponse(response) .setError(response.getHttpErrorStatusCode(), response.getHttpErrorMessage()) .build(); }) .build(); private final UnaryCallable<AggregatedListHealthChecksRequest, HealthChecksAggregatedList> aggregatedListCallable; private final UnaryCallable<AggregatedListHealthChecksRequest, AggregatedListPagedResponse> aggregatedListPagedCallable; private final UnaryCallable<DeleteHealthCheckRequest, Operation> deleteCallable; private final OperationCallable<DeleteHealthCheckRequest, Operation, Operation> deleteOperationCallable; private final UnaryCallable<GetHealthCheckRequest, HealthCheck> getCallable; private final UnaryCallable<InsertHealthCheckRequest, Operation> insertCallable; private final OperationCallable<InsertHealthCheckRequest, Operation, Operation> insertOperationCallable; private final UnaryCallable<ListHealthChecksRequest, 
HealthCheckList> listCallable; private final UnaryCallable<ListHealthChecksRequest, ListPagedResponse> listPagedCallable; private final UnaryCallable<PatchHealthCheckRequest, Operation> patchCallable; private final OperationCallable<PatchHealthCheckRequest, Operation, Operation> patchOperationCallable; private final UnaryCallable<UpdateHealthCheckRequest, Operation> updateCallable; private final OperationCallable<UpdateHealthCheckRequest, Operation, Operation> updateOperationCallable; private final BackgroundResource backgroundResources; private final HttpJsonGlobalOperationsStub httpJsonOperationsStub; private final HttpJsonStubCallableFactory callableFactory; public static final HttpJsonHealthChecksStub create(HealthChecksStubSettings settings) throws IOException { return new HttpJsonHealthChecksStub(settings, ClientContext.create(settings)); } public static final HttpJsonHealthChecksStub create(ClientContext clientContext) throws IOException { return new HttpJsonHealthChecksStub( HealthChecksStubSettings.newBuilder().build(), clientContext); } public static final HttpJsonHealthChecksStub create( ClientContext clientContext, HttpJsonStubCallableFactory callableFactory) throws IOException { return new HttpJsonHealthChecksStub( HealthChecksStubSettings.newBuilder().build(), clientContext, callableFactory); } /** * Constructs an instance of HttpJsonHealthChecksStub, using the given settings. This is protected * so that it is easy to make a subclass, but otherwise, the static factory methods should be * preferred. */ protected HttpJsonHealthChecksStub(HealthChecksStubSettings settings, ClientContext clientContext) throws IOException { this(settings, clientContext, new HttpJsonHealthChecksCallableFactory()); } /** * Constructs an instance of HttpJsonHealthChecksStub, using the given settings. This is protected * so that it is easy to make a subclass, but otherwise, the static factory methods should be * preferred. 
*/ protected HttpJsonHealthChecksStub( HealthChecksStubSettings settings, ClientContext clientContext, HttpJsonStubCallableFactory callableFactory) throws IOException { this.callableFactory = callableFactory; this.httpJsonOperationsStub = HttpJsonGlobalOperationsStub.create(clientContext, callableFactory); HttpJsonCallSettings<AggregatedListHealthChecksRequest, HealthChecksAggregatedList> aggregatedListTransportSettings = HttpJsonCallSettings .<AggregatedListHealthChecksRequest, HealthChecksAggregatedList>newBuilder() .setMethodDescriptor(aggregatedListMethodDescriptor) .setTypeRegistry(typeRegistry) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("project", String.valueOf(request.getProject())); return builder.build(); }) .build(); HttpJsonCallSettings<DeleteHealthCheckRequest, Operation> deleteTransportSettings = HttpJsonCallSettings.<DeleteHealthCheckRequest, Operation>newBuilder() .setMethodDescriptor(deleteMethodDescriptor) .setTypeRegistry(typeRegistry) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("health_check", String.valueOf(request.getHealthCheck())); builder.add("project", String.valueOf(request.getProject())); return builder.build(); }) .build(); HttpJsonCallSettings<GetHealthCheckRequest, HealthCheck> getTransportSettings = HttpJsonCallSettings.<GetHealthCheckRequest, HealthCheck>newBuilder() .setMethodDescriptor(getMethodDescriptor) .setTypeRegistry(typeRegistry) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("health_check", String.valueOf(request.getHealthCheck())); builder.add("project", String.valueOf(request.getProject())); return builder.build(); }) .build(); HttpJsonCallSettings<InsertHealthCheckRequest, Operation> insertTransportSettings = HttpJsonCallSettings.<InsertHealthCheckRequest, Operation>newBuilder() .setMethodDescriptor(insertMethodDescriptor) 
.setTypeRegistry(typeRegistry) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("project", String.valueOf(request.getProject())); return builder.build(); }) .build(); HttpJsonCallSettings<ListHealthChecksRequest, HealthCheckList> listTransportSettings = HttpJsonCallSettings.<ListHealthChecksRequest, HealthCheckList>newBuilder() .setMethodDescriptor(listMethodDescriptor) .setTypeRegistry(typeRegistry) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("project", String.valueOf(request.getProject())); return builder.build(); }) .build(); HttpJsonCallSettings<PatchHealthCheckRequest, Operation> patchTransportSettings = HttpJsonCallSettings.<PatchHealthCheckRequest, Operation>newBuilder() .setMethodDescriptor(patchMethodDescriptor) .setTypeRegistry(typeRegistry) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("health_check", String.valueOf(request.getHealthCheck())); builder.add("project", String.valueOf(request.getProject())); return builder.build(); }) .build(); HttpJsonCallSettings<UpdateHealthCheckRequest, Operation> updateTransportSettings = HttpJsonCallSettings.<UpdateHealthCheckRequest, Operation>newBuilder() .setMethodDescriptor(updateMethodDescriptor) .setTypeRegistry(typeRegistry) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("health_check", String.valueOf(request.getHealthCheck())); builder.add("project", String.valueOf(request.getProject())); return builder.build(); }) .build(); this.aggregatedListCallable = callableFactory.createUnaryCallable( aggregatedListTransportSettings, settings.aggregatedListSettings(), clientContext); this.aggregatedListPagedCallable = callableFactory.createPagedCallable( aggregatedListTransportSettings, settings.aggregatedListSettings(), clientContext); this.deleteCallable = 
callableFactory.createUnaryCallable( deleteTransportSettings, settings.deleteSettings(), clientContext); this.deleteOperationCallable = callableFactory.createOperationCallable( deleteTransportSettings, settings.deleteOperationSettings(), clientContext, httpJsonOperationsStub); this.getCallable = callableFactory.createUnaryCallable( getTransportSettings, settings.getSettings(), clientContext); this.insertCallable = callableFactory.createUnaryCallable( insertTransportSettings, settings.insertSettings(), clientContext); this.insertOperationCallable = callableFactory.createOperationCallable( insertTransportSettings, settings.insertOperationSettings(), clientContext, httpJsonOperationsStub); this.listCallable = callableFactory.createUnaryCallable( listTransportSettings, settings.listSettings(), clientContext); this.listPagedCallable = callableFactory.createPagedCallable( listTransportSettings, settings.listSettings(), clientContext); this.patchCallable = callableFactory.createUnaryCallable( patchTransportSettings, settings.patchSettings(), clientContext); this.patchOperationCallable = callableFactory.createOperationCallable( patchTransportSettings, settings.patchOperationSettings(), clientContext, httpJsonOperationsStub); this.updateCallable = callableFactory.createUnaryCallable( updateTransportSettings, settings.updateSettings(), clientContext); this.updateOperationCallable = callableFactory.createOperationCallable( updateTransportSettings, settings.updateOperationSettings(), clientContext, httpJsonOperationsStub); this.backgroundResources = new BackgroundResourceAggregation(clientContext.getBackgroundResources()); } @InternalApi public static List<ApiMethodDescriptor> getMethodDescriptors() { List<ApiMethodDescriptor> methodDescriptors = new ArrayList<>(); methodDescriptors.add(aggregatedListMethodDescriptor); methodDescriptors.add(deleteMethodDescriptor); methodDescriptors.add(getMethodDescriptor); methodDescriptors.add(insertMethodDescriptor); 
methodDescriptors.add(listMethodDescriptor); methodDescriptors.add(patchMethodDescriptor); methodDescriptors.add(updateMethodDescriptor); return methodDescriptors; } @Override public UnaryCallable<AggregatedListHealthChecksRequest, HealthChecksAggregatedList> aggregatedListCallable() { return aggregatedListCallable; } @Override public UnaryCallable<AggregatedListHealthChecksRequest, AggregatedListPagedResponse> aggregatedListPagedCallable() { return aggregatedListPagedCallable; } @Override public UnaryCallable<DeleteHealthCheckRequest, Operation> deleteCallable() { return deleteCallable; } @Override public OperationCallable<DeleteHealthCheckRequest, Operation, Operation> deleteOperationCallable() { return deleteOperationCallable; } @Override public UnaryCallable<GetHealthCheckRequest, HealthCheck> getCallable() { return getCallable; } @Override public UnaryCallable<InsertHealthCheckRequest, Operation> insertCallable() { return insertCallable; } @Override public OperationCallable<InsertHealthCheckRequest, Operation, Operation> insertOperationCallable() { return insertOperationCallable; } @Override public UnaryCallable<ListHealthChecksRequest, HealthCheckList> listCallable() { return listCallable; } @Override public UnaryCallable<ListHealthChecksRequest, ListPagedResponse> listPagedCallable() { return listPagedCallable; } @Override public UnaryCallable<PatchHealthCheckRequest, Operation> patchCallable() { return patchCallable; } @Override public OperationCallable<PatchHealthCheckRequest, Operation, Operation> patchOperationCallable() { return patchOperationCallable; } @Override public UnaryCallable<UpdateHealthCheckRequest, Operation> updateCallable() { return updateCallable; } @Override public OperationCallable<UpdateHealthCheckRequest, Operation, Operation> updateOperationCallable() { return updateOperationCallable; } @Override public final void close() { try { backgroundResources.close(); } catch (RuntimeException e) { throw e; } catch (Exception e) { throw new 
IllegalStateException("Failed to close resource", e); } } @Override public void shutdown() { backgroundResources.shutdown(); } @Override public boolean isShutdown() { return backgroundResources.isShutdown(); } @Override public boolean isTerminated() { return backgroundResources.isTerminated(); } @Override public void shutdownNow() { backgroundResources.shutdownNow(); } @Override public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException { return backgroundResources.awaitTermination(duration, unit); } }
apache/royale-compiler
36,553
compiler-jx/src/test/java/org/apache/royale/compiler/internal/codegen/js/sourcemaps/TestSourceMapExpressions.java
/* * * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.apache.royale.compiler.internal.codegen.js.sourcemaps; import org.apache.royale.compiler.driver.IBackend; import org.apache.royale.compiler.internal.driver.js.royale.RoyaleBackend; import org.apache.royale.compiler.internal.test.SourceMapTestBase; import org.apache.royale.compiler.internal.tree.as.ArrayLiteralNode; import org.apache.royale.compiler.internal.tree.as.ObjectLiteralNode; import org.apache.royale.compiler.tree.as.IBinaryOperatorNode; import org.apache.royale.compiler.tree.as.IDynamicAccessNode; import org.apache.royale.compiler.tree.as.IFunctionCallNode; import org.apache.royale.compiler.tree.as.IIterationFlowNode; import org.apache.royale.compiler.tree.as.IMemberAccessExpressionNode; import org.apache.royale.compiler.tree.as.INamespaceAccessExpressionNode; import org.apache.royale.compiler.tree.as.IReturnNode; import org.apache.royale.compiler.tree.as.ITernaryOperatorNode; import org.apache.royale.compiler.tree.as.IThrowNode; import org.apache.royale.compiler.tree.as.IUnaryOperatorNode; import org.junit.Test; public class TestSourceMapExpressions extends SourceMapTestBase { @Override public void setUp() { super.setUp(); project.setAllowPrivateNameConflicts(true); } 
//---------------------------------- // Primary expression keywords //---------------------------------- //---------------------------------- // Arithmetic //---------------------------------- @Test public void testVisitBinaryOperatorNode_Plus() { IBinaryOperatorNode node = getBinaryNode("a + b"); asBlockWalker.visitBinaryOperator(node); assertMapping(node, 0, 0, 0, 0, 0, 1); // a assertMapping(node, 0, 1, 0, 1, 0, 4); // + assertMapping(node, 0, 4, 0, 4, 0, 5); // b } @Test public void testVisitBinaryOperatorNode_Minus() { IBinaryOperatorNode node = getBinaryNode("a - b"); asBlockWalker.visitBinaryOperator(node); assertMapping(node, 0, 0, 0, 0, 0, 1); // a assertMapping(node, 0, 1, 0, 1, 0, 4); // - assertMapping(node, 0, 4, 0, 4, 0, 5); // b } @Test public void testVisitBinaryOperatorNode_Divide() { IBinaryOperatorNode node = getBinaryNode("a / b"); asBlockWalker.visitBinaryOperator(node); assertMapping(node, 0, 0, 0, 0, 0, 1); // a assertMapping(node, 0, 1, 0, 1, 0, 4); // / assertMapping(node, 0, 4, 0, 4, 0, 5); // b } @Test public void testVisitBinaryOperatorNode_Modulo() { IBinaryOperatorNode node = getBinaryNode("a % b"); asBlockWalker.visitBinaryOperator(node); assertMapping(node, 0, 0, 0, 0, 0, 1); // a assertMapping(node, 0, 1, 0, 1, 0, 4); // % assertMapping(node, 0, 4, 0, 4, 0, 5); // b } @Test public void testVisitBinaryOperatorNode_Multiply() { IBinaryOperatorNode node = getBinaryNode("a * b"); asBlockWalker.visitBinaryOperator(node); assertMapping(node, 0, 0, 0, 0, 0, 1); // a assertMapping(node, 0, 1, 0, 1, 0, 4); // * assertMapping(node, 0, 4, 0, 4, 0, 5); // b } @Test public void testVisitUnaryOperatorNode_PostIncrement() { IUnaryOperatorNode node = getUnaryNode("a++"); asBlockWalker.visitUnaryOperator(node); assertMapping(node, 0, 0, 0, 0, 0, 1); // a assertMapping(node, 0, 1, 0, 1, 0, 3); // ++ } @Test public void testVisitUnaryOperatorNode_PreIncrement() { IUnaryOperatorNode node = getUnaryNode("++a"); asBlockWalker.visitUnaryOperator(node); 
assertMapping(node, 0, 0, 0, 0, 0, 2); // ++ assertMapping(node, 0, 2, 0, 2, 0, 3); // a } @Test public void testVisitUnaryOperatorNode_PostDecrement() { IUnaryOperatorNode node = getUnaryNode("a--"); asBlockWalker.visitUnaryOperator(node); assertMapping(node, 0, 0, 0, 0, 0, 1); // a assertMapping(node, 0, 1, 0, 1, 0, 3); // -- } @Test public void testVisitUnaryOperatorNode_PreDecrement() { IUnaryOperatorNode node = getUnaryNode("--a"); asBlockWalker.visitUnaryOperator(node); assertMapping(node, 0, 0, 0, 0, 0, 2); // -- assertMapping(node, 0, 2, 0, 2, 0, 3); // a } //---------------------------------- // Arithmetic compound assignment //---------------------------------- @Test public void testVisitBinaryOperatorNode_PlusAssignment() { IBinaryOperatorNode node = getBinaryNode("a += b"); asBlockWalker.visitBinaryOperator(node); assertMapping(node, 0, 0, 0, 0, 0, 1); // a assertMapping(node, 0, 1, 0, 1, 0, 5); // += assertMapping(node, 0, 5, 0, 5, 0, 6); // b } @Test public void testVisitBinaryOperatorNode_MinusAssignment() { IBinaryOperatorNode node = getBinaryNode("a -= b"); asBlockWalker.visitBinaryOperator(node); assertMapping(node, 0, 0, 0, 0, 0, 1); // a assertMapping(node, 0, 1, 0, 1, 0, 5); // -= assertMapping(node, 0, 5, 0, 5, 0, 6); // b } @Test public void testVisitBinaryOperatorNode_DivideAssignment() { IBinaryOperatorNode node = getBinaryNode("a /= b"); asBlockWalker.visitBinaryOperator(node); assertMapping(node, 0, 0, 0, 0, 0, 1); // a assertMapping(node, 0, 1, 0, 1, 0, 5); // /= assertMapping(node, 0, 5, 0, 5, 0, 6); // b } @Test public void testVisitBinaryOperatorNode_ModuloAssignment() { IBinaryOperatorNode node = getBinaryNode("a %= b"); asBlockWalker.visitBinaryOperator(node); assertMapping(node, 0, 0, 0, 0, 0, 1); // a assertMapping(node, 0, 1, 0, 1, 0, 5); // %= assertMapping(node, 0, 5, 0, 5, 0, 6); // b } @Test public void testVisitBinaryOperatorNode_MultiplyAssignment() { IBinaryOperatorNode node = getBinaryNode("a *= b"); 
asBlockWalker.visitBinaryOperator(node); assertMapping(node, 0, 0, 0, 0, 0, 1); // a assertMapping(node, 0, 1, 0, 1, 0, 5); // *= assertMapping(node, 0, 5, 0, 5, 0, 6); // b } //---------------------------------- // Assignment //---------------------------------- @Test public void testVisitBinaryOperatorNode_Assignment() { IBinaryOperatorNode node = getBinaryNode("a = b"); asBlockWalker.visitBinaryOperator(node); assertMapping(node, 0, 0, 0, 0, 0, 1); // a assertMapping(node, 0, 1, 0, 1, 0, 4); // = assertMapping(node, 0, 4, 0, 4, 0, 5); // b } @Test public void testVisitBinaryOperatorNode_AssignmentLiteral() { IBinaryOperatorNode node = getBinaryNode("a = 123.2"); asBlockWalker.visitBinaryOperator(node); assertMapping(node, 0, 0, 0, 0, 0, 1); // a assertMapping(node, 0, 1, 0, 1, 0, 4); // = assertMapping(node, 0, 4, 0, 4, 0, 9); // 123.2 } @Test public void testVisitBinaryOperatorNode_AssignmentLiteralWithCompileTimeIntCoercion() { IBinaryOperatorNode node = getBinaryNode("var a:int;a = 123.2"); asBlockWalker.visitBinaryOperator(node); assertMapping(node, 0, 0, 0, 0, 0, 1); // a assertMapping(node, 0, 1, 0, 1, 0, 4); // = assertMapping(node, 0, 4, 0, 4, 0, 7); // 123 } @Test public void testVisitBinaryOperatorNode_AssignmentLiteralWithCompileTimeUintCoercion() { IBinaryOperatorNode node = getBinaryNode("var a:uint;a = -123"); asBlockWalker.visitBinaryOperator(node); assertMapping(node, 0, 0, 0, 0, 0, 1); // a assertMapping(node, 0, 1, 0, 1, 0, 4); // = assertMapping(node, 0, 4, 0, 4, 0, 14); // 4294967173 } //---------------------------------- // Bitwise //---------------------------------- @Test public void testVisitBinaryOperatorNode_BitwiseAnd() { IBinaryOperatorNode node = getBinaryNode("a & b"); asBlockWalker.visitBinaryOperator(node); assertMapping(node, 0, 0, 0, 0, 0, 1); // a assertMapping(node, 0, 1, 0, 1, 0, 4); // & assertMapping(node, 0, 4, 0, 4, 0, 5); // b } @Test public void testVisitBinaryOperatorNode_BitwiseLeftShift() { IBinaryOperatorNode node = 
getBinaryNode("a << b"); asBlockWalker.visitBinaryOperator(node); assertMapping(node, 0, 0, 0, 0, 0, 1); // a assertMapping(node, 0, 1, 0, 1, 0, 5); // << assertMapping(node, 0, 5, 0, 5, 0, 6); // b } @Test public void testVisitUnaryOperatorNode_BitwiseNot() { IUnaryOperatorNode node = getUnaryNode("~a"); asBlockWalker.visitUnaryOperator(node); assertMapping(node, 0, 0, 0, 0, 0, 1); // ~ assertMapping(node, 0, 1, 0, 1, 0, 2); // a } @Test public void testVisitBinaryOperatorNode_BitwiseOr() { IBinaryOperatorNode node = getBinaryNode("a | b"); asBlockWalker.visitBinaryOperator(node); assertMapping(node, 0, 0, 0, 0, 0, 1); // a assertMapping(node, 0, 1, 0, 1, 0, 4); // | assertMapping(node, 0, 4, 0, 4, 0, 5); // b } @Test public void testVisitBinaryOperatorNode_BitwiseRightShift() { IBinaryOperatorNode node = getBinaryNode("a >> b"); asBlockWalker.visitBinaryOperator(node); assertMapping(node, 0, 0, 0, 0, 0, 1); // a assertMapping(node, 0, 1, 0, 1, 0, 5); // >> assertMapping(node, 0, 5, 0, 5, 0, 6); // b } @Test public void testVisitBinaryOperatorNode_BitwiseUnsignedRightShift() { IBinaryOperatorNode node = getBinaryNode("a >>> b"); asBlockWalker.visitBinaryOperator(node); assertMapping(node, 0, 0, 0, 0, 0, 1); // a assertMapping(node, 0, 1, 0, 1, 0, 6); // >>> assertMapping(node, 0, 6, 0, 6, 0, 7); // b } @Test public void testVisitBinaryOperatorNode_BitwiseXOR() { IBinaryOperatorNode node = getBinaryNode("a ^ b"); asBlockWalker.visitBinaryOperator(node); assertMapping(node, 0, 0, 0, 0, 0, 1); // a assertMapping(node, 0, 1, 0, 1, 0, 4); // ^ assertMapping(node, 0, 4, 0, 4, 0, 5); // b } //---------------------------------- // Bitwise compound assignment //---------------------------------- @Test public void testVisitBinaryOperatorNode_BitwiseAndAssignment() { IBinaryOperatorNode node = getBinaryNode("a &= b"); asBlockWalker.visitBinaryOperator(node); assertMapping(node, 0, 0, 0, 0, 0, 1); // a assertMapping(node, 0, 1, 0, 1, 0, 5); // &= assertMapping(node, 0, 5, 0, 
5, 0, 6); // b } @Test public void testVisitBinaryOperatorNode_BitwiseLeftShiftAssignment() { IBinaryOperatorNode node = getBinaryNode("a <<= b"); asBlockWalker.visitBinaryOperator(node); assertMapping(node, 0, 0, 0, 0, 0, 1); // a assertMapping(node, 0, 1, 0, 1, 0, 6); // <<= assertMapping(node, 0, 6, 0, 6, 0, 7); // b } @Test public void testVisitBinaryOperatorNode_BitwiseOrAssignment() { IBinaryOperatorNode node = getBinaryNode("a |= b"); asBlockWalker.visitBinaryOperator(node); assertMapping(node, 0, 0, 0, 0, 0, 1); // a assertMapping(node, 0, 1, 0, 1, 0, 5); // |= assertMapping(node, 0, 5, 0, 5, 0, 6); // b } @Test public void testVisitBinaryOperatorNode_BitwiseRightShiftAssignment() { IBinaryOperatorNode node = getBinaryNode("a >>= b"); asBlockWalker.visitBinaryOperator(node); assertMapping(node, 0, 0, 0, 0, 0, 1); // a assertMapping(node, 0, 1, 0, 1, 0, 6); // >>= assertMapping(node, 0, 6, 0, 6, 0, 7); // b } @Test public void testVisitBinaryOperatorNode_BitwiseUnsignedRightShiftAssignment() { IBinaryOperatorNode node = getBinaryNode("a >>>= b"); asBlockWalker.visitBinaryOperator(node); assertMapping(node, 0, 0, 0, 0, 0, 1); // a assertMapping(node, 0, 1, 0, 1, 0, 7); // >>>= assertMapping(node, 0, 7, 0, 7, 0, 8); // b } @Test public void testVisitBinaryOperatorNode_BitwiseXORAssignment() { IBinaryOperatorNode node = getBinaryNode("a ^= b"); asBlockWalker.visitBinaryOperator(node); assertMapping(node, 0, 0, 0, 0, 0, 1); // a assertMapping(node, 0, 1, 0, 1, 0, 5); // ^= assertMapping(node, 0, 5, 0, 5, 0, 6); // b } //---------------------------------- // Comparison //---------------------------------- @Test public void testVisitBinaryOperatorNode_Equal() { IBinaryOperatorNode node = getBinaryNode("a == b"); asBlockWalker.visitBinaryOperator(node); assertMapping(node, 0, 0, 0, 0, 0, 1); // a assertMapping(node, 0, 1, 0, 1, 0, 5); // == assertMapping(node, 0, 5, 0, 5, 0, 6); // b } @Test public void testVisitBinaryOperatorNode_GreaterThan() { 
IBinaryOperatorNode node = getBinaryNode("a > b"); asBlockWalker.visitBinaryOperator(node); assertMapping(node, 0, 0, 0, 0, 0, 1); // a assertMapping(node, 0, 1, 0, 1, 0, 4); // > assertMapping(node, 0, 4, 0, 4, 0, 5); // b } @Test public void testVisitBinaryOperatorNode_GreaterThanEqual() { IBinaryOperatorNode node = getBinaryNode("a >= b"); asBlockWalker.visitBinaryOperator(node); assertMapping(node, 0, 0, 0, 0, 0, 1); // a assertMapping(node, 0, 1, 0, 1, 0, 5); // >= assertMapping(node, 0, 5, 0, 5, 0, 6); // b } @Test public void testVisitBinaryOperatorNode_NotEqual() { IBinaryOperatorNode node = getBinaryNode("a != b"); asBlockWalker.visitBinaryOperator(node); assertMapping(node, 0, 0, 0, 0, 0, 1); // a assertMapping(node, 0, 1, 0, 1, 0, 5); // != assertMapping(node, 0, 5, 0, 5, 0, 6); // b } @Test public void testVisitBinaryOperatorNode_LessThan() { IBinaryOperatorNode node = getBinaryNode("a < b"); asBlockWalker.visitBinaryOperator(node); assertMapping(node, 0, 0, 0, 0, 0, 1); // a assertMapping(node, 0, 1, 0, 1, 0, 4); // < assertMapping(node, 0, 4, 0, 4, 0, 5); // b } @Test public void testVisitBinaryOperatorNode_LessThanEqual() { IBinaryOperatorNode node = getBinaryNode("a <= b"); asBlockWalker.visitBinaryOperator(node); assertMapping(node, 0, 0, 0, 0, 0, 1); // a assertMapping(node, 0, 1, 0, 1, 0, 5); // <= assertMapping(node, 0, 5, 0, 5, 0, 6); // b } @Test public void testVisitBinaryOperatorNode_StrictEqual() { IBinaryOperatorNode node = getBinaryNode("a === b"); asBlockWalker.visitBinaryOperator(node); assertMapping(node, 0, 0, 0, 0, 0, 1); // a assertMapping(node, 0, 1, 0, 1, 0, 6); // === assertMapping(node, 0, 6, 0, 6, 0, 7); // b } @Test public void testVisitBinaryOperatorNode_StrictNotEqual() { IBinaryOperatorNode node = getBinaryNode("a !== b"); asBlockWalker.visitBinaryOperator(node); assertMapping(node, 0, 0, 0, 0, 0, 1); // a assertMapping(node, 0, 1, 0, 1, 0, 6); // !== assertMapping(node, 0, 6, 0, 6, 0, 7); // b } 
    // NOTE(review): each assertMapping call pins one source-map entry (source position ->
    // emitted position) for the expression under test; expected columns below encode the
    // exact JS the emitter produces for each AS3 construct.

    //----------------------------------
    // Logical
    //----------------------------------

    @Test
    public void testVisitBinaryOperatorNode_LogicalAnd()
    {
        IBinaryOperatorNode node = getBinaryNode("a && b");
        asBlockWalker.visitBinaryOperator(node);
        assertMapping(node, 0, 0, 0, 0, 0, 1); // a
        assertMapping(node, 0, 1, 0, 1, 0, 5); // &&
        assertMapping(node, 0, 5, 0, 5, 0, 6); // b
    }

    @Test
    public void testVisitBinaryOperatorNode_LogicalAndAssignment()
    {
        IBinaryOperatorNode node = getBinaryNode("a &&= b");
        asBlockWalker.visitBinaryOperator(node);
        //a = a && b
        assertMapping(node, 0, 0, 0, 0, 0, 1); // a
        assertMapping(node, 0, 1, 0, 1, 0, 4); // =
        assertMapping(node, 0, 0, 0, 4, 0, 5); // a
        assertMapping(node, 0, 1, 0, 5, 0, 9); // &&
        assertMapping(node, 0, 0, 0, 0, 0, 1); // b
    }

    @Test
    public void testVisitUnaryOperatorNode_LogicalNot()
    {
        IUnaryOperatorNode node = getUnaryNode("!a");
        asBlockWalker.visitUnaryOperator(node);
        assertMapping(node, 0, 0, 0, 0, 0, 1); // !
        assertMapping(node, 0, 1, 0, 1, 0, 2); // a
    }

    @Test
    public void testVisitBinaryOperatorNode_LogicalOr()
    {
        IBinaryOperatorNode node = getBinaryNode("a || b");
        asBlockWalker.visitBinaryOperator(node);
        assertMapping(node, 0, 0, 0, 0, 0, 1); // a
        assertMapping(node, 0, 1, 0, 1, 0, 5); // ||
        assertMapping(node, 0, 5, 0, 5, 0, 6); // b
    }

    @Test
    public void testVisitBinaryOperatorNode_LogicalOrAssignment()
    {
        IBinaryOperatorNode node = getBinaryNode("a ||= b");
        asBlockWalker.visitBinaryOperator(node);
        //a = a || b
        assertMapping(node, 0, 0, 0, 0, 0, 1); // a
        assertMapping(node, 0, 1, 0, 1, 0, 4); // =
        assertMapping(node, 0, 0, 0, 4, 0, 5); // a
        assertMapping(node, 0, 1, 0, 5, 0, 9); // ||
        assertMapping(node, 0, 0, 0, 0, 0, 1); // b
    }

    //----------------------------------
    // Other
    //----------------------------------

    @Test
    public void testVisitDynamicAccessNode_1()
    {
        IDynamicAccessNode node = getDynamicAccessNode("a[b]");
        asBlockWalker.visitDynamicAccess(node);
        assertMapping(node, 0, 0, 0, 0, 0, 1); // a
        assertMapping(node, 0, 1, 0, 1, 0, 2); // [
        assertMapping(node, 0, 2, 0, 2, 0, 3); // b
        assertMapping(node, 0, 3, 0, 3, 0, 4); // ]
    }

    @Test
    public void testVisitDynamicAccessNode_2()
    {
        IDynamicAccessNode node = getDynamicAccessNode("a[b[c][d]]");
        asBlockWalker.visitDynamicAccess(node);
        assertMapping(node, 0, 0, 0, 0, 0, 1); // a
        assertMapping(node, 0, 1, 0, 1, 0, 2); // [
        assertMapping(node, 0, 2, 0, 2, 0, 3); // b
        assertMapping(node, 0, 3, 0, 3, 0, 4); // [
        assertMapping(node, 0, 4, 0, 4, 0, 5); // c
        assertMapping(node, 0, 5, 0, 5, 0, 6); // ]
        assertMapping(node, 0, 6, 0, 6, 0, 7); // [
        assertMapping(node, 0, 7, 0, 7, 0, 8); // d
        assertMapping(node, 0, 8, 0, 8, 0, 9); // ]
        assertMapping(node, 0, 9, 0, 9, 0, 10); // ]
    }

    @Test
    public void testVisitBinaryOperatorNode_Comma()
    {
        IBinaryOperatorNode node = getBinaryNode("a, b");
        asBlockWalker.visitBinaryOperator(node);
        assertMapping(node, 0, 0, 0, 0, 0, 1); // a
        assertMapping(node, 0, 1, 0, 1, 0, 3); // ,
        assertMapping(node, 0, 3, 0, 3, 0, 4); // b
    }

    @Test
    public void testVisitTernaryOperatorNode()
    {
        ITernaryOperatorNode node = (ITernaryOperatorNode) getExpressionNode(
                "a ? b : c", ITernaryOperatorNode.class);
        asBlockWalker.visitTernaryOperator(node);
        assertMapping(node, 0, 0, 0, 0, 0, 1); // a
        assertMapping(node, 0, 1, 0, 1, 0, 4); // ?
        assertMapping(node, 0, 4, 0, 4, 0, 5); // b
        assertMapping(node, 0, 5, 0, 5, 0, 8); // :
        assertMapping(node, 0, 8, 0, 8, 0, 9); // c
    }

    @Test
    public void testVisitUnaryOperator_Delete()
    {
        IUnaryOperatorNode node = getUnaryNode("delete a");
        asBlockWalker.visitUnaryOperator(node);
        assertMapping(node, 0, 0, 0, 0, 0, 7); // delete
        assertMapping(node, 0, 7, 0, 7, 0, 8); // a
    }

    @Test
    public void testVisitMemberAccess_1()
    {
        IMemberAccessExpressionNode node = (IMemberAccessExpressionNode) getExpressionNode(
                "a.b", IMemberAccessExpressionNode.class);
        asBlockWalker.visitMemberAccessExpression(node);
        assertMapping(node, 0, 0, 0, 0, 0, 1); // a
        assertMapping(node, 0, 1, 0, 1, 0, 2); // .
        assertMapping(node, 0, 2, 0, 2, 0, 3); // b
    }

    @Test
    public void testVisitMemberAccess_2()
    {
        IMemberAccessExpressionNode node = (IMemberAccessExpressionNode) getExpressionNode(
                "a.b.c.d", IMemberAccessExpressionNode.class);
        asBlockWalker.visitMemberAccessExpression(node);
        assertMapping(node, 0, 0, 0, 0, 0, 1); // a
        assertMapping(node, 0, 1, 0, 1, 0, 2); // .
        assertMapping(node, 0, 2, 0, 2, 0, 3); // b
        assertMapping(node, 0, 3, 0, 3, 0, 4); // .
        assertMapping(node, 0, 4, 0, 4, 0, 5); // c
        assertMapping(node, 0, 5, 0, 5, 0, 6); // .
        assertMapping(node, 0, 6, 0, 6, 0, 7); // d
    }

    @Test
    public void testVisitMemberAccess_3()
    {
        IMemberAccessExpressionNode node = (IMemberAccessExpressionNode) getNode(
                "import custom.custom_namespace;use namespace custom_namespace;public class B {custom_namespace var b:Number; public function test() { var a:B = this;a.custom_namespace::b; }}",
                IMemberAccessExpressionNode.class, WRAP_LEVEL_PACKAGE);
        asBlockWalker.visitMemberAccessExpression(node);
        // a.http_$$ns_apache_org$2017$custom$namespace__b
        assertMapping(node, 0, 0, 0, 0, 0, 1); // a
        assertMapping(node, 0, 1, 0, 1, 0, 2); // .
        assertMapping(node, 0, 2, 0, 2, 0, 47, "b"); // custom_namespace::b
    }

    @Test
    public void testVisitMemberAccess_4()
    {
        INamespaceAccessExpressionNode node = (INamespaceAccessExpressionNode) getNode(
                "import custom.custom_namespace;use namespace custom_namespace;public class B {custom_namespace var b:Number; public function test() { custom_namespace::b; }}",
                INamespaceAccessExpressionNode.class, WRAP_LEVEL_PACKAGE);
        asBlockWalker.visitNamespaceAccessExpression(node);
        // this.http_$$ns_apache_org$2017$custom$namespace__b
        assertMapping(node, 0, 18, 0, 0, 0, 5); // this.
        assertMapping(node, 0, 18, 0, 5, 0, 50, "b"); // custom_namespace::b
    }

    @Test
    public void testVisitMemberAccess_5()
    {
        IMemberAccessExpressionNode node = (IMemberAccessExpressionNode) getNode(
                "import custom.custom_namespace;use namespace custom_namespace;public class B {custom_namespace var b:Number; public function test() { this.custom_namespace::b; }}",
                IMemberAccessExpressionNode.class, WRAP_LEVEL_PACKAGE);
        asBlockWalker.visitMemberAccessExpression(node);
        // this.http_$$ns_apache_org$2017$custom$namespace__b
        assertMapping(node, 0, 0, 0, 0, 0, 4); // this
        assertMapping(node, 0, 4, 0, 4, 0, 5); // .
        assertMapping(node, 0, 5, 0, 5, 0, 50, "b"); // custom_namespace::b
    }

    @Test
    public void testVisitMemberAccess_6()
    {
        IMemberAccessExpressionNode node = (IMemberAccessExpressionNode) getNode(
                "public class B {private var b:Number; public function test() { this.b; }}",
                IMemberAccessExpressionNode.class, WRAP_LEVEL_PACKAGE);
        asBlockWalker.visitMemberAccessExpression(node);
        // this.B_b
        assertMapping(node, 0, 0, 0, 0, 0, 4); // this
        assertMapping(node, 0, 4, 0, 4, 0, 5); // .
        assertMapping(node, 0, 5, 0, 5, 0, 8, "b"); // B_b
    }

    @Test
    public void testVisitMemberAccess_7()
    {
        IReturnNode node = (IReturnNode) getNode(
                "public class B {private var b:Number; public function test() { return b; }}",
                IReturnNode.class, WRAP_LEVEL_PACKAGE);
        asBlockWalker.visitReturn(node);
        // return this.B_b
        assertMapping(node, 0, 0, 0, 0, 0, 7); // return
        assertMapping(node, 0, 7, 0, 7, 0, 12); // this.
        assertMapping(node, 0, 7, 0, 12, 0, 15, "b"); // B_b
    }

    @Test
    public void testVisitBinaryOperator_In()
    {
        IBinaryOperatorNode node = getBinaryNode("a in b");
        asBlockWalker.visitBinaryOperator(node);
        assertMapping(node, 0, 0, 0, 0, 0, 1); // a
        assertMapping(node, 0, 1, 0, 1, 0, 5); // in
        assertMapping(node, 0, 5, 0, 5, 0, 6); // b
    }

    @Test
    public void testVisitBinaryOperator_Instancof()
    {
        IBinaryOperatorNode node = getBinaryNode("a instanceof b");
        asBlockWalker.visitBinaryOperator(node);
        assertMapping(node, 0, 0, 0, 0, 0, 1); // a
        assertMapping(node, 0, 1, 0, 1, 0, 13); // instanceof
        assertMapping(node, 0, 13, 0, 13, 0, 14); // b
    }

    @Test
    public void testVisitBinaryOperator_New()
    {
        IFunctionCallNode node = (IFunctionCallNode) getExpressionNode(
                "new Object()", IFunctionCallNode.class);
        asBlockWalker.visitFunctionCall(node);
        assertMapping(node, 0, 0, 0, 0, 0, 4); // new
        assertMapping(node, 0, 4, 0, 4, 0, 10); // Object
    }

    @Test
    public void testVisitObjectLiteral_1()
    {
        ObjectLiteralNode node = (ObjectLiteralNode) getExpressionNode(
                "a = {a:1}", ObjectLiteralNode.class);
        asBlockWalker.visitLiteral(node);
        //{a: 1}
        assertMapping(node, 0, 0, 0, 0, 0, 1); // {
        assertMapping(node, 0, 1, 0, 1, 0, 2); // a
        assertMapping(node, 0, 2, 0, 2, 0, 3); // :
        assertMapping(node, 0, 4, 0, 4, 0, 5); // }
    }

    @Test
    public void testVisitObjectLiteral_2()
    {
        ObjectLiteralNode node = (ObjectLiteralNode) getExpressionNode(
                "a = {a:1,b:{c:2,d:{e:4}}}", ObjectLiteralNode.class);
        asBlockWalker.visitLiteral(node);
        //{a:1, b:{c:2, d:{e:4}}}
        assertMapping(node, 0, 0, 0, 0, 0, 1); // {
        assertMapping(node, 0, 1, 0, 1, 0, 2); // a
        assertMapping(node, 0, 2, 0, 2, 0, 3); // :
        assertMapping(node, 0, 4, 0, 4, 0, 6); // ,
        assertMapping(node, 0, 5, 0, 6, 0, 7); // b
        assertMapping(node, 0, 6, 0, 7, 0, 8); // :
        assertMapping(node, 0, 7, 0, 8, 0, 9); // {
        assertMapping(node, 0, 8, 0, 9, 0, 10); // c
        assertMapping(node, 0, 9, 0, 10, 0, 11); // :
        assertMapping(node, 0, 11, 0, 12, 0, 14); // ,
        assertMapping(node, 0, 12, 0, 14, 0, 15); // d
        assertMapping(node, 0, 13, 0, 15, 0, 16); // :
        assertMapping(node, 0, 14, 0, 16, 0, 17); // {
        assertMapping(node, 0, 15, 0, 17, 0, 18); // e
        assertMapping(node, 0, 16, 0, 18, 0, 19); // :
        assertMapping(node, 0, 18, 0, 20, 0, 21); // }
        assertMapping(node, 0, 19, 0, 21, 0, 22); // }
        assertMapping(node, 0, 20, 0, 22, 0, 23); // }
    }

    @Test
    public void testVisitObjectLiteral_3()
    {
        ObjectLiteralNode node = (ObjectLiteralNode) getExpressionNode(
                "a = { a: 12, bb: 2 \t}", ObjectLiteralNode.class);
        asBlockWalker.visitLiteral(node);
        //{a:12, bb:2}
        assertMapping(node, 0, 0, 0, 0, 0, 1); // {
        assertMapping(node, 0, 2, 0, 1, 0, 2); // a
        assertMapping(node, 0, 3, 0, 2, 0, 3); // :
        assertMapping(node, 0, 7, 0, 5, 0, 7); // ,
        assertMapping(node, 0, 10, 0, 7, 0, 9); // bb
        assertMapping(node, 0, 12, 0, 9, 0, 10); // :
        assertMapping(node, 0, 19, 0, 11, 0, 12); // }
    }

    @Test
    public void testVisitObjectLiteral_4()
    {
        ObjectLiteralNode node = (ObjectLiteralNode) getExpressionNode(
                "a = {a:1,\nb:2}", ObjectLiteralNode.class);
        asBlockWalker.visitLiteral(node);
        //{a:1, b:2}
        assertMapping(node, 0, 0, 0, 0, 0, 1); // {
        assertMapping(node, 0, 1, 0, 1, 0, 2); // a
        assertMapping(node, 0, 2, 0, 2, 0, 3); // :
        assertMapping(node, 0, 3, 0, 3, 0, 4); // 1
        assertMapping(node, 0, 4, 0, 4, 0, 6); // ,
        assertMapping(node, 1, 0, 0, 6, 0, 7); // b
        assertMapping(node, 1, 1, 0, 7, 0, 8); // :
        assertMapping(node, 1, 2, 0, 8, 0, 9); // 2
        assertMapping(node, 1, 3, 0, 9, 0, 10); // }
    }

    @Test
    public void testVisitArrayLiteral_1()
    {
        ArrayLiteralNode node = (ArrayLiteralNode) getExpressionNode(
                "a = [0,1,2]", ArrayLiteralNode.class);
        asBlockWalker.visitLiteral(node);
        //[0, 1, 2]
        assertMapping(node, 0, 0, 0, 0, 0, 1); // [
        assertMapping(node, 0, 1, 0, 1, 0, 2); // 0
        assertMapping(node, 0, 2, 0, 2, 0, 4); // ,
        assertMapping(node, 0, 3, 0, 4, 0, 5); // 1
        assertMapping(node, 0, 4, 0, 5, 0, 7); // ,
        assertMapping(node, 0, 5, 0, 7, 0, 8); // 2
        assertMapping(node, 0, 6, 0, 8, 0, 9); // ]
    }

    @Test
    public void testVisitArrayLiteral_2()
    {
        ArrayLiteralNode node = (ArrayLiteralNode) getExpressionNode(
                "a = [0,[0,1,[0,1]],2,[1,2]]", ArrayLiteralNode.class);
        asBlockWalker.visitLiteral(node);
        //[0, [0, 1, [0, 1]], 2, [1, 2]]
        assertMapping(node, 0, 0, 0, 0, 0, 1); // [
        assertMapping(node, 0, 2, 0, 2, 0, 4); // ,
        assertMapping(node, 0, 3, 0, 4, 0, 5); // [
        assertMapping(node, 0, 5, 0, 6, 0, 8); // ,
        assertMapping(node, 0, 7, 0, 9, 0, 11); // ,
        assertMapping(node, 0, 8, 0, 11, 0, 12); // [
        assertMapping(node, 0, 10, 0, 13, 0, 15); // ,
        assertMapping(node, 0, 12, 0, 16, 0, 17); // ]
        assertMapping(node, 0, 13, 0, 17, 0, 18); // ]
        assertMapping(node, 0, 14, 0, 18, 0, 20); // ,
        assertMapping(node, 0, 16, 0, 21, 0, 23); // ,
        assertMapping(node, 0, 17, 0, 23, 0, 24); // [
        assertMapping(node, 0, 19, 0, 25, 0, 27); // ,
        assertMapping(node, 0, 21, 0, 28, 0, 29); // ]
        assertMapping(node, 0, 22, 0, 29, 0, 30); // ]
    }

    @Test
    public void testVisitArrayLiteral_3()
    {
        ArrayLiteralNode node = (ArrayLiteralNode) getExpressionNode(
                "a = [ 0, 123, 45 \t]", ArrayLiteralNode.class);
        asBlockWalker.visitLiteral(node);
        //[0, 123, 45]
        assertMapping(node, 0, 0, 0, 0, 0, 1); // [
        assertMapping(node, 0, 3, 0, 2, 0, 4); // ,
        assertMapping(node, 0, 9, 0, 7, 0, 9); // ,
        assertMapping(node, 0, 17, 0, 11, 0, 12); // ]
    }

    @Test
    public void testVisitArrayLiteral_4()
    {
        ArrayLiteralNode node = (ArrayLiteralNode) getExpressionNode(
                "a = [0,\n123, 45]", ArrayLiteralNode.class);
        asBlockWalker.visitLiteral(node);
        //[0, 123, 45]
        assertMapping(node, 0, 0, 0, 0, 0, 1); // [
        assertMapping(node, 0, 1, 0, 1, 0, 2); // 0
        assertMapping(node, 0, 2, 0, 2, 0, 4); // ,
        assertMapping(node, 1, 0, 0, 4, 0, 7); // 123
        assertMapping(node, 1, 3, 0, 7, 0, 9); // ,
        assertMapping(node, 1, 5, 0, 9, 0, 11); // 45
        assertMapping(node, 1, 7, 0, 11, 0, 12); // ] (comment fixed: this maps the closing bracket, not 45)
    }

    @Test
    public void testVisitUnaryOperatorNode_Typeof()
    {
        IUnaryOperatorNode node = getUnaryNode("typeof(a)");
        asBlockWalker.visitUnaryOperator(node);
        //typeof(a)
        assertMapping(node, 0, 0, 0, 0, 0, 7); // typeof(
        assertMapping(node, 0, 0, 0, 8, 0, 9); // )
    }

    @Test
    public void testVisitUnaryOperatorNode_Typeof_NoParens()
    {
        // TODO (mschmalle) the notation without parenthesis is also valid in AS/JS
        IUnaryOperatorNode node = getUnaryNode("typeof a");
        asBlockWalker.visitUnaryOperator(node);
        //typeof(a)
        assertMapping(node, 0, 0, 0, 0, 0, 7); // typeof(
        assertMapping(node, 0, 0, 0, 8, 0, 9); // )
    }

    @Test
    public void testVisitUnaryOperatorNode_Void()
    {
        IUnaryOperatorNode node = getUnaryNode("void a");
        asBlockWalker.visitUnaryOperator(node);
        assertMapping(node, 0, 0, 0, 0, 0, 5); // void
        assertMapping(node, 0, 5, 0, 5, 0, 6); // a
    }

    @Test
    public void testVisitIterationFlowNode_BreakWithoutLabel()
    {
        IIterationFlowNode node = (IIterationFlowNode) getNode("break",
                IIterationFlowNode.class);
        asBlockWalker.visitIterationFlow(node);
        assertMapping(node, 0, 0, 0, 0, 0, 5); // break
    }

    @Test
    public void testVisitIterationFlowNode_BreakWithLabel()
    {
        IIterationFlowNode node = (IIterationFlowNode) getNode("break label",
                IIterationFlowNode.class);
        asBlockWalker.visitIterationFlow(node);
        assertMapping(node, 0, 0, 0, 0, 0, 6); // break
        assertMapping(node, 0, 6, 0, 6, 0, 11); // label
    }

    @Test
    public void testVisitIterationFlowNode_ContinueWithoutLabel()
    {
        IIterationFlowNode node = (IIterationFlowNode) getNode("continue",
                IIterationFlowNode.class);
        asBlockWalker.visitIterationFlow(node);
        assertMapping(node, 0, 0, 0, 0, 0, 8); // continue
    }

    @Test
    public void testVisitIterationFlowNode_ContinueWithLabel()
    {
        IIterationFlowNode node = (IIterationFlowNode) getNode("continue label",
                IIterationFlowNode.class);
        asBlockWalker.visitIterationFlow(node);
        assertMapping(node, 0, 0, 0, 0, 0, 9); // continue
        assertMapping(node, 0, 9, 0, 9, 0, 14); // label
    }

    @Test
    public void testVisitReturnWithoutValue()
    {
        IReturnNode node = (IReturnNode) getNode("return", IReturnNode.class);
        asBlockWalker.visitReturn(node);
        assertMapping(node, 0, 0, 0, 0, 0, 6); // return
    }

    @Test
    public void testVisitReturnWithValue()
    {
        IReturnNode node = (IReturnNode) getNode("return 0", IReturnNode.class);
        asBlockWalker.visitReturn(node);
        assertMapping(node, 0, 0, 0, 0, 0, 7); // return
        assertMapping(node, 0, 7, 0, 7, 0, 8); // 0
    }

    @Test
    public void testThrow()
    {
        IThrowNode node = (IThrowNode) getNode("throw a", IThrowNode.class);
        asBlockWalker.visitThrow(node);
        assertMapping(node, 0, 0, 0, 0, 0, 6); // throw
        assertMapping(node, 0, 6, 0, 6, 0, 7); // a
    }

    @Test
    public void testVisitFunctionCall_1()
    {
        IFunctionCallNode node = (IFunctionCallNode) getNode("a()", IFunctionCallNode.class);
        asBlockWalker.visitFunctionCall(node);
        assertMapping(node, 0, 0, 0, 0, 0, 1); // a
        assertMapping(node, 0, 1, 0, 1, 0, 2); // (
        assertMapping(node, 0, 2, 0, 2, 0, 3); // )
    }

    @Test
    public void testVisitFunctionCall_2()
    {
        IFunctionCallNode node = (IFunctionCallNode) getNode("a(b)", IFunctionCallNode.class);
        asBlockWalker.visitFunctionCall(node);
        assertMapping(node, 0, 0, 0, 0, 0, 1); // a
        assertMapping(node, 0, 1, 0, 1, 0, 2); // (
        assertMapping(node, 0, 2, 0, 2, 0, 3); // b
        assertMapping(node, 0, 3, 0, 3, 0, 4); // )
    }

    @Test
    public void testVisitFunctionCall_3()
    {
        IFunctionCallNode node = (IFunctionCallNode) getNode("a(b, c)", IFunctionCallNode.class);
        asBlockWalker.visitFunctionCall(node);
        assertMapping(node, 0, 0, 0, 0, 0, 1); // a
        assertMapping(node, 0, 1, 0, 1, 0, 2); // (
        assertMapping(node, 0, 2, 0, 2, 0, 3); // b
        assertMapping(node, 0, 3, 0, 3, 0, 5); // ,
        assertMapping(node, 0, 5, 0, 5, 0, 6); // c
        assertMapping(node, 0, 6, 0, 6, 0, 7); // )
    }

    // Supplies the JS backend under test for this suite.
    protected IBackend createBackend()
    {
        return new RoyaleBackend();
    }
}
apache/geode
36,823
geode-core/src/main/java/org/apache/geode/cache/client/internal/ServerRegionProxy.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.cache.client.internal; import static java.util.Collections.emptySet; import static org.apache.geode.util.internal.UncheckedUtils.uncheckedCast; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Set; import java.util.function.Supplier; import org.apache.logging.log4j.Logger; import org.jetbrains.annotations.NotNull; import org.apache.geode.cache.DataPolicy; import org.apache.geode.cache.InterestResultPolicy; import org.apache.geode.cache.Operation; import org.apache.geode.cache.Region; import org.apache.geode.cache.Region.Entry; import org.apache.geode.cache.client.PoolManager; import org.apache.geode.cache.client.internal.ContainsKeyOp.MODE; import org.apache.geode.cache.execute.Function; import org.apache.geode.cache.execute.ResultCollector; import org.apache.geode.distributed.internal.ServerLocation; import org.apache.geode.internal.cache.ClientServerObserver; import org.apache.geode.internal.cache.ClientServerObserverHolder; import org.apache.geode.internal.cache.EntryEventImpl; import org.apache.geode.internal.cache.EventID; import org.apache.geode.internal.cache.EventIDHolder; import org.apache.geode.internal.cache.LocalRegion; 
import org.apache.geode.internal.cache.TXCommitMessage;
import org.apache.geode.internal.cache.TXManagerImpl;
import org.apache.geode.internal.cache.TXStateProxy;
import org.apache.geode.internal.cache.execute.ServerRegionFunctionExecutor;
import org.apache.geode.internal.cache.tier.InterestType;
import org.apache.geode.internal.cache.tier.sockets.VersionedObjectList;
import org.apache.geode.internal.cache.tier.sockets.VersionedObjectList.Iterator;
import org.apache.geode.internal.cache.tx.ClientTXStateStub;
import org.apache.geode.internal.cache.tx.TransactionalOperation.ServerRegionOperation;
import org.apache.geode.logging.internal.log4j.api.LogService;

/**
 * Used to send region operations from a client to a server
 *
 * @since GemFire 5.7
 */
public class ServerRegionProxy extends ServerProxy implements ServerRegionDataAccess {
  private static final Logger logger = LogService.getLogger();

  // May be null: the (String, InternalPool) constructor below leaves it null for
  // "fake"/admin regions. Methods that dereference it assume the Region-based constructor.
  private final LocalRegion region;
  private final String regionName;

  /**
   * Creates a server region proxy for the given region.
   *
   * @param r the region
   * @throws IllegalStateException if the region does not have a pool
   */
  public ServerRegionProxy(Region<?, ?> r) {
    super(calcPool(r));
    assert r instanceof LocalRegion;
    region = (LocalRegion) r;
    regionName = r.getFullPath();
  }

  /**
   * Used by tests to create proxies for "fake" regions. Also, used by ClientStatsManager for admin
   * region.
   */
  public ServerRegionProxy(String regionName, InternalPool pool) {
    super(pool);
    region = null;
    this.regionName = regionName;
  }

  /**
   * Resolves the client pool configured on the given region.
   *
   * @throws IllegalStateException if no pool name is configured or the named pool does not exist
   */
  private static InternalPool calcPool(Region<?, ?> r) {
    String poolName = r.getAttributes().getPoolName();
    if (poolName == null || "".equals(poolName)) {
      throw new IllegalStateException(
          "The region " + r.getFullPath() + " did not have a client pool configured.");
    } else {
      InternalPool pool = (InternalPool) PoolManager.find(poolName);
      if (pool == null) {
        throw new IllegalStateException("The pool " + poolName + " does not exist.");
      }
      return pool;
    }
  }

  /*
   * (non-Javadoc)
   *
   * @see org.apache.geode.cache.client.internal.ServerRegionDataAccess#get(java.lang.Object,
   * java.lang.Object)
   */
  @Override
  public Object get(Object key, Object callbackArg, EntryEventImpl clientEvent) {
    recordTXOperation(ServerRegionOperation.GET, key, callbackArg);
    return GetOp.execute(pool, region, key, callbackArg, pool.getPRSingleHopEnabled(),
        clientEvent);
  }

  @Override
  public int size() {
    return SizeOp.execute(pool, regionName);
  }

  /**
   * Do not call this method if the value is Delta instance. Explicitly passing
   * <code>Operation.CREATE</code> to the <code>PutOp.execute()</code> method as the caller of this
   * method does not put Delta instances as value.
* */ public Object putForMetaRegion(Object key, Object value, byte[] deltaBytes, EntryEventImpl event, Object callbackArg) { if (region == null) { return PutOp.execute(pool, regionName, key, value, deltaBytes, event, Operation.CREATE, false, null, callbackArg, pool.getPRSingleHopEnabled()); } else { return PutOp.execute(pool, region, key, value, deltaBytes, event, Operation.CREATE, false, null, callbackArg, pool.getPRSingleHopEnabled()); } } @Override public Object put(Object key, Object value, byte[] deltaBytes, EntryEventImpl event, Operation op, boolean requireOldValue, Object expectedOldValue, Object callbackArg, boolean isCreate) { recordTXOperation(ServerRegionOperation.PUT, key, value, deltaBytes, event.getEventId(), op, requireOldValue, expectedOldValue, callbackArg, isCreate); Operation operation = op; if (!isCreate && region.getDataPolicy() == DataPolicy.EMPTY && op.isCreate() && op != Operation.PUT_IF_ABSENT) { operation = Operation.UPDATE; } if (region == null) { return PutOp.execute(pool, regionName, key, value, deltaBytes, event, operation, requireOldValue, expectedOldValue, callbackArg, pool.getPRSingleHopEnabled()); } else { return PutOp.execute(pool, region, key, value, deltaBytes, event, operation, requireOldValue, expectedOldValue, callbackArg, pool.getPRSingleHopEnabled()); } } /** * Does a region put on the server using the given connection. 
   *
   * @param con the connection to use to send to the server
   * @param key the entry key to do the put on
   * @param value the entry value to put
   * @param eventId the event ID for this put
   * @param callbackArg an optional callback arg to pass to any cache callbacks
   */
  public void putOnForTestsOnly(Connection con, Object key, Object value, EventID eventId,
      Object callbackArg) {
    EventIDHolder event = new EventIDHolder(eventId);
    PutOp.execute(con, pool, regionName, key, value, event, callbackArg,
        pool.getPRSingleHopEnabled());
  }

  /*
   * (non-Javadoc)
   *
   * @see org.apache.geode.cache.client.internal.ServerRegionDataAccess#destroy(java.lang.Object,
   * java.lang.Object, org.apache.geode.cache.Operation, org.apache.geode.internal.cache.EventID,
   * java.lang.Object)
   */
  @Override
  public Object destroy(Object key, Object expectedOldValue, Operation operation,
      EntryEventImpl event, Object callbackArg) {
    if (event.isBulkOpInProgress()) {
      // this is a removeAll, ignore this!
      return null;
    }
    recordTXOperation(ServerRegionOperation.DESTROY, key, expectedOldValue, operation,
        event.getEventId(), callbackArg);
    return DestroyOp.execute(pool, region, key, expectedOldValue, operation, event, callbackArg,
        pool.getPRSingleHopEnabled());
  }

  @Override
  public void invalidate(EntryEventImpl event) {
    recordTXOperation(ServerRegionOperation.INVALIDATE, event.getKey(), event);
    // NOTE(review): dereferences region unconditionally — assumes this proxy was built from a
    // real Region (region may be null for test/admin proxies); confirm callers never invalidate
    // through a name-only proxy.
    InvalidateOp.execute(pool, region.getFullPath(), event, pool.getPRSingleHopEnabled(), region);
  }

  /**
   * Does a region entry destroy on the server using the given connection.
   *
   * @param con the connection to use to send to the server
   * @param key the entry key to do the destroy on
   * @param expectedOldValue the value that the entry must have to perform the operation, or null
   * @param operation the operation being performed (Operation.DESTROY, Operation.REMOVE)
   * @param event the event for this destroy operation
   * @param callbackArg an optional callback arg to pass to any cache callbacks
   */
  public void destroyOnForTestsOnly(Connection con, Object key, Object expectedOldValue,
      Operation operation, EntryEventImpl event, Object callbackArg) {
    DestroyOp.execute(con, pool, regionName, key, expectedOldValue, operation, event, callbackArg);
  }

  /**
   * Does a region destroy on the server
   *
   * @param eventId the event id for this destroy
   * @param callbackArg an optional callback arg to pass to any cache callbacks
   */
  public void destroyRegion(EventID eventId, Object callbackArg) {
    DestroyRegionOp.execute(pool, regionName, eventId, callbackArg);
  }

  /**
   * Does a region destroy on the server using the given connection.
   *
   * @param con the connection to use to send to the server
   * @param eventId the event id for this destroy
   * @param callbackArg an optional callback arg to pass to any cache callbacks
   */
  public void destroyRegionOnForTestsOnly(Connection con, EventID eventId, Object callbackArg) {
    DestroyRegionOp.execute(con, pool, regionName, eventId, callbackArg);
  }

  // Commits the server-side transaction with the given id.
  public TXCommitMessage commit(int txId) {
    return CommitOp.execute(pool, txId);
  }

  // Rolls back the server-side transaction with the given id.
  public void rollback(int txId) {
    RollbackOp.execute(pool, txId);
  }

  /*
   * (non-Javadoc)
   *
   * @see
   * org.apache.geode.cache.client.internal.ServerRegionDataAccess#clear(org.apache.geode.internal.
   * cache.EventID, java.lang.Object)
   */
  @Override
  public void clear(EventID eventId, Object callbackArg) {
    ClearOp.execute(pool, regionName, eventId, callbackArg);
  }

  /**
   * Does a region clear on the server using the given connection.
   *
   * @param con the connection to use to send to the server
   * @param eventId the event id for this clear
   * @param callbackArg an optional callback arg to pass to any cache callbacks
   */
  public void clearOnForTestsOnly(Connection con, EventID eventId, Object callbackArg) {
    ClearOp.execute(con, pool, regionName, eventId, callbackArg);
  }

  /*
   * (non-Javadoc)
   *
   * @see
   * org.apache.geode.cache.client.internal.ServerRegionDataAccess#containsKey(java.lang.Object)
   */
  @Override
  public boolean containsKey(Object key) {
    recordTXOperation(ServerRegionOperation.CONTAINS_KEY, key);
    return ContainsKeyOp.execute(pool, regionName, key, MODE.KEY);
  }

  /*
   * (non-Javadoc)
   *
   * @see
   * org.apache.geode.cache.client.internal.ServerRegionDataAccess#containsKey(java.lang.Object)
   */
  @Override
  public boolean containsValueForKey(Object key) {
    recordTXOperation(ServerRegionOperation.CONTAINS_VALUE_FOR_KEY, key);
    return ContainsKeyOp.execute(pool, regionName, key, MODE.VALUE_FOR_KEY);
  }

  /*
   * (non-Javadoc)
   *
   * @see
   * org.apache.geode.cache.client.internal.ServerRegionDataAccess#containsKey(java.lang.Object)
   */
  @Override
  public boolean containsValue(Object value) {
    // note: records a null key — CONTAINS_VALUE is keyed by the value argument
    recordTXOperation(ServerRegionOperation.CONTAINS_VALUE, null, value);
    return ContainsKeyOp.execute(pool, regionName, value, MODE.VALUE);
  }

  /*
   * (non-Javadoc)
   *
   * @see org.apache.geode.cache.client.internal.ServerRegionDataAccess#keySet()
   */
  @Override
  public Set<?> keySet() {
    recordTXOperation(ServerRegionOperation.KEY_SET, null);
    return KeySetOp.execute(pool, regionName);
  }

  /**
   * Does a region registerInterest on a server
   *
   * @param key describes what we are interested in
   * @param interestType the {@link InterestType} for this registration
   * @param policy the interest result policy for this registration
   * @param isDurable true if this registration is durable
   * @param regionDataPolicy the data policy ordinal of the region
   * @return list of keys
   */
  public <K> List<List<K>> registerInterest(@NotNull final K key,
      final @NotNull InterestType interestType, final @NotNull InterestResultPolicy policy,
      final boolean isDurable, final @NotNull DataPolicy regionDataPolicy) {
    return registerInterest(key, interestType, policy, isDurable, false, regionDataPolicy);
  }

  /**
   * Does a region registerInterest on a server
   *
   * @param key describes what we are interested in
   * @param interestType the {@link InterestType} for this registration
   * @param policy the interest result policy for this registration
   * @param isDurable true if this registration is durable
   * @param receiveUpdatesAsInvalidates whether to act like notify-by-subscription is false.
   * @param regionDataPolicy the data policy ordinal of the region
   * @return list of keys
   */
  public <K> List<List<K>> registerInterest(final @NotNull K key,
      final @NotNull InterestType interestType, final @NotNull InterestResultPolicy policy,
      final boolean isDurable, final boolean receiveUpdatesAsInvalidates,
      final @NotNull DataPolicy regionDataPolicy) {
    // legacy callers may pass a List of keys through the single-key API; delegate to the
    // list-based registration path
    if (interestType == InterestType.KEY && key instanceof List) {
      logger.warn(
          "Usage of registerInterest(List) has been deprecated. Please use registerInterestForKeys(Iterable)");
      return registerInterestList(uncheckedCast(key), policy, isDurable,
          receiveUpdatesAsInvalidates, regionDataPolicy);
    } else {
      final RegisterInterestTracker rit = pool.getRITracker();
      boolean finished = false;
      try {
        // register with the tracker early
        rit.addSingleInterest(region, key, interestType, policy, isDurable,
            receiveUpdatesAsInvalidates);
        final List<List<K>> result = RegisterInterestOp.execute(pool, regionName, key,
            interestType, policy, isDurable, receiveUpdatesAsInvalidates, regionDataPolicy);
        //////// TEST PURPOSE ONLY ///////////
        if (PoolImpl.AFTER_REGISTER_CALLBACK_FLAG) {
          ClientServerObserver bo = ClientServerObserverHolder.getInstance();
          bo.afterInterestRegistration();
        }
        /////////////////////////////////////////
        finished = true;
        return result;
      } finally {
        // roll back the early tracker registration if the server op failed
        if (!finished) {
          rit.removeSingleInterest(region, key, interestType, isDurable,
              receiveUpdatesAsInvalidates);
        }
      }
    }
  }

  /**
   * Support for server-side interest registration
   */
  public void addSingleInterest(Object key, final @NotNull InterestType interestType,
      InterestResultPolicy pol, boolean isDurable, boolean receiveUpdatesAsInvalidates) {
    RegisterInterestTracker rit = pool.getRITracker();
    boolean finished = false;
    try {
      rit.addSingleInterest(region, key, interestType, pol, isDurable,
          receiveUpdatesAsInvalidates);
      finished = true;
    } finally {
      if (!finished) {
        rit.removeSingleInterest(region, key, interestType, isDurable,
            receiveUpdatesAsInvalidates);
      }
    }
  }

  public <K> void addListInterest(final @NotNull List<K> keys,
      final @NotNull InterestResultPolicy pol, final boolean isDurable,
      final boolean receiveUpdatesAsInvalidates) {
    RegisterInterestTracker rit = pool.getRITracker();
    boolean finished = false;
    try {
      rit.addInterestList(region, keys, pol, isDurable, receiveUpdatesAsInvalidates);
      finished = true;
    } finally {
      if (!finished) {
        rit.removeInterestList(region, keys, isDurable, receiveUpdatesAsInvalidates);
      }
    }
  }

  /**
   * Support for server-side interest
registration */
  public void removeSingleInterest(final @NotNull Object key,
      final @NotNull InterestType interestType, final boolean isDurable,
      final boolean receiveUpdatesAsInvalidates) {
    // local-tracker-only removal; no server call is made here
    pool.getRITracker().removeSingleInterest(region, key, interestType, isDurable,
        receiveUpdatesAsInvalidates);
  }

  // Key-list counterpart: drops a list registration from the local tracker only.
  public <K> void removeListInterest(final @NotNull List<K> keys, final boolean isDurable,
      final boolean receiveUpdatesAsInvalidates) {
    pool.getRITracker().removeInterestList(region, keys, isDurable, receiveUpdatesAsInvalidates);
  }

  /**
   * Does a region registerInterest on a server described by the given server location
   * <p>
   * Note that this call by-passes the RegisterInterestTracker.
   *
   * @param sl the server to do the register interest on.
   * @param key describes what we are interested in
   * @param interestType the {@link InterestType} for this registration
   * @param policy the interest result policy for this registration
   * @param isDurable true if this registration is durable
   * @param regionDataPolicy the data policy ordinal of the region
   * @return list of keys
   */
  public <K> List<K> registerInterestOn(final @NotNull ServerLocation sl, final @NotNull K key,
      final @NotNull InterestType interestType, final @NotNull InterestResultPolicy policy,
      final boolean isDurable, final @NotNull DataPolicy regionDataPolicy) {
    // delegate to the private overload with receiveUpdatesAsInvalidates = false
    return registerInterestOn(sl, key, interestType, policy, isDurable, false, regionDataPolicy);
  }

  /**
   * Does a region registerInterest on a server described by the given server location
   * <p>
   * Note that this call by-passes the RegisterInterestTracker.
   *
   * @param sl the server to do the register interest on.
   * @param key describes what we are interested in
   * @param interestType the {@link InterestType} for this registration
   * @param policy the interest result policy for this registration
   * @param isDurable true if this registration is durable
   * @param receiveUpdatesAsInvalidates whether to act like notify-by-subscription is false.
   * @param regionDataPolicy the data policy ordinal of the region
   * @return list of keys
   */
  private <K> List<K> registerInterestOn(final @NotNull ServerLocation sl, final @NotNull K key,
      final @NotNull InterestType interestType, final @NotNull InterestResultPolicy policy,
      final boolean isDurable, final boolean receiveUpdatesAsInvalidates,
      final @NotNull DataPolicy regionDataPolicy) {
    // a KEY-typed registration whose key is actually a List is treated as a key-list
    // registration and routed to the list op
    if (interestType == InterestType.KEY && key instanceof List) {
      return RegisterInterestListOp.executeOn(sl, pool, regionName, uncheckedCast(key), policy,
          isDurable, receiveUpdatesAsInvalidates, regionDataPolicy);
    } else {
      return RegisterInterestOp.executeOn(sl, pool, regionName, key, interestType, policy,
          isDurable, receiveUpdatesAsInvalidates, regionDataPolicy);
    }
  }

  /**
   * Does a region registerInterest on a server described by the given connection
   * <p>
   * Note that this call by-passes the RegisterInterestTracker.
   *
   * @param conn the connection to do the register interest on.
   * @param key describes what we are interested in
   * @param interestType the {@link InterestType} for this registration
   * @param policy the interest result policy for this registration
   * @param isDurable true if this registration is durable
   * @param receiveUpdatesAsInvalidates whether to act like notify-by-subscription is false.
* @param regionDataPolicy the data policy ordinal of the region
   * @return list of keys
   */
  <K> List<K> registerInterestOn(final @NotNull Connection conn, final @NotNull K key,
      final @NotNull InterestType interestType, final InterestResultPolicy policy,
      final boolean isDurable, final boolean receiveUpdatesAsInvalidates,
      final @NotNull DataPolicy regionDataPolicy) {
    // a KEY-typed registration whose key is actually a List is treated as a key-list
    // registration and routed to the list op
    if (interestType == InterestType.KEY && key instanceof List) {
      return RegisterInterestListOp.executeOn(conn, pool, regionName, uncheckedCast(key), policy,
          isDurable, receiveUpdatesAsInvalidates, regionDataPolicy);
    } else {
      return RegisterInterestOp.executeOn(conn, pool, regionName, key, interestType, policy,
          isDurable, receiveUpdatesAsInvalidates, regionDataPolicy);
    }
  }

  /**
   * Does a region registerInterestList on a server
   *
   * @param keys list of keys we are interested in
   * @param policy the interest result policy for this registration
   * @param isDurable true if this registration is durable
   * @param receiveUpdatesAsInvalidates whether to act like notify-by-subscription is false
   * @param regionDataPolicy the data policy ordinal of the region
   * @return list of keys
   */
  public <K> List<K> registerInterestList(final @NotNull List<K> keys,
      final @NotNull InterestResultPolicy policy, final boolean isDurable,
      final boolean receiveUpdatesAsInvalidates, final @NotNull DataPolicy regionDataPolicy) {
    final RegisterInterestTracker rit = pool.getRITracker();
    List<K> result;
    boolean finished = false;
    try {
      // register with the tracker early
      rit.addInterestList(region, keys, policy, isDurable, receiveUpdatesAsInvalidates);
      result = RegisterInterestListOp.execute(pool, regionName, keys, policy, isDurable,
          receiveUpdatesAsInvalidates, regionDataPolicy);
      finished = true;
      //////// TEST PURPOSE ONLY ///////////
      if (PoolImpl.AFTER_REGISTER_CALLBACK_FLAG) {
        ClientServerObserver bo = ClientServerObserverHolder.getInstance();
        bo.afterInterestRegistration();
      }
      /////////////////////////////////////////
      return result;
    } finally {
      if (!finished) {
        // server operation failed: undo the eager tracker registration
        rit.removeInterestList(region, keys, isDurable,
            receiveUpdatesAsInvalidates);
      }
    }
  }

  /**
   * Does a region unregisterInterest on a server
   *
   * @param key describes what we are no longer interested in
   * @param interestType the {@link InterestType} for this unregister
   * @param isClosing true if this unregister is done by a close
   * @param keepAlive true if this unregister should not undo a durable registration
   */
  public void unregisterInterest(Object key, final @NotNull InterestType interestType,
      boolean isClosing, boolean keepAlive) {
    if (interestType == InterestType.KEY && key instanceof List) {
      unregisterInterestList(uncheckedCast(key), isClosing, keepAlive);
    } else {
      RegisterInterestTracker rit = pool.getRITracker();
      // probe the durable/invalidate tracker combinations (short-circuiting at the first
      // hit); only contact the server if the key was actually registered locally
      boolean removed = rit.removeSingleInterest(region, key, interestType, false, false)
          || rit.removeSingleInterest(region, key, interestType, true, false)
          || rit.removeSingleInterest(region, key, interestType, false, true)
          || rit.removeSingleInterest(region, key, interestType, true, true);
      if (removed) {
        UnregisterInterestOp.execute(pool, regionName, key, interestType, isClosing, keepAlive);
      }
    }
  }

  /**
   * Does a region unregisterInterestList on a server
   *
   * @param keys list of keys we are interested in
   * @param isClosing true if this unregister is done by a close
   * @param keepAlive true if this unregister should not undo a durable registration
   */
  public <K> void unregisterInterestList(List<K> keys, boolean isClosing, boolean keepAlive) {
    RegisterInterestTracker rit = pool.getRITracker();
    // probe the durable/invalidate tracker combinations, short-circuiting at the first hit
    boolean removed = rit.removeInterestList(region, keys, false, true)
        || rit.removeInterestList(region, keys, false, false)
        || rit.removeInterestList(region, keys, true, true)
        || rit.removeInterestList(region, keys, true, false);
    if (removed) {
      UnregisterInterestListOp.execute(pool, regionName, keys, isClosing, keepAlive);
    }
  }

  // Returns the locally tracked interest list for the given interest type.
  public <K> List<K> getInterestList(final @NotNull InterestType interestType) {
    return pool.getRITracker().getInterestList(regionName, interestType);
  }

  @Override
  public VersionedObjectList
putAll(Map<Object, Object> map, EventID eventId, boolean skipCallbacks, Object callbackArg) {
    recordTXOperation(ServerRegionOperation.PUT_ALL, null, map, eventId);
    int txID = TXManagerImpl.getCurrentTXUniqueId();
    // single-hop routing is only attempted outside a transaction
    if (pool.getPRSingleHopEnabled() && (txID == TXManagerImpl.NOTX)) {
      return PutAllOp.execute(pool, uncheckedCast(region), map, eventId, skipCallbacks,
          pool.getRetryAttempts(), callbackArg);
    } else {
      return PutAllOp.execute(pool, region, map, eventId, skipCallbacks, false, callbackArg);
    }
  }

  @Override
  public VersionedObjectList removeAll(Collection<Object> keys, EventID eventId,
      Object callbackArg) {
    recordTXOperation(ServerRegionOperation.REMOVE_ALL, null, keys, eventId);
    int txID = TXManagerImpl.getCurrentTXUniqueId();
    // single-hop routing is only attempted outside a transaction
    if (pool.getPRSingleHopEnabled() && (txID == TXManagerImpl.NOTX)) {
      return RemoveAllOp.execute(pool, region, keys, eventId, pool.getRetryAttempts(),
          callbackArg);
    } else {
      return RemoveAllOp.execute(pool, region, keys, eventId, false, callbackArg);
    }
  }

  @Override
  public VersionedObjectList getAll(List<Object> keys, Object callback) {
    recordTXOperation(ServerRegionOperation.GET_ALL, null, keys);
    int txID = TXManagerImpl.getCurrentTXUniqueId();
    VersionedObjectList result;
    if (pool.getPRSingleHopEnabled() && (txID == TXManagerImpl.NOTX)) {
      result = GetAllOp.execute(pool, region, keys, pool.getRetryAttempts(), callback);
    } else {
      result = GetAllOp.execute(pool, regionName, keys, callback);
    }
    if (result != null) {
      // log (but do not rethrow) per-key exceptions the server returned as entry values
      for (Iterator it = result.iterator(); it.hasNext();) {
        VersionedObjectList.Entry entry = it.next();
        Object key = entry.getKey();
        Object value = entry.getValue();
        // NOTE(review): the variable actually holds isKeyNotOnServer(), so the guard below
        // fires when the key WAS found on the server — confirm the naming upstream
        boolean isOnServer = entry.isKeyNotOnServer();
        if (!isOnServer) {
          if (value instanceof Throwable) {
            // NOTE(review): format args are (value, key); the leading %s renders the
            // exception itself — verify this argument order is intended
            logger.warn(String.format(
                "%s: Caught the following exception attempting to get value for key=%s", value,
                key), (Throwable) value);
          }
        }
      }
    }
    return result;
  }

  /**
   * Release use of this pool
   */
  public void detach(boolean keepalive) {
    pool.getRITracker().unregisterRegion(this,
        keepalive);
    super.detach();
  }

  @Override
  public String getRegionName() {
    return regionName;
  }

  @Override
  public Region<?, ?> getRegion() {
    return region;
  }

  /**
   * Executes {@code function} against this region on the server tier.
   *
   * <p>When PR single-hop is enabled, metadata is stable and no transaction is active, the
   * call is routed per-server via {@link ExecuteRegionFunctionSingleHopOp}; otherwise the
   * plain {@link ExecuteRegionFunctionOp} is used and a metadata refresh is scheduled.
   */
  public void executeFunction(Function<?> function,
      ServerRegionFunctionExecutor serverRegionExecutor, ResultCollector<?, ?> resultCollector,
      byte hasResult, final int timeoutMs) {
    recordTXOperation(ServerRegionOperation.EXECUTE_FUNCTION, null, 1, function,
        serverRegionExecutor, resultCollector, hasResult);
    int retryAttempts = pool.getRetryAttempts();
    boolean inTransaction = TXManagerImpl.getCurrentTXState() != null;
    // lazily builds the multi-hop op; only constructed on the fallback paths
    final Supplier<AbstractOp> executeRegionFunctionOpSupplier =
        () -> new ExecuteRegionFunctionOp.ExecuteRegionFunctionOpImpl(region.getFullPath(),
            function, serverRegionExecutor, resultCollector, timeoutMs);
    if (pool.getPRSingleHopEnabled() && !inTransaction) {
      ClientMetadataService cms = region.getCache().getClientMetadataService();
      if (cms.isMetadataStable()) {
        if (serverRegionExecutor.getFilter().isEmpty()) {
          // no filter: target every bucket, grouped by hosting server
          Map<ServerLocation, Set<Integer>> serverToBuckets =
              cms.groupByServerToAllBuckets(region, function.optimizeForWrite());
          if (serverToBuckets == null || serverToBuckets.isEmpty()) {
            // metadata not usable: fall back to the multi-hop op and refresh metadata
            ExecuteRegionFunctionOp.execute(pool, resultCollector, retryAttempts,
                function.isHA(),
                (ExecuteRegionFunctionOp.ExecuteRegionFunctionOpImpl) executeRegionFunctionOpSupplier
                    .get(),
                false, emptySet());
            cms.scheduleGetPRMetaData(region, false);
          } else {
            final java.util.function.Function<ServerRegionFunctionExecutor, AbstractOp> regionFunctionSingleHopOpFunction =
                executor -> new ExecuteRegionFunctionSingleHopOp.ExecuteRegionFunctionSingleHopOpImpl(
                    region.getFullPath(), function, executor, resultCollector, hasResult,
                    emptySet(), true, timeoutMs);
            ExecuteRegionFunctionSingleHopOp.execute(pool, region, serverRegionExecutor,
                resultCollector, serverToBuckets, function.isHA(),
                regionFunctionSingleHopOpFunction, executeRegionFunctionOpSupplier);
          }
        } else {
          boolean isBucketFilter =
serverRegionExecutor.getExecuteOnBucketSetFlag();
          Map<ServerLocation, Set> serverToFilterMap =
              cms.getServerToFilterMap(serverRegionExecutor.getFilter(), region,
                  function.optimizeForWrite(), isBucketFilter);
          if (serverToFilterMap == null || serverToFilterMap.isEmpty()) {
            // no usable filter-to-server mapping: fall back and refresh metadata
            ExecuteRegionFunctionOp.execute(pool, resultCollector, retryAttempts,
                function.isHA(),
                (ExecuteRegionFunctionOp.ExecuteRegionFunctionOpImpl) executeRegionFunctionOpSupplier
                    .get(),
                false, emptySet());
            cms.scheduleGetPRMetaData(region, false);
          } else {
            final java.util.function.Function<ServerRegionFunctionExecutor, AbstractOp> regionFunctionSingleHopOpFunction =
                executor -> new ExecuteRegionFunctionSingleHopOp.ExecuteRegionFunctionSingleHopOpImpl(
                    region.getFullPath(), function, executor, resultCollector, hasResult,
                    emptySet(), isBucketFilter, timeoutMs);
            ExecuteRegionFunctionSingleHopOp.execute(pool, region, serverRegionExecutor,
                resultCollector, serverToFilterMap, function.isHA(),
                regionFunctionSingleHopOpFunction, executeRegionFunctionOpSupplier);
          }
        }
      } else {
        // metadata is unstable: refresh it and use the multi-hop op
        cms.scheduleGetPRMetaData(region, false);
        ExecuteRegionFunctionOp.execute(pool, resultCollector, retryAttempts, function.isHA(),
            (ExecuteRegionFunctionOp.ExecuteRegionFunctionOpImpl) executeRegionFunctionOpSupplier
                .get(),
            false, emptySet());
      }
    } else {
      // single-hop disabled or inside a transaction: always use the multi-hop op
      ExecuteRegionFunctionOp.execute(pool, resultCollector, retryAttempts, function.isHA(),
          (ExecuteRegionFunctionOp.ExecuteRegionFunctionOpImpl) executeRegionFunctionOpSupplier
              .get(),
          false, emptySet());
    }
  }

  /**
   * Same routing as {@link #executeFunction(Function, ServerRegionFunctionExecutor,
   * ResultCollector, byte, int)} but identifies the function by id, with explicit HA and
   * optimize-for-write flags instead of reading them off a {@code Function} instance.
   */
  public void executeFunction(String functionId,
      ServerRegionFunctionExecutor serverRegionExecutor, ResultCollector<?, ?> resultCollector,
      byte hasResult, boolean isHA, boolean optimizeForWrite, final int timeoutMs) {
    recordTXOperation(ServerRegionOperation.EXECUTE_FUNCTION, null, 2, functionId,
        serverRegionExecutor, resultCollector, hasResult, isHA, optimizeForWrite);
    int retryAttempts = pool.getRetryAttempts();
    boolean inTransaction = TXManagerImpl.getCurrentTXState() != null;
    // lazily builds the multi-hop op; only constructed on the fallback paths
    final Supplier<AbstractOp> executeRegionFunctionOpSupplier =
        () -> new ExecuteRegionFunctionOp.ExecuteRegionFunctionOpImpl(region.getFullPath(),
            functionId, serverRegionExecutor, resultCollector, hasResult, isHA,
            optimizeForWrite, true, timeoutMs);
    if (pool.getPRSingleHopEnabled() && !inTransaction) {
      ClientMetadataService cms = region.getCache().getClientMetadataService();
      if (cms.isMetadataStable()) {
        if (serverRegionExecutor.getFilter().isEmpty()) {
          // no filter: target every bucket, grouped by hosting server
          Map<ServerLocation, Set<Integer>> serverToBuckets =
              cms.groupByServerToAllBuckets(region, optimizeForWrite);
          if (serverToBuckets == null || serverToBuckets.isEmpty()) {
            // metadata not usable: fall back to the multi-hop op and refresh metadata
            ExecuteRegionFunctionOp.execute(pool, resultCollector, retryAttempts, isHA,
                (ExecuteRegionFunctionOp.ExecuteRegionFunctionOpImpl) executeRegionFunctionOpSupplier
                    .get(),
                false, emptySet());
            cms.scheduleGetPRMetaData(region, false);
          } else {
            final java.util.function.Function<ServerRegionFunctionExecutor, AbstractOp> regionFunctionSingleHopOpFunction =
                executor1 -> new ExecuteRegionFunctionSingleHopOp.ExecuteRegionFunctionSingleHopOpImpl(
                    region.getFullPath(), functionId, executor1, resultCollector, hasResult,
                    emptySet(), true, isHA, optimizeForWrite, timeoutMs);
            ExecuteRegionFunctionSingleHopOp.execute(pool, region, serverRegionExecutor,
                resultCollector, serverToBuckets, isHA, regionFunctionSingleHopOpFunction,
                executeRegionFunctionOpSupplier);
          }
        } else {
          boolean isBucketsAsFilter = serverRegionExecutor.getExecuteOnBucketSetFlag();
          Map<ServerLocation, Set> serverToFilterMap = cms.getServerToFilterMap(
              serverRegionExecutor.getFilter(), region, optimizeForWrite, isBucketsAsFilter);
          if (serverToFilterMap == null || serverToFilterMap.isEmpty()) {
            // no usable filter-to-server mapping: fall back and refresh metadata
            ExecuteRegionFunctionOp.execute(pool, resultCollector, retryAttempts, isHA,
                (ExecuteRegionFunctionOp.ExecuteRegionFunctionOpImpl) executeRegionFunctionOpSupplier
                    .get(),
                false, emptySet());
            cms.scheduleGetPRMetaData(region, false);
          } else {
            final java.util.function.Function<ServerRegionFunctionExecutor, AbstractOp>
regionFunctionSingleHopOpFunction =
                executor -> new ExecuteRegionFunctionSingleHopOp.ExecuteRegionFunctionSingleHopOpImpl(
                    region.getFullPath(), functionId, executor, resultCollector, hasResult,
                    emptySet(), isBucketsAsFilter, isHA, optimizeForWrite, timeoutMs);
            ExecuteRegionFunctionSingleHopOp.execute(pool, region, serverRegionExecutor,
                resultCollector, serverToFilterMap, isHA, regionFunctionSingleHopOpFunction,
                executeRegionFunctionOpSupplier);
          }
        }
      } else {
        // metadata is unstable: refresh it and use the multi-hop op
        cms.scheduleGetPRMetaData(region, false);
        ExecuteRegionFunctionOp.execute(pool, resultCollector, retryAttempts, isHA,
            (ExecuteRegionFunctionOp.ExecuteRegionFunctionOpImpl) executeRegionFunctionOpSupplier
                .get(),
            false, emptySet());
      }
    } else {
      // single-hop disabled or inside a transaction: always use the multi-hop op
      ExecuteRegionFunctionOp.execute(pool, resultCollector, retryAttempts, isHA,
          (ExecuteRegionFunctionOp.ExecuteRegionFunctionOpImpl) executeRegionFunctionOpSupplier
              .get(),
          false, emptySet());
    }
  }

  // Executes a function (by instance) via ExecuteRegionFunctionNoAckOp — "no ack"
  // presumably means the client does not wait for a reply; confirm against the op class.
  public void executeFunctionNoAck(String rgnName, Function<?> function,
      ServerRegionFunctionExecutor serverRegionExecutor, byte hasResult) {
    recordTXOperation(ServerRegionOperation.EXECUTE_FUNCTION, null, 3, function,
        serverRegionExecutor, hasResult);
    ExecuteRegionFunctionNoAckOp.execute(pool, rgnName, function, serverRegionExecutor,
        hasResult);
  }

  // Variant of the above identifying the function by id, with explicit HA and
  // optimize-for-write flags.
  public void executeFunctionNoAck(String rgnName, String functionId,
      ServerRegionFunctionExecutor serverRegionExecutor, byte hasResult, boolean isHA,
      boolean optimizeForWrite) {
    recordTXOperation(ServerRegionOperation.EXECUTE_FUNCTION, null, 4, functionId,
        serverRegionExecutor, hasResult);
    ExecuteRegionFunctionNoAckOp.execute(pool, rgnName, functionId, serverRegionExecutor,
        hasResult, isHA, optimizeForWrite);
  }

  @Override
  public Entry<?, ?> getEntry(Object key) {
    recordTXOperation(ServerRegionOperation.GET_ENTRY, key);
    return (Entry<?, ?>) GetEntryOp.execute(pool, region, key);
  }

  /**
   * Transaction synchronization notification to the servers
   *
   * @see org.apache.geode.internal.cache.tx.ClientTXStateStub#beforeCompletion()
   */
  public void beforeCompletion(int txId) {
    TXSynchronizationOp.execute(pool, 0, txId,
        TXSynchronizationOp.CompletionType.BEFORE_COMPLETION);
  }

  /**
   * Transaction synchronization notification to the servers
   *
   * @return the server's TXCommitMessage
   * @see org.apache.geode.internal.cache.tx.ClientTXStateStub#afterCompletion(int)
   */
  public TXCommitMessage afterCompletion(int status, int txId) {
    return TXSynchronizationOp.execute(pool, status, txId,
        TXSynchronizationOp.CompletionType.AFTER_COMPLETION);
  }

  // Fetches the serialized attribute bytes of a registered server-side function.
  public byte[] getFunctionAttributes(String functionId) {
    return (byte[]) GetFunctionAttributeOp.execute(pool, functionId);
  }

  /** test hook */
  // Records the operation with the current transaction state (if transaction recording is
  // enabled and a transaction is active); no-op otherwise.
  private void recordTXOperation(ServerRegionOperation op, Object key, Object... arguments) {
    if (ClientTXStateStub.transactionRecordingEnabled()) {
      TXStateProxy tx = TXManagerImpl.getCurrentTXState();
      if (tx == null) {
        return;
      }
      tx.recordTXOperation(this, op, key, arguments);
    }
  }
}
googleapis/sdk-platform-java
36,510
java-common-protos/proto-google-common-protos/src/main/java/com/google/api/MethodPolicy.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/api/policy.proto // Protobuf Java Version: 3.25.8 package com.google.api; /** * * * <pre> * Defines policies applying to an RPC method. * </pre> * * Protobuf type {@code google.api.MethodPolicy} */ public final class MethodPolicy extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.api.MethodPolicy) MethodPolicyOrBuilder { private static final long serialVersionUID = 0L; // Use MethodPolicy.newBuilder() to construct. 
private MethodPolicy(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private MethodPolicy() { selector_ = ""; requestPolicies_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new MethodPolicy(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.api.PolicyProto.internal_static_google_api_MethodPolicy_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.api.PolicyProto.internal_static_google_api_MethodPolicy_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.api.MethodPolicy.class, com.google.api.MethodPolicy.Builder.class); } public static final int SELECTOR_FIELD_NUMBER = 9; @SuppressWarnings("serial") private volatile java.lang.Object selector_ = ""; /** * * * <pre> * Selects a method to which these policies should be enforced, for example, * "google.pubsub.v1.Subscriber.CreateSubscription". * * Refer to [selector][google.api.DocumentationRule.selector] for syntax * details. * * NOTE: This field must not be set in the proto annotation. It will be * automatically filled by the service config compiler . * </pre> * * <code>string selector = 9;</code> * * @return The selector. */ @java.lang.Override public java.lang.String getSelector() { java.lang.Object ref = selector_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); selector_ = s; return s; } } /** * * * <pre> * Selects a method to which these policies should be enforced, for example, * "google.pubsub.v1.Subscriber.CreateSubscription". * * Refer to [selector][google.api.DocumentationRule.selector] for syntax * details. 
* * NOTE: This field must not be set in the proto annotation. It will be * automatically filled by the service config compiler . * </pre> * * <code>string selector = 9;</code> * * @return The bytes for selector. */ @java.lang.Override public com.google.protobuf.ByteString getSelectorBytes() { java.lang.Object ref = selector_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); selector_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int REQUEST_POLICIES_FIELD_NUMBER = 2; @SuppressWarnings("serial") private java.util.List<com.google.api.FieldPolicy> requestPolicies_; /** * * * <pre> * Policies that are applicable to the request message. * </pre> * * <code>repeated .google.api.FieldPolicy request_policies = 2;</code> */ @java.lang.Override public java.util.List<com.google.api.FieldPolicy> getRequestPoliciesList() { return requestPolicies_; } /** * * * <pre> * Policies that are applicable to the request message. * </pre> * * <code>repeated .google.api.FieldPolicy request_policies = 2;</code> */ @java.lang.Override public java.util.List<? extends com.google.api.FieldPolicyOrBuilder> getRequestPoliciesOrBuilderList() { return requestPolicies_; } /** * * * <pre> * Policies that are applicable to the request message. * </pre> * * <code>repeated .google.api.FieldPolicy request_policies = 2;</code> */ @java.lang.Override public int getRequestPoliciesCount() { return requestPolicies_.size(); } /** * * * <pre> * Policies that are applicable to the request message. * </pre> * * <code>repeated .google.api.FieldPolicy request_policies = 2;</code> */ @java.lang.Override public com.google.api.FieldPolicy getRequestPolicies(int index) { return requestPolicies_.get(index); } /** * * * <pre> * Policies that are applicable to the request message. 
* </pre> * * <code>repeated .google.api.FieldPolicy request_policies = 2;</code> */ @java.lang.Override public com.google.api.FieldPolicyOrBuilder getRequestPoliciesOrBuilder(int index) { return requestPolicies_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < requestPolicies_.size(); i++) { output.writeMessage(2, requestPolicies_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(selector_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 9, selector_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < requestPolicies_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, requestPolicies_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(selector_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(9, selector_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.api.MethodPolicy)) { return super.equals(obj); } com.google.api.MethodPolicy other = (com.google.api.MethodPolicy) obj; if (!getSelector().equals(other.getSelector())) return false; if (!getRequestPoliciesList().equals(other.getRequestPoliciesList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; 
hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + SELECTOR_FIELD_NUMBER; hash = (53 * hash) + getSelector().hashCode(); if (getRequestPoliciesCount() > 0) { hash = (37 * hash) + REQUEST_POLICIES_FIELD_NUMBER; hash = (53 * hash) + getRequestPoliciesList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.api.MethodPolicy parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.api.MethodPolicy parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.api.MethodPolicy parseFrom(com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.api.MethodPolicy parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.api.MethodPolicy parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.api.MethodPolicy parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.api.MethodPolicy parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.api.MethodPolicy parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.api.MethodPolicy parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.api.MethodPolicy parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.api.MethodPolicy parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.api.MethodPolicy parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.api.MethodPolicy prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Defines policies applying to an RPC method. 
* </pre> * * Protobuf type {@code google.api.MethodPolicy} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.api.MethodPolicy) com.google.api.MethodPolicyOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.api.PolicyProto.internal_static_google_api_MethodPolicy_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.api.PolicyProto.internal_static_google_api_MethodPolicy_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.api.MethodPolicy.class, com.google.api.MethodPolicy.Builder.class); } // Construct using com.google.api.MethodPolicy.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; selector_ = ""; if (requestPoliciesBuilder_ == null) { requestPolicies_ = java.util.Collections.emptyList(); } else { requestPolicies_ = null; requestPoliciesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.api.PolicyProto.internal_static_google_api_MethodPolicy_descriptor; } @java.lang.Override public com.google.api.MethodPolicy getDefaultInstanceForType() { return com.google.api.MethodPolicy.getDefaultInstance(); } @java.lang.Override public com.google.api.MethodPolicy build() { com.google.api.MethodPolicy result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.api.MethodPolicy buildPartial() { com.google.api.MethodPolicy result = new com.google.api.MethodPolicy(this); buildPartialRepeatedFields(result); if 
(bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields(com.google.api.MethodPolicy result) { if (requestPoliciesBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0)) { requestPolicies_ = java.util.Collections.unmodifiableList(requestPolicies_); bitField0_ = (bitField0_ & ~0x00000002); } result.requestPolicies_ = requestPolicies_; } else { result.requestPolicies_ = requestPoliciesBuilder_.build(); } } private void buildPartial0(com.google.api.MethodPolicy result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.selector_ = selector_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.api.MethodPolicy) { return mergeFrom((com.google.api.MethodPolicy) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.api.MethodPolicy other) { if (other == com.google.api.MethodPolicy.getDefaultInstance()) return this; if (!other.getSelector().isEmpty()) { selector_ = other.selector_; bitField0_ |= 
0x00000001; onChanged(); }
      // --- merge repeated request_policies from `other` ---
      // Plain-list mode: adopt the other message's immutable list outright when
      // ours is empty, otherwise copy-on-write and append.  Builder mode: if the
      // nested builder holds nothing yet, dispose it and adopt the list directly
      // (re-creating the builder when alwaysUseFieldBuilders is set); otherwise
      // delegate the append to the RepeatedFieldBuilderV3.
      if (requestPoliciesBuilder_ == null) {
        if (!other.requestPolicies_.isEmpty()) {
          if (requestPolicies_.isEmpty()) {
            requestPolicies_ = other.requestPolicies_;
            bitField0_ = (bitField0_ & ~0x00000002);
          } else {
            ensureRequestPoliciesIsMutable();
            requestPolicies_.addAll(other.requestPolicies_);
          }
          onChanged();
        }
      } else {
        if (!other.requestPolicies_.isEmpty()) {
          if (requestPoliciesBuilder_.isEmpty()) {
            requestPoliciesBuilder_.dispose();
            requestPoliciesBuilder_ = null;
            requestPolicies_ = other.requestPolicies_;
            bitField0_ = (bitField0_ & ~0x00000002);
            requestPoliciesBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getRequestPoliciesFieldBuilder()
                    : null;
          } else {
            requestPoliciesBuilder_.addAllMessages(other.requestPolicies_);
          }
        }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      // MethodPolicy declares no required fields, so every instance is initialized.
      return true;
    }

    /**
     * Wire-format merge: reads tag/value pairs from {@code input} until end of
     * stream (tag 0), dispatching on MethodPolicy's field tags
     * (18 = request_policies, 74 = selector); unknown fields are preserved.
     */
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 18:
              {
                com.google.api.FieldPolicy m =
                    input.readMessage(com.google.api.FieldPolicy.parser(), extensionRegistry);
                if (requestPoliciesBuilder_ == null) {
                  ensureRequestPoliciesIsMutable();
                  requestPolicies_.add(m);
                } else {
                  requestPoliciesBuilder_.addMessage(m);
                }
                break;
              } // case 18
            case 74:
              {
                selector_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 74
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Fire the change notification even on a partial parse.
        onChanged();
      } // finally
      return this;
    }

    // Bit 0x00000001 tracks selector_; bit 0x00000002 marks requestPolicies_ as mutable.
    private int bitField0_;

    // Stored as Object: either a String or a ByteString, converted lazily.
    private java.lang.Object selector_ = "";

    /**
     * Selects a method to which these policies should be enforced, for example,
     * "google.pubsub.v1.Subscriber.CreateSubscription".
     * Refer to [selector][google.api.DocumentationRule.selector] for syntax details.
     *
     * <p>NOTE: This field must not be set in the proto annotation.  It will be
     * automatically filled by the service config compiler.
     *
     * <code>string selector = 9;</code>
     *
     * @return The selector.
     */
    public java.lang.String getSelector() {
      java.lang.Object ref = selector_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded String so later reads skip the UTF-8 conversion.
        selector_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     * Selects a method to which these policies should be enforced.
     * See {@link #getSelector()}.
     *
     * <code>string selector = 9;</code>
     *
     * @return The bytes for selector.
     */
    public com.google.protobuf.ByteString getSelectorBytes() {
      java.lang.Object ref = selector_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        // Cache the encoded ByteString for subsequent calls.
        selector_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     * Selects a method to which these policies should be enforced.
     * See {@link #getSelector()}.
     *
     * <code>string selector = 9;</code>
     *
     * @param value The selector to set.
     * @return This builder for chaining.
     */
    public Builder setSelector(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      selector_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     * Clears selector back to the default instance's value.
     * See {@link #getSelector()}.
     *
     * <code>string selector = 9;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearSelector() {
      selector_ = getDefaultInstance().getSelector();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }

    /**
     * Sets selector from raw bytes (must be valid UTF-8).
     * See {@link #getSelector()}.
     *
     * <code>string selector = 9;</code>
     *
     * @param value The bytes for selector to set.
     * @return This builder for chaining.
     */
    public Builder setSelectorBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      selector_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    // Backing list for request_policies; starts as the shared immutable empty list.
    private java.util.List<com.google.api.FieldPolicy> requestPolicies_ =
        java.util.Collections.emptyList();

    // Copy-on-write: replace the (possibly shared) list with a private ArrayList
    // the first time a mutation is requested, and flag it mutable in bitField0_.
    private void ensureRequestPoliciesIsMutable() {
      if (!((bitField0_ & 0x00000002) != 0)) {
        requestPolicies_ =
            new java.util.ArrayList<com.google.api.FieldPolicy>(requestPolicies_);
        bitField0_ |= 0x00000002;
      }
    }

    // Lazily created nested-builder support; while non-null it owns the field
    // and requestPolicies_ is ignored.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.api.FieldPolicy,
            com.google.api.FieldPolicy.Builder,
            com.google.api.FieldPolicyOrBuilder>
        requestPoliciesBuilder_;

    /**
     * Policies that are applicable to the request message.
     *
     * <code>repeated .google.api.FieldPolicy request_policies = 2;</code>
     */
    public java.util.List<com.google.api.FieldPolicy> getRequestPoliciesList() {
      if (requestPoliciesBuilder_ == null) {
        return java.util.Collections.unmodifiableList(requestPolicies_);
      } else {
        return requestPoliciesBuilder_.getMessageList();
      }
    }

    /**
     * Policies that are applicable to the request message.
     *
     * <code>repeated .google.api.FieldPolicy request_policies = 2;</code>
     */
    public int getRequestPoliciesCount() {
      if (requestPoliciesBuilder_ == null) {
        return requestPolicies_.size();
      } else {
        return requestPoliciesBuilder_.getCount();
      }
    }

    /**
     * Policies that are applicable to the request message.
     *
     * <code>repeated .google.api.FieldPolicy request_policies = 2;</code>
     */
    public com.google.api.FieldPolicy getRequestPolicies(int index) {
      if (requestPoliciesBuilder_ == null) {
        return requestPolicies_.get(index);
      } else {
        return requestPoliciesBuilder_.getMessage(index);
      }
    }

    /**
     * Policies that are applicable to the request message.
     *
     * <code>repeated .google.api.FieldPolicy request_policies = 2;</code>
     */
    public Builder setRequestPolicies(int index, com.google.api.FieldPolicy value) {
      if (requestPoliciesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureRequestPoliciesIsMutable();
        requestPolicies_.set(index, value);
        onChanged();
      } else {
        requestPoliciesBuilder_.setMessage(index, value);
      }
      return this;
    }

    /**
     * Policies that are applicable to the request message.
     *
     * <code>repeated .google.api.FieldPolicy request_policies = 2;</code>
     */
    public Builder setRequestPolicies(
        int index, com.google.api.FieldPolicy.Builder builderForValue) {
      if (requestPoliciesBuilder_ == null) {
        ensureRequestPoliciesIsMutable();
        requestPolicies_.set(index, builderForValue.build());
        onChanged();
      } else {
        requestPoliciesBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     * Policies that are applicable to the request message.
     *
     * <code>repeated .google.api.FieldPolicy request_policies = 2;</code>
     */
    public Builder addRequestPolicies(com.google.api.FieldPolicy value) {
      if (requestPoliciesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureRequestPoliciesIsMutable();
        requestPolicies_.add(value);
        onChanged();
      } else {
        requestPoliciesBuilder_.addMessage(value);
      }
      return this;
    }

    /**
     * Policies that are applicable to the request message.
     *
     * <code>repeated .google.api.FieldPolicy request_policies = 2;</code>
     */
    public Builder addRequestPolicies(int index, com.google.api.FieldPolicy value) {
      if (requestPoliciesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureRequestPoliciesIsMutable();
        requestPolicies_.add(index, value);
        onChanged();
      } else {
        requestPoliciesBuilder_.addMessage(index, value);
      }
      return this;
    }

    /**
     * Policies that are applicable to the request message.
     *
     * <code>repeated .google.api.FieldPolicy request_policies = 2;</code>
     */
    public Builder addRequestPolicies(com.google.api.FieldPolicy.Builder builderForValue) {
      if (requestPoliciesBuilder_ == null) {
        ensureRequestPoliciesIsMutable();
        requestPolicies_.add(builderForValue.build());
        onChanged();
      } else {
        requestPoliciesBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }

    /**
     * Policies that are applicable to the request message.
     *
     * <code>repeated .google.api.FieldPolicy request_policies = 2;</code>
     */
    public Builder addRequestPolicies(
        int index, com.google.api.FieldPolicy.Builder builderForValue) {
      if (requestPoliciesBuilder_ == null) {
        ensureRequestPoliciesIsMutable();
        requestPolicies_.add(index, builderForValue.build());
        onChanged();
      } else {
        requestPoliciesBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     * Policies that are applicable to the request message.
     *
     * <code>repeated .google.api.FieldPolicy request_policies = 2;</code>
     */
    public Builder addAllRequestPolicies(
        java.lang.Iterable<? extends com.google.api.FieldPolicy> values) {
      if (requestPoliciesBuilder_ == null) {
        ensureRequestPoliciesIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, requestPolicies_);
        onChanged();
      } else {
        requestPoliciesBuilder_.addAllMessages(values);
      }
      return this;
    }

    /**
     * Policies that are applicable to the request message.
     *
     * <code>repeated .google.api.FieldPolicy request_policies = 2;</code>
     */
    public Builder clearRequestPolicies() {
      if (requestPoliciesBuilder_ == null) {
        requestPolicies_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
      } else {
        requestPoliciesBuilder_.clear();
      }
      return this;
    }

    /**
     * Policies that are applicable to the request message.
     *
     * <code>repeated .google.api.FieldPolicy request_policies = 2;</code>
     */
    public Builder removeRequestPolicies(int index) {
      if (requestPoliciesBuilder_ == null) {
        ensureRequestPoliciesIsMutable();
        requestPolicies_.remove(index);
        onChanged();
      } else {
        requestPoliciesBuilder_.remove(index);
      }
      return this;
    }

    /**
     * Policies that are applicable to the request message.
     *
     * <code>repeated .google.api.FieldPolicy request_policies = 2;</code>
     */
    public com.google.api.FieldPolicy.Builder getRequestPoliciesBuilder(int index) {
      return getRequestPoliciesFieldBuilder().getBuilder(index);
    }

    /**
     * Policies that are applicable to the request message.
     *
     * <code>repeated .google.api.FieldPolicy request_policies = 2;</code>
     */
    public com.google.api.FieldPolicyOrBuilder getRequestPoliciesOrBuilder(int index) {
      if (requestPoliciesBuilder_ == null) {
        return requestPolicies_.get(index);
      } else {
        return requestPoliciesBuilder_.getMessageOrBuilder(index);
      }
    }

    /**
     * Policies that are applicable to the request message.
     *
     * <code>repeated .google.api.FieldPolicy request_policies = 2;</code>
     */
    public java.util.List<? extends com.google.api.FieldPolicyOrBuilder>
        getRequestPoliciesOrBuilderList() {
      if (requestPoliciesBuilder_ != null) {
        return requestPoliciesBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(requestPolicies_);
      }
    }

    /**
     * Policies that are applicable to the request message.
     *
     * <code>repeated .google.api.FieldPolicy request_policies = 2;</code>
     */
    public com.google.api.FieldPolicy.Builder addRequestPoliciesBuilder() {
      return getRequestPoliciesFieldBuilder()
          .addBuilder(com.google.api.FieldPolicy.getDefaultInstance());
    }

    /**
     * Policies that are applicable to the request message.
     *
     * <code>repeated .google.api.FieldPolicy request_policies = 2;</code>
     */
    public com.google.api.FieldPolicy.Builder addRequestPoliciesBuilder(int index) {
      return getRequestPoliciesFieldBuilder()
          .addBuilder(index, com.google.api.FieldPolicy.getDefaultInstance());
    }

    /**
     * Policies that are applicable to the request message.
     *
     * <code>repeated .google.api.FieldPolicy request_policies = 2;</code>
     */
    public java.util.List<com.google.api.FieldPolicy.Builder> getRequestPoliciesBuilderList() {
      return getRequestPoliciesFieldBuilder().getBuilderList();
    }

    // Creates the nested-builder support on first use, handing it the current
    // list, its mutability flag, and this builder's parent/clean state; the
    // plain list reference is nulled because the builder now owns the field.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.api.FieldPolicy,
            com.google.api.FieldPolicy.Builder,
            com.google.api.FieldPolicyOrBuilder>
        getRequestPoliciesFieldBuilder() {
      if (requestPoliciesBuilder_ == null) {
        requestPoliciesBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.api.FieldPolicy,
                com.google.api.FieldPolicy.Builder,
                com.google.api.FieldPolicyOrBuilder>(
                requestPolicies_,
                ((bitField0_ & 0x00000002) != 0),
                getParentForChildren(),
                isClean());
        requestPolicies_ = null;
      }
      return requestPoliciesBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.api.MethodPolicy)
  }

  // @@protoc_insertion_point(class_scope:google.api.MethodPolicy)
  private static final com.google.api.MethodPolicy DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.api.MethodPolicy();
  }

  public static com.google.api.MethodPolicy getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Stateless singleton parser; delegates to Builder.mergeFrom and always
  // returns buildPartial() so partially-read data survives parse failures.
  private static final com.google.protobuf.Parser<MethodPolicy> PARSER =
      new com.google.protobuf.AbstractParser<MethodPolicy>() {
        @java.lang.Override
        public MethodPolicy parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<MethodPolicy> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<MethodPolicy> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.api.MethodPolicy getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
apache/james-project
36,166
mpt/core/src/main/java/org/apache/james/mpt/helper/CreateScript.java
/**************************************************************** * Licensed to the Apache Software Foundation (ASF) under one * * or more contributor license agreements. See the NOTICE file * * distributed with this work for additional information * * regarding copyright ownership. The ASF licenses this file * * to you under the Apache License, Version 2.0 (the * * "License"); you may not use this file except in compliance * * with the License. You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, * * software distributed under the License is distributed on an * * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * * KIND, either express or implied. See the License for the * * specific language governing permissions and limitations * * under the License. * ****************************************************************/ package org.apache.james.mpt.helper; public class CreateScript { public static final String RE = "Re:"; public static final String HEADER = "Delivered-To"; public static final String ANOTHER_HEADER = "Received"; public static final String COMMON_LETTER = "o"; public static final String COMMON_WORD = "the"; public static final String UNCOMMON_WORD = "thy"; public static final String UNCOMMON_PHRASE = "\"nothing worthy prove\""; public static final String ANOTHER_NAME = "Robert"; public static final String NAME = "tim"; public static final String DOMAIN = "example.org"; public static final String ANOTHER_DOMAIN = "apache.org"; public static void main(String[] args) throws Exception { ScriptBuilder builder = ScriptBuilder.open("localhost", 143); expunge(builder); } public static void expunge(ScriptBuilder builder) throws Exception { try { setup(builder); builder.append(); builder.setFile("wild-example.mail"); builder.append(); builder.setFile("multipart-alt.mail"); builder.append(); builder.setFile("multipart-mixed-complex.mail"); 
builder.append(); builder.setFile("rfc822-hello-world.mail"); builder.append(); builder.setFile("wild-mixed-alt.mail"); builder.append(); builder.setFile("wild-mixed.mail"); builder.append(); builder.setFile("rfc822-resent.mail"); builder.append(); builder.setFile("rfc822-trace.mail"); builder.append(); builder.setFile("wild-alt-reply4.mail"); builder.append(); builder.select(); builder.flagDeleted(4); builder.flagDeleted(6); builder.flagDeleted(7); builder.expunge(); builder.select(); } finally { builder.quit(); } } public static void rfcFetch(ScriptBuilder builder) throws Exception { try { setup(builder); builder.append(); builder.setFile("wild-example.mail"); builder.append(); builder.setFile("multipart-alt.mail"); builder.append(); builder.setFile("multipart-mixed-complex.mail"); builder.append(); builder.setFile("rfc822-hello-world.mail"); builder.append(); builder.setFile("wild-mixed-alt.mail"); builder.append(); builder.setFile("wild-mixed.mail"); builder.append(); builder.setFile("rfc822-resent.mail"); builder.append(); builder.setFile("rfc822-trace.mail"); builder.append(); builder.setFile("wild-alt-reply4.mail"); builder.append(); builder.resetFetch().setRfc822Size(true); builder.fetchAllMessages(); builder.resetFetch().setRfc(true); builder.fetchAllMessages(); builder.resetFetch().setRfcHeaders(true); builder.fetchAllMessages(); builder.resetFetch().setRfcText(true); builder.fetchAllMessages(); } finally { builder.quit(); } } public static void bodyStructureEmbedded(ScriptBuilder builder) throws Exception { builder.login(); builder.create(); builder.select(); builder.setFile("multipart-mixed-complex.mail"); builder.append(); builder.fetchSection(""); builder.fetchSection("TEXT"); builder.fetchSection("HEADER"); builder.fetchSection("1"); builder.fetchSection("2"); builder.fetchSection("3"); builder.fetchSection("3.HEADER"); builder.fetchSection("3.TEXT"); builder.fetchSection("3.1"); builder.fetchSection("3.2"); builder.fetchSection("4"); 
builder.fetchSection("4.1"); builder.fetchSection("4.1.MIME"); builder.fetchSection("4.2"); builder.fetchSection("4.2.HEADER"); builder.fetchSection("4.2.TEXT"); builder.fetchSection("4.2.1"); builder.fetchSection("4.2.2"); builder.fetchSection("4.2.2.1"); builder.fetchSection("4.2.2.2"); builder.resetFetch().setBodyFetch(true).setBodyStructureFetch(true); builder.fetchAllMessages(); builder.quit(); } public static void bodyStructureComplex(ScriptBuilder builder) throws Exception { builder.login(); builder.create(); builder.select(); builder.setFile("wild-alt-reply3.mail"); builder.append(); builder.setFile("wild-alt-reply4.mail"); builder.append(); builder.setFile("multipart-mixed.mail"); builder.append(); builder.setFile("wild-mixed-alt.mail"); builder.append(); builder.setFile("wild-mixed.mail"); builder.append(); builder.setFile("mime-plain-text.mail"); builder.append(); builder.fetchAllMessages(); for (int i = 1; i < 7; i++) { builder.setMessageNumber(i); builder.fetchSection(""); builder.fetchSection("TEXT"); builder.fetchSection("HEADER"); builder.fetchSection("1"); builder.fetchSection("2"); builder.fetchSection("3"); builder.fetchSection("3.HEADER"); builder.fetchSection("3.TEXT"); builder.fetchSection("3.1"); builder.fetchSection("3.2"); builder.fetchSection("4"); builder.fetchSection("4.1"); builder.fetchSection("4.1.MIME"); builder.fetchSection("4.2"); builder.fetchSection("4.2.HEADER"); builder.fetchSection("4.2.TEXT"); builder.fetchSection("4.2.1"); builder.fetchSection("4.2.2"); builder.fetchSection("4.2.2.1"); builder.fetchSection("4.2.2.2"); } builder.resetFetch().setBodyFetch(true).setBodyStructureFetch(true); builder.fetchAllMessages(); builder.quit(); } public static void bodyStructureSimple(ScriptBuilder builder) throws Exception { builder.login(); builder.create(); builder.select(); builder.setFile("rfc822-multiple-addresses.mail"); builder.append(); builder.setFile("wild-example.mail"); builder.append(); 
builder.setFile("mime-plain-text.mail"); builder.append(); builder.fetchAllMessages(); builder.fetchSection(""); builder.fetchSection("TEXT"); builder.fetchSection("HEADER"); builder.fetchSection("1"); builder.fetchSection("2"); builder.fetchSection("3"); builder.fetchSection("3.HEADER"); builder.fetchSection("3.TEXT"); builder.fetchSection("3.1"); builder.fetchSection("3.2"); builder.fetchSection("4"); builder.fetchSection("4.1"); builder.fetchSection("4.1.MIME"); builder.fetchSection("4.2"); builder.fetchSection("4.2.HEADER"); builder.fetchSection("4.2.TEXT"); builder.fetchSection("4.2.1"); builder.fetchSection("4.2.2"); builder.fetchSection("4.2.2.1"); builder.fetchSection("4.2.2.2"); builder.resetFetch().setBodyFetch(true).setBodyStructureFetch(true); builder.fetchAllMessages(); builder.quit(); } public static void bodyStructureMultipart(ScriptBuilder builder) throws Exception { builder.login(); builder.create(); builder.select(); builder.setFile("multipart-binary.mail"); builder.append(); builder.setFile("multipart-alt-translation.mail"); builder.append(); builder.fetchAllMessages(); builder.fetchSection(""); builder.fetchSection("TEXT"); builder.fetchSection("HEADER"); builder.fetchSection("1"); builder.fetchSection("2"); builder.fetchSection("3"); builder.fetchSection("3.HEADER"); builder.fetchSection("3.TEXT"); builder.fetchSection("3.1"); builder.fetchSection("3.2"); builder.fetchSection("4"); builder.fetchSection("4.1"); builder.fetchSection("4.1.MIME"); builder.fetchSection("4.2"); builder.fetchSection("4.2.HEADER"); builder.fetchSection("4.2.TEXT"); builder.fetchSection("4.2.1"); builder.fetchSection("4.2.2"); builder.fetchSection("4.2.2.1"); builder.fetchSection("4.2.2.2"); builder.resetFetch().setBodyFetch(true).setBodyStructureFetch(true); builder.fetchAllMessages(); builder.quit(); } public static void renameSelected(ScriptBuilder builder) throws Exception { builder.login(); builder.create(); builder.select(); builder.append(); 
builder.setFile("rfc822-hello-world.mail"); builder.append(); builder.setFile("rfc822-sender.mail"); builder.append(); builder.setFile("rfc822.mail"); builder.append(); builder.setFile("rfc822-multiple-addresses.mail"); builder.append(); builder.select(); builder.getFetch().setFlagsFetch(true).bodyPeekHeaders( ScriptBuilder.Fetch.SELECT_HEADERS).setUid(true); builder.fetchAllMessages(); builder.list(); builder.rename("anothermailbox"); builder.list(); builder.fetchAllMessages(); builder.store(builder.flags().add().flagged().range(1, 2)); builder.store(builder.flags().add().answered().range(1, 3)); builder.fetchAllMessages(); builder.select(); builder.setMailbox("anothermailbox"); builder.select(); builder.fetchAllMessages(); builder.quit(); } public static void renameHierarchy(ScriptBuilder builder) throws Exception { builder.login(); builder.setMailbox("one").create(); builder.setMailbox("one.two").create(); builder.setMailbox("one.two.three").create(); builder.setMailbox("one.two.three.four").create(); builder.list(); builder.rename("one.two", "alpha.beta"); builder.list(); builder.rename("alpha.beta.three.four", "alpha.beta.gamma.delta"); builder.list(); builder.rename("alpha.beta.three", "aleph"); builder.list(); builder.rename("aleph", "alpha.beta.gamma.delta.epsilon"); builder.list(); builder.rename("alpha.beta.gamma", "one"); builder.list(); builder.setMailbox("one").delete(); builder.setMailbox("alpha").delete(); builder.setMailbox("aleph"); builder.list(); builder.quit(); } public static void rename(ScriptBuilder builder) throws Exception { setupSearch(builder, true); builder.select(); String originalMailbox = builder.getMailbox(); builder.getFetch().setFlagsFetch(true).bodyPeekHeaders( ScriptBuilder.Fetch.SELECT_HEADERS).setUid(true); builder.fetchAllMessages(); builder.setMailbox("other").create().select().append(); builder.setMailbox("base").create().select(); builder.rename(originalMailbox, "moved").setMailbox("moved").select() .fetchAllMessages(); 
builder.setMailbox(originalMailbox).select(); builder.rename("other", "base"); builder.setMailbox(originalMailbox).select(); builder.setMailbox("moved").select(); builder.setMailbox("other").select(); builder.setMailbox("base").select(); builder.setMailbox("BOGUS").select(); builder.setMailbox("WHATEVER").select(); builder.rename("other", originalMailbox); builder.setMailbox(originalMailbox).select(); builder.setMailbox("moved").select(); builder.setMailbox("other").select(); builder.setMailbox("base").select(); builder.setMailbox("BOGUS").select(); builder.setMailbox("WHATEVER").select(); builder.rename("BOGUS", "WHATEVER"); builder.rename(originalMailbox, "INBOX"); builder.rename(originalMailbox, "inbox"); builder.rename(originalMailbox, "Inbox"); builder.setMailbox(originalMailbox).select(); builder.setMailbox("moved").select(); builder.setMailbox("other").select(); builder.setMailbox("base").select(); builder.setMailbox("BOGUS").select(); builder.setMailbox("WHATEVER").select(); builder.setMailbox("BOGUS").delete(); builder.setMailbox("WHATEVER").delete(); builder.setMailbox(originalMailbox).delete(); builder.setMailbox("base").delete(); builder.setMailbox("other").delete(); builder.setMailbox("moved").delete(); builder.quit(); } public static void mimePartialFetch(ScriptBuilder builder) throws Exception { builder.login(); builder.create(); builder.select(); builder.setFile("multipart-mixed-complex.mail"); builder.append(); builder.select(); builder.partial(0, 0).fetchSection("4.1.MIME"); builder.partial(0, 15).fetchSection("4.1.MIME"); builder.partial(0, 31).fetchSection("4.1.MIME"); builder.partial(0, 63).fetchSection("4.1.MIME"); builder.partial(0, 127).fetchSection("4.1.MIME"); builder.partial(0, 1023).fetchSection("4.1.MIME"); builder.partial(0, 2047).fetchSection("4.1.MIME"); builder.partial(17, 0).fetchSection("4.1.MIME"); builder.partial(17, 16).fetchSection("4.1.MIME"); builder.partial(17, 32).fetchSection("4.1.MIME"); builder.partial(17, 
64).fetchSection("4.1.MIME"); builder.partial(17, 128).fetchSection("4.1.MIME"); builder.partial(17, 1024).fetchSection("4.1.MIME"); builder.partial(17, 2048).fetchSection("4.1.MIME"); builder.partial(10000, 0).fetchSection("4.1.MIME"); builder.partial(10000, 16).fetchSection("4.1.MIME"); builder.partial(10000, 32).fetchSection("4.1.MIME"); builder.partial(10000, 64).fetchSection("4.1.MIME"); builder.partial(10000, 128).fetchSection("4.1.MIME"); builder.partial(10000, 1024).fetchSection("4.1.MIME"); builder.partial(10000, 2048).fetchSection("4.1.MIME"); builder.quit(); } public static void headerPartialFetch(ScriptBuilder builder) throws Exception { builder.login(); builder.create(); builder.select(); builder.setFile("multipart-mixed-complex.mail"); builder.append(); builder.select(); builder.partial(0, 0).fetchSection("HEADER"); builder.partial(0, 16).fetchSection("HEADER"); builder.partial(0, 32).fetchSection("HEADER"); builder.partial(0, 64).fetchSection("HEADER"); builder.partial(0, 128).fetchSection("HEADER"); builder.partial(0, 1024).fetchSection("HEADER"); builder.partial(0, 2048).fetchSection("HEADER"); builder.partial(7, 0).fetchSection("HEADER"); builder.partial(7, 16).fetchSection("HEADER"); builder.partial(7, 32).fetchSection("HEADER"); builder.partial(7, 64).fetchSection("HEADER"); builder.partial(7, 128).fetchSection("HEADER"); builder.partial(7, 1024).fetchSection("HEADER"); builder.partial(7, 2048).fetchSection("HEADER"); builder.partial(10000, 0).fetchSection("HEADER"); builder.partial(10000, 16).fetchSection("HEADER"); builder.partial(10000, 32).fetchSection("HEADER"); builder.partial(10000, 64).fetchSection("HEADER"); builder.partial(10000, 128).fetchSection("HEADER"); builder.partial(10000, 1024).fetchSection("HEADER"); builder.partial(10000, 2048).fetchSection("HEADER"); builder.quit(); } public static void textPartialFetch(ScriptBuilder builder) throws Exception { builder.login(); builder.create(); builder.select(); 
builder.setFile("multipart-mixed-complex.mail"); builder.append(); builder.select(); builder.partial(0, 0).fetchSection("TEXT"); builder.partial(0, 16).fetchSection("TEXT"); builder.partial(0, 32).fetchSection("TEXT"); builder.partial(0, 64).fetchSection("TEXT"); builder.partial(0, 128).fetchSection("TEXT"); builder.partial(0, 1024).fetchSection("TEXT"); builder.partial(0, 2048).fetchSection("TEXT"); builder.partial(7, 0).fetchSection("TEXT"); builder.partial(7, 16).fetchSection("TEXT"); builder.partial(7, 32).fetchSection("TEXT"); builder.partial(7, 64).fetchSection("TEXT"); builder.partial(7, 128).fetchSection("TEXT"); builder.partial(7, 1024).fetchSection("TEXT"); builder.partial(7, 2048).fetchSection("TEXT"); builder.partial(10000, 0).fetchSection("TEXT"); builder.partial(10000, 16).fetchSection("TEXT"); builder.partial(10000, 32).fetchSection("TEXT"); builder.partial(10000, 64).fetchSection("TEXT"); builder.partial(10000, 128).fetchSection("TEXT"); builder.partial(10000, 1024).fetchSection("TEXT"); builder.partial(10000, 2048).fetchSection("TEXT"); builder.quit(); } public static void bodyPartialFetch(ScriptBuilder builder) throws Exception { builder.login(); builder.create(); builder.select(); builder.setFile("multipart-mixed.mail"); builder.append(); builder.select(); builder.partial(0, 10).fetchSection(""); builder.partial(0, 100).fetchSection(""); builder.partial(0, 1000).fetchSection(""); builder.partial(0, 10000).fetchSection(""); builder.partial(0, 100000).fetchSection(""); builder.partial(100, 10).fetchSection(""); builder.partial(100, 100).fetchSection(""); builder.partial(100, 1000).fetchSection(""); builder.partial(100, 10000).fetchSection(""); builder.partial(100, 100000).fetchSection(""); builder.partial(10000, 10).fetchSection(""); builder.partial(10000, 100).fetchSection(""); builder.partial(10000, 1000).fetchSection(""); builder.partial(10000, 10000).fetchSection(""); builder.partial(10000, 100000).fetchSection(""); builder.quit(); } public 
static void searchCombinations(ScriptBuilder builder, boolean uids) throws Exception {
    // Script of compound SEARCH commands: conjunctions, OR, and nested
    // parenthesised groups over the mailbox populated by setupSearch.
    setupSearch(builder, uids);
    builder.body(COMMON_LETTER).undraft().unflagged().answered().search();
    builder.to(COMMON_LETTER).draft().flagged().answered().search();
    builder.to(COMMON_LETTER).smaller(10000).all().draft().search();
    builder.bcc(COMMON_LETTER).larger(1000).search();
    builder.from(COMMON_LETTER).larger(1000).flagged().search();
    builder.from(COMMON_LETTER).to(COMMON_LETTER).answered().flagged()
            .all().body(COMMON_LETTER).sentbefore(2009, 1, 1).search();
    // OR of two parenthesised groups.
    builder.or().openParen().from(COMMON_LETTER).to(COMMON_LETTER)
            .answered().flagged().all().body(COMMON_LETTER).sentbefore(
                    2009, 1, 1).closeParen().openParen().header(HEADER,
                    "\"\"").draft().closeParen().search();
    builder.or().openParen().cc(COMMON_LETTER).text(COMMON_LETTER).unseen()
            .larger(1000).all().body(COMMON_LETTER).senton(2008, 4, 8)
            .closeParen().openParen().header(HEADER, "\"\"").draft()
            .closeParen().search();
    builder.or().openParen().cc(COMMON_LETTER).to(COMMON_LETTER).draft()
            .unseen().all().text(COMMON_LETTER).sentsince(2000, 1, 1)
            .closeParen().openParen().header(HEADER, "\"\"").draft()
            .closeParen().search();
    // Deeply nested OR trees including NOT.
    builder.or().openParen().or().openParen().or().openParen().not().text(
            COMMON_LETTER).cc(COMMON_LETTER).unseen().flagged().all().body(
            COMMON_LETTER).not().senton(2008, 3, 1).closeParen()
            .openParen().header(HEADER, DOMAIN).flagged().closeParen()
            .closeParen().openParen().from(COMMON_LETTER).to(COMMON_LETTER)
            .answered().flagged().all().body(COMMON_LETTER).sentbefore(
                    2009, 1, 1).closeParen().closeParen().openParen()
            .answered().flagged().draft().closeParen().all().deleted()
            .search();
    builder.or().openParen().or().openParen().or().openParen().from(
            COMMON_LETTER).to(COMMON_LETTER).answered().flagged().all()
            .body(COMMON_LETTER).sentbefore(2009, 1, 1).closeParen()
            .openParen().header(HEADER, "\"\"").draft().closeParen()
            .closeParen().openParen().from(COMMON_LETTER).to(COMMON_LETTER)
            .answered().flagged().all().body(COMMON_LETTER).sentbefore(
                    2009, 1, 1).closeParen().closeParen().openParen()
            .answered().flagged().draft().closeParen().all().unanswered()
            .search();
    builder.quit();
}

/**
 * Builds a script issuing each SEARCH key in isolation against the
 * mailbox populated by setupSearch (body continues on the next chunks).
 *
 * @param builder script builder the commands are appended to
 * @param uids true to use UID SEARCH, false for plain SEARCH
 * @throws Exception on script-construction failure
 */
public static void searchAtoms(ScriptBuilder builder, boolean uids) throws Exception {
    setupSearch(builder, uids);
    builder.all().search();
    builder.answered().search();
    builder.bcc(COMMON_LETTER).search();
    builder.bcc(NAME).search();
    builder.bcc(ANOTHER_NAME).search();
    builder.bcc(DOMAIN).search();
    builder.bcc(ANOTHER_DOMAIN).search();
    builder.body(COMMON_LETTER).search();
    builder.body(COMMON_WORD).search();
    builder.body(UNCOMMON_WORD).search();
    builder.body(UNCOMMON_PHRASE).search();
    builder.cc(COMMON_LETTER).search();
    builder.cc(NAME).search();
    builder.cc(ANOTHER_NAME).search();
    builder.cc(DOMAIN).search();
    builder.cc(ANOTHER_DOMAIN).search();
    builder.deleted().search();
    builder.draft().search();
    builder.flagged().search();
    builder.from(COMMON_LETTER).search();
    builder.from(NAME).search();
    builder.from(ANOTHER_NAME).search();
    builder.from(DOMAIN).search();
    builder.from(ANOTHER_DOMAIN).search();
    builder.header(HEADER, DOMAIN).search();
    builder.header(HEADER, COMMON_LETTER).search();
    builder.header(HEADER, ANOTHER_DOMAIN).search();
    builder.header(HEADER, "\"\"").search();
    builder.header(ANOTHER_HEADER, DOMAIN).search();
    builder.header(ANOTHER_HEADER, COMMON_LETTER).search();
    builder.header(ANOTHER_HEADER, ANOTHER_DOMAIN).search();
    builder.header(ANOTHER_HEADER, "\"\"").search();
    builder.larger(10).search();
    builder.larger(100).search();
    builder.larger(1000).search();
    builder.larger(10000).search();
    builder.larger(12500).search();
    builder.larger(15000).search();
    builder.larger(20000).search();
    builder.newOperator().search();
    builder.not().flagged().search();
    builder.msn(3, 5).search();
    builder.msnAndDown(10).search();
    builder.msnAndUp(17).search();
    builder.old().search();
    builder.or().answered().flagged().search();
    builder.recent().search();
    builder.seen().search();
builder.sentbefore(2007, 10, 10).search();
    // (continues searchAtoms) date-based keys over the same set of dates,
    // then size, subject, text and address keys.
    builder.sentbefore(2008, 1, 1).search();
    builder.sentbefore(2008, 2, 1).search();
    builder.sentbefore(2008, 2, 10).search();
    builder.sentbefore(2008, 2, 20).search();
    builder.sentbefore(2008, 2, 25).search();
    builder.sentbefore(2008, 3, 1).search();
    builder.sentbefore(2008, 3, 5).search();
    builder.sentbefore(2008, 3, 10).search();
    builder.sentbefore(2008, 4, 1).search();
    builder.senton(2007, 10, 10).search();
    builder.senton(2008, 1, 1).search();
    builder.senton(2008, 2, 1).search();
    builder.senton(2008, 2, 10).search();
    builder.senton(2008, 2, 20).search();
    builder.senton(2008, 2, 25).search();
    builder.senton(2008, 3, 1).search();
    builder.senton(2008, 3, 5).search();
    builder.senton(2008, 3, 10).search();
    builder.senton(2008, 4, 1).search();
    builder.sentsince(2007, 10, 10).search();
    builder.sentsince(2008, 1, 1).search();
    builder.sentsince(2008, 2, 1).search();
    builder.sentsince(2008, 2, 10).search();
    builder.sentsince(2008, 2, 20).search();
    builder.sentsince(2008, 2, 25).search();
    builder.sentsince(2008, 3, 1).search();
    builder.sentsince(2008, 3, 5).search();
    builder.sentsince(2008, 3, 10).search();
    builder.sentsince(2008, 4, 1).search();
    builder.smaller(10).search();
    builder.smaller(100).search();
    builder.smaller(1000).search();
    builder.smaller(10000).search();
    builder.smaller(12500).search();
    builder.smaller(15000).search();
    builder.smaller(20000).search();
    builder.subject(COMMON_LETTER).search();
    builder.subject(COMMON_WORD).search();
    builder.subject(UNCOMMON_PHRASE).search();
    builder.subject(UNCOMMON_WORD).search();
    builder.subject(RE).search();
    builder.text(COMMON_LETTER).search();
    builder.text(COMMON_WORD).search();
    builder.text(UNCOMMON_PHRASE).search();
    builder.text(UNCOMMON_WORD).search();
    builder.text(RE).search();
    builder.text(DOMAIN).search();
    builder.text(ANOTHER_DOMAIN).search();
    builder.text(ANOTHER_NAME).search();
    builder.text(NAME).search();
    builder.to(COMMON_LETTER).search();
    builder.to(NAME).search();
builder.to(ANOTHER_NAME).search();
    // (continues searchAtoms) final address, UID and flag-negation keys.
    builder.to(DOMAIN).search();
    builder.to(ANOTHER_DOMAIN).search();
    builder.uid().msn(1, 4).search();
    builder.unanswered().search();
    builder.undeleted().search();
    builder.undraft().search();
    builder.unflagged().search();
    builder.unseen().search();
    builder.quit();
}

/**
 * Prepares a mailbox for the SEARCH scripts: pads UIDs, loads a varied set
 * of messages and sets a known pattern of flags across them.
 *
 * @param builder script builder the commands are appended to
 * @param uids true to use UID SEARCH in the generated script
 * @throws Exception on script-construction failure
 */
private static void setupSearch(ScriptBuilder builder, boolean uids) throws Exception {
    builder.setUidSearch(uids);
    setup(builder);
    padUids(builder);
    loadLotsOfMail(builder);
    builder.store(builder.flags().add().flagged().range(1, 9));
    builder.store(builder.flags().add().answered().range(1, 4));
    builder.store(builder.flags().add().answered().range(10, 14));
    builder.store(builder.flags().add().seen().range(1, 2));
    builder.store(builder.flags().add().seen().range(5, 7));
    builder.store(builder.flags().add().seen().range(10, 12));
    builder.store(builder.flags().add().seen().range(15, 17));
    builder.store(builder.flags().add().draft().msn(1));
    builder.store(builder.flags().add().draft().msn(3));
    builder.store(builder.flags().add().draft().msn(5));
    builder.store(builder.flags().add().draft().msn(7));
    builder.store(builder.flags().add().draft().msn(9));
    builder.store(builder.flags().add().draft().msn(11));
    builder.store(builder.flags().add().draft().msn(13));
    builder.store(builder.flags().add().draft().msn(15));
    builder.store(builder.flags().add().draft().msn(17));
    builder.store(builder.flags().add().deleted().range(1, 3));
}

/** Standard script preamble: LOGIN, CREATE and SELECT the test mailbox. */
private static void setup(ScriptBuilder builder) throws Exception {
    builder.login();
    builder.create();
    builder.select();
}

/**
 * Advances the mailbox UIDNEXT by appending and expunging 20 messages, so
 * later UIDs differ from message sequence numbers.
 */
private static void padUids(ScriptBuilder builder) throws Exception {
    builder.setFile("rfc822.mail");
    for (int i = 0; i < 20; i++) {
        builder.append();
        builder.flagDeleted().expunge();
    }
}

/**
 * Appends a varied corpus of test messages (body continues on the next
 * chunk).
 */
private static void loadLotsOfMail(ScriptBuilder builder) throws Exception {
    builder.append();
    builder.setFile("wild-example.mail");
    builder.append();
    builder.setFile("multipart-alt.mail");
    builder.append();
    builder.setFile("multipart-mixed.mail");
builder.append();
    // (continues loadLotsOfMail) remaining corpus messages.
    builder.setFile("multipart-mixed-complex.mail");
    builder.append();
    builder.setFile("rfc822-hello-world.mail");
    builder.append();
    builder.setFile("rfc822-sender.mail");
    builder.append();
    builder.setFile("rfc822.mail");
    builder.append();
    builder.setFile("rfc822-multiple-addresses.mail");
    builder.append();
    builder.setFile("wild-alt-reply.mail");
    builder.append();
    builder.setFile("wild-mixed-alt.mail");
    builder.append();
    builder.setFile("wild-mixed.mail");
    builder.append();
    builder.setFile("rfc822-reply.mail");
    builder.append();
    builder.setFile("rfc822-resent.mail");
    builder.append();
    builder.setFile("rfc822-trace.mail");
    builder.append();
    builder.setFile("rfc822-group-addresses.mail");
    builder.append();
    builder.setFile("wild-alt-another-reply.mail");
    builder.append();
    builder.setFile("wild-alt-reply3.mail");
    builder.append();
    builder.setFile("wild-alt-reply4.mail");
    builder.append();
}

/**
 * Builds a script fetching complete messages and then BODY.PEEK with a
 * header-exclusion list (HEADER.FIELDS.NOT) over a mixed corpus.
 *
 * @param builder script builder the commands are appended to
 * @throws Exception on script-construction failure
 */
public static void notHeaderFetches(ScriptBuilder builder) throws Exception {
    builder.login();
    builder.create();
    builder.select();
    builder.append();
    builder.setFile("wild-example.mail");
    builder.append();
    builder.setFile("multipart-alt.mail");
    builder.append();
    builder.setFile("multipart-mixed.mail");
    builder.append();
    builder.setFile("multipart-mixed-complex.mail");
    builder.append();
    builder.setFile("rfc822-hello-world.mail");
    builder.append();
    builder.setFile("rfc822-sender.mail");
    builder.append();
    builder.setFile("rfc822.mail");
    builder.append();
    builder.setFile("rfc822-multiple-addresses.mail");
    builder.append();
    builder.select();
    builder.getFetch().bodyPeekCompleteMessage();
    builder.fetchAllMessages();
    builder.resetFetch();
    builder.getFetch().bodyPeekNotHeaders(
            ScriptBuilder.Fetch.SELECT_HEADERS);
    builder.fetchAllMessages();
    builder.select();
    builder.quit();
}

/**
 * Builds a script combining complete-message and selected-header BODY.PEEK
 * fetches (body continues on the next chunk).
 */
public static void simpleCombinedFetches(ScriptBuilder builder) throws Exception {
    builder.login();
    builder.create();
    builder.select();
    builder.append();
    builder.setFile("wild-example.mail");
builder.append();
    // (continues simpleCombinedFetches) remaining corpus, then the fetches.
    builder.setFile("multipart-alt.mail");
    builder.append();
    builder.setFile("multipart-mixed.mail");
    builder.append();
    builder.setFile("multipart-mixed-complex.mail");
    builder.append();
    builder.setFile("rfc822-hello-world.mail");
    builder.append();
    builder.setFile("rfc822-sender.mail");
    builder.append();
    builder.setFile("rfc822.mail");
    builder.append();
    builder.setFile("rfc822-multiple-addresses.mail");
    builder.append();
    builder.select();
    builder.getFetch().bodyPeekCompleteMessage();
    builder.fetchAllMessages();
    builder.resetFetch();
    builder.getFetch().bodyPeekHeaders(
            ScriptBuilder.Fetch.COMPREHENSIVE_HEADERS);
    builder.fetchAllMessages();
    builder.select();
    builder.quit();
}

/**
 * Builds a script checking that a BODY[] fetch clears the \Recent/\Seen
 * visibility: flags are fetched before and after the body fetch.
 *
 * @param builder script builder the commands are appended to
 * @throws Exception on script-construction failure
 */
public static void recent(ScriptBuilder builder) throws Exception {
    builder.login();
    builder.create();
    builder.select();
    builder.append();
    builder.select();
    builder.fetchFlags();
    builder.fetchSection("");
    builder.fetchFlags();
    builder.quit();
}

/** Same as {@link #multipartMixedMessages} but using BODY.PEEK. */
public static void multipartMixedMessagesPeek(ScriptBuilder builder) throws Exception {
    builder.setPeek(true);
    multipartMixedMessages(builder);
}

/**
 * Builds a script fetching every addressable section of a complex
 * multipart/mixed message, including nested parts and their MIME/HEADER/TEXT
 * subsections.
 *
 * @param builder script builder the commands are appended to
 * @throws Exception on script-construction failure
 */
public static void multipartMixedMessages(ScriptBuilder builder) throws Exception {
    builder.login();
    builder.create();
    builder.select();
    builder.setFile("multipart-mixed-complex.mail");
    builder.append();
    builder.select();
    builder.fetchSection("");
    builder.fetchSection("TEXT");
    builder.fetchSection("HEADER");
    builder.fetchSection("1");
    builder.fetchSection("2");
    builder.fetchSection("3");
    builder.fetchSection("3.HEADER");
    builder.fetchSection("3.TEXT");
    builder.fetchSection("3.1");
    builder.fetchSection("3.2");
    builder.fetchSection("4");
    builder.fetchSection("4.1");
    builder.fetchSection("4.1.MIME");
    builder.fetchSection("4.2");
    builder.fetchSection("4.2.HEADER");
    builder.fetchSection("4.2.TEXT");
    builder.fetchSection("4.2.1");
    builder.fetchSection("4.2.2");
    builder.fetchSection("4.2.2.1");
    builder.fetchSection("4.2.2.2");
    builder.select();
    builder.quit();
}

// (next method's modifiers; its declaration continues on the following chunk)
public static void
multipartAlternativePeek(ScriptBuilder builder) throws Exception {
    // Same as multipartAlternative but using BODY.PEEK.
    builder.setPeek(true);
    multipartAlternative(builder);
}

/**
 * Builds a script fetching every section of a multipart/alternative message.
 *
 * @param builder script builder the commands are appended to
 * @throws Exception on script-construction failure
 */
public static void multipartAlternative(ScriptBuilder builder) throws Exception {
    builder.login();
    builder.create();
    builder.select();
    builder.setFile("multipart-alt.mail");
    builder.append();
    builder.select();
    builder.fetchSection("");
    builder.fetchSection("TEXT");
    builder.fetchSection("HEADER");
    builder.fetchSection("1");
    builder.fetchSection("2");
    builder.fetchSection("3");
    builder.select();
    builder.quit();
}

/** Same as {@link #multipartMixed} but using BODY.PEEK. */
public static void multipartMixedPeek(ScriptBuilder builder) throws Exception {
    builder.setPeek(true);
    multipartMixed(builder);
}

/**
 * Builds a script fetching every section of a simple multipart/mixed message.
 *
 * @param builder script builder the commands are appended to
 * @throws Exception on script-construction failure
 */
public static void multipartMixed(ScriptBuilder builder) throws Exception {
    builder.login();
    builder.create();
    builder.select();
    builder.setFile("multipart-mixed.mail");
    builder.append();
    builder.select();
    builder.fetchSection("");
    builder.fetchSection("TEXT");
    builder.fetchSection("HEADER");
    builder.fetchSection("1");
    builder.fetchSection("2");
    builder.fetchSection("3");
    builder.fetchSection("4");
    builder.select();
    builder.quit();
}
}
// ===== concatenation boundary (dataset metadata, not source code) =====
// repo: apache/hive | size: 36,906 bytes
// path: ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.ql.optimizer.calcite.translator; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList.Builder; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import java.nio.charset.Charset; import org.apache.calcite.avatica.util.TimeUnit; import org.apache.calcite.avatica.util.TimeUnitRange; import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.rel.type.RelDataTypeFactory; import org.apache.calcite.rex.RexBuilder; import org.apache.calcite.rex.RexCall; import org.apache.calcite.rex.RexLiteral; import org.apache.calcite.rex.RexNode; import org.apache.calcite.rex.RexUtil; import org.apache.calcite.sql.SqlBinaryOperator; import org.apache.calcite.sql.SqlCollation; import org.apache.calcite.sql.SqlIntervalQualifier; import org.apache.calcite.sql.SqlKind; import org.apache.calcite.sql.SqlOperator; import org.apache.calcite.sql.fun.SqlCastFunction; import org.apache.calcite.sql.fun.SqlStdOperatorTable; import org.apache.calcite.sql.parser.SqlParserPos; import org.apache.calcite.sql.type.SqlTypeName; import org.apache.calcite.sql.type.SqlTypeUtil; import 
org.apache.calcite.util.ConversionUtil; import org.apache.calcite.util.DateString; import org.apache.calcite.util.TimestampString; import org.apache.hadoop.hive.common.type.Date; import org.apache.hadoop.hive.common.type.Decimal128; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.common.type.HiveVarchar; import org.apache.hadoop.hive.common.type.Timestamp; import org.apache.hadoop.hive.common.type.TimestampTZ; import org.apache.hadoop.hive.ql.exec.FunctionRegistry; import org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException; import org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException.UnsupportedFeature; import org.apache.hadoop.hive.ql.optimizer.calcite.HiveCalciteUtil; import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveComponentAccess; import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveExtractDate; import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveFloorDate; import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveIn; import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveToDateSqlOperator; import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.ql.parse.type.ExprNodeTypeCheck; import org.apache.hadoop.hive.ql.parse.type.RexNodeExprFactory; import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeDescUtils; import org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBaseBinary; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBaseCompare; 
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBetween; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFIn; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFTimestamp; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToArray; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToBinary; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToChar; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToDate; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToDecimal; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToMap; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToString; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToStruct; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToTimestampLocalTZ; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToUnixTimeStamp; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToVarchar; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFUnixTimeStamp; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFWhen; import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping; import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; import java.math.BigDecimal; import java.time.Instant; import java.util.ArrayList; import java.util.Calendar; import java.util.Collections; import java.util.List; /** * 
Class that contains logic to translate Hive expressions ({@link ExprNodeDesc})
 * into Calcite expressions ({@link RexNode}).
 */
public class RexNodeConverter {

  // Calcite expression builder shared by all conversions, and its type factory.
  private final RexBuilder rexBuilder;
  private final RelDataTypeFactory typeFactory;

  /**
   * Constructor used by HiveRexExecutorImpl.
   */
  public RexNodeConverter(RexBuilder rexBuilder) {
    this.rexBuilder = rexBuilder;
    this.typeFactory = rexBuilder.getTypeFactory();
  }

  /**
   * Dispatches on the concrete {@link ExprNodeDesc} subtype and delegates to
   * the matching private overload.
   *
   * @throws RuntimeException for subtypes without a handler
   */
  public RexNode convert(ExprNodeDesc expr) throws SemanticException {
    if (expr instanceof ExprNodeGenericFuncDesc) {
      return convert((ExprNodeGenericFuncDesc) expr);
    } else if (expr instanceof ExprNodeConstantDesc) {
      return convert((ExprNodeConstantDesc) expr);
    } else if (expr instanceof ExprNodeFieldDesc) {
      return convert((ExprNodeFieldDesc) expr);
    } else {
      throw new RuntimeException("Unsupported Expression");
    }
    // TODO: Handle ExprNodeColumnDesc, ExprNodeColumnListDesc
  }

  /**
   * Converts a field access: struct field access maps to a Calcite field
   * access, collection element access maps to HiveComponentAccess.
   */
  private RexNode convert(final ExprNodeFieldDesc fieldDesc) throws SemanticException {
    RexNode rexNode = convert(fieldDesc.getDesc());
    if (rexNode.getType().isStruct()) {
      // regular case of accessing nested field in a column
      return rexBuilder.makeFieldAccess(rexNode, fieldDesc.getFieldName(), true);
    } else if (rexNode.getType().getComponentType() != null) {
      return rexBuilder.makeCall(rexNode.getType().getComponentType(),
          HiveComponentAccess.COMPONENT_ACCESS, Collections.singletonList(rexNode));
    } else {
      throw new CalciteSemanticException("Unexpected rexnode : "
          + rexNode.getClass().getCanonicalName());
    }
  }

  /**
   * Converts a generic UDF call (body continues on the next chunks):
   * resolves a common target type, casts children as needed, then maps the
   * UDF to a Calcite operator with operator-specific rewrites.
   */
  private RexNode convert(ExprNodeGenericFuncDesc func) throws SemanticException {
    ExprNodeDesc tmpExprNode;
    RexNode tmpRN;

    List<RexNode> childRexNodeLst = new ArrayList<>();
    Builder<RelDataType> argTypeBldr = ImmutableList.<RelDataType> builder();

    // TODO: 1) Expand to other functions as needed 2) What about types other than primitive.
TypeInfo tgtDT = null;
    GenericUDF tgtUdf = func.getGenericUDF();

    // Classify the UDF: exactly one of these categories drives the common-type
    // resolution below.
    boolean isNumeric = (tgtUdf instanceof GenericUDFBaseBinary
        && func.getTypeInfo().getCategory() == Category.PRIMITIVE
        && (PrimitiveGrouping.NUMERIC_GROUP == PrimitiveObjectInspectorUtils.getPrimitiveGrouping(
            ((PrimitiveTypeInfo) func.getTypeInfo()).getPrimitiveCategory())));
    boolean isCompare = !isNumeric && tgtUdf instanceof GenericUDFBaseCompare;
    boolean isWhenCase = tgtUdf instanceof GenericUDFWhen;
    boolean isTransformableTimeStamp = func.getGenericUDF() instanceof GenericUDFUnixTimeStamp
        && !func.getChildren().isEmpty();
    boolean isBetween = !isNumeric && tgtUdf instanceof GenericUDFBetween;
    boolean isIN = !isNumeric && tgtUdf instanceof GenericUDFIn;
    boolean isAllPrimitive = true;

    if (isNumeric) {
      tgtDT = func.getTypeInfo();
      assert func.getChildren().size() == 2;
      // TODO: checking 2 children is useless, compare already does that.
    } else if (isCompare && (func.getChildren().size() == 2)) {
      tgtDT = FunctionRegistry.getCommonClassForComparison(func.getChildren().get(0)
          .getTypeInfo(), func.getChildren().get(1).getTypeInfo());
    } else if (isWhenCase) {
      // If it is a CASE or WHEN, we need to check that children do not contain stateful functions
      // as they are not allowed
      if (checkForStatefulFunctions(func.getChildren())) {
        throw new SemanticException("Stateful expressions cannot be used inside of CASE");
      }
    } else if (isTransformableTimeStamp) {
      // unix_timestamp(args) -> to_unix_timestamp(args)
      func = ExprNodeGenericFuncDesc.newInstance(new GenericUDFToUnixTimeStamp(),
          func.getChildren());
    } else if (isBetween) {
      assert func.getChildren().size() == 4;
      // We skip first child as is not involved (is the revert boolean)
      // The target type needs to account for all 3 operands
      tgtDT = FunctionRegistry.getCommonClassForComparison(
          func.getChildren().get(1).getTypeInfo(),
          FunctionRegistry.getCommonClassForComparison(
              func.getChildren().get(2).getTypeInfo(),
              func.getChildren().get(3).getTypeInfo()));
    } else if (isIN) {
      // We're only considering the first element of the IN list for the type
      assert func.getChildren().size() > 1;
      tgtDT = FunctionRegistry.getCommonClassForComparison(func.getChildren().get(0)
          .getTypeInfo(), func.getChildren().get(1).getTypeInfo());
    }

    // Convert each child, inserting a conversion cast when the child's type
    // differs from the resolved common comparison type.
    for (int i = 0; i < func.getChildren().size(); ++i) {
      ExprNodeDesc childExpr = func.getChildren().get(i);
      tmpExprNode = childExpr;
      if (tgtDT != null && tgtDT.getCategory() == Category.PRIMITIVE
          && TypeInfoUtils.isConversionRequiredForComparison(tgtDT, childExpr.getTypeInfo())) {
        if (isCompare || isBetween || isIN) {
          // For compare, we will convert requisite children
          // For BETWEEN skip the first child (the revert boolean)
          if (!isBetween || i > 0) {
            tmpExprNode = ExprNodeTypeCheck.getExprNodeDefaultExprProcessor()
                .createConversionCast(childExpr, (PrimitiveTypeInfo) tgtDT);
          }
        } else if (isNumeric) {
          // For numeric, we'll do minimum necessary cast - if we cast to the type
          // of expression, bad things will happen.
          PrimitiveTypeInfo minArgType = ExprNodeDescUtils.deriveMinArgumentCast(childExpr, tgtDT);
          tmpExprNode = ExprNodeTypeCheck.getExprNodeDefaultExprProcessor()
              .createConversionCast(childExpr, minArgType);
        } else {
          throw new AssertionError("Unexpected " + tgtDT + " - not a numeric op or compare");
        }
      }

      isAllPrimitive =
          isAllPrimitive && tmpExprNode.getTypeInfo().getCategory() == Category.PRIMITIVE;
      argTypeBldr.add(TypeConverter.convert(tmpExprNode.getTypeInfo(), typeFactory));
      tmpRN = convert(tmpExprNode);
      childRexNodeLst.add(tmpRN);
    }

    // See if this is an explicit cast.
    RelDataType retType = TypeConverter.convert(func.getTypeInfo(), typeFactory);
    RexNode expr = handleExplicitCast(func.getGenericUDF(), retType, childRexNodeLst, rexBuilder);

    if (expr == null) {
      // This is not a cast; process the function.
      SqlOperator calciteOp = SqlFunctionConverter.getCalciteOperator(func.getFuncText(),
          func.getGenericUDF(), argTypeBldr.build(), retType);
      if (calciteOp.getKind() == SqlKind.CASE) {
        // If it is a case operator, we need to rewrite it
        childRexNodeLst = rewriteCaseChildren(childRexNodeLst, rexBuilder);
        // Adjust branch types by inserting explicit casts if the actual is ambiguous
        childRexNodeLst = adjustCaseBranchTypes(childRexNodeLst, retType, rexBuilder);
      } else if (HiveExtractDate.ALL_FUNCTIONS.contains(calciteOp)) {
        // If it is a extract operator, we need to rewrite it
        childRexNodeLst = rewriteExtractDateChildren(calciteOp, childRexNodeLst, rexBuilder);
      } else if (HiveFloorDate.ALL_FUNCTIONS.contains(calciteOp)) {
        // If it is a floor <date> operator, we need to rewrite it
        childRexNodeLst = rewriteFloorDateChildren(calciteOp, childRexNodeLst, rexBuilder);
      } else if (HiveIn.INSTANCE.equals(calciteOp) && isAllPrimitive) {
        if (childRexNodeLst.size() == 2) {
          // if it is a single item in an IN clause, transform A IN (B) to A = B
          // from IN [A,B] => EQUALS [A,B]
          // except complex types
          calciteOp = SqlStdOperatorTable.EQUALS;
        } else if (RexUtil.isReferenceOrAccess(childRexNodeLst.get(0), true)) {
          // if it is more than an single item in an IN clause,
          // transform from IN [A,B,C] => OR [EQUALS [A,B], EQUALS [A,C]]
          // except complex types
          // Rewrite to OR is done only if number of operands are less than
          // the threshold configured
          childRexNodeLst = rewriteInClauseChildren(calciteOp, childRexNodeLst, rexBuilder);
          calciteOp = SqlStdOperatorTable.OR;
        }
      } else if (calciteOp.getKind() == SqlKind.COALESCE && childRexNodeLst.size() > 1) {
        // Rewrite COALESCE as a CASE
        // This allows to be further reduced to OR, if possible
        calciteOp = SqlStdOperatorTable.CASE;
        childRexNodeLst = rewriteCoalesceChildren(childRexNodeLst, rexBuilder);
        // Adjust branch types by inserting explicit casts if the actual is ambiguous
        childRexNodeLst = adjustCaseBranchTypes(childRexNodeLst, retType, rexBuilder);
} else if (calciteOp == HiveToDateSqlOperator.INSTANCE) { childRexNodeLst = rewriteToDateChildren(childRexNodeLst, rexBuilder); } else if (calciteOp.getKind() == SqlKind.BETWEEN) { assert childRexNodeLst.get(0).isAlwaysTrue() || childRexNodeLst.get(0).isAlwaysFalse(); childRexNodeLst = rewriteBetweenChildren(childRexNodeLst, rexBuilder); if (childRexNodeLst.get(0).isAlwaysTrue()) { calciteOp = SqlStdOperatorTable.OR; } else { calciteOp = SqlStdOperatorTable.AND; } } expr = rexBuilder.makeCall(retType, calciteOp, childRexNodeLst); } else { retType = expr.getType(); } // TODO: Cast Function in Calcite have a bug where it infer type on cast throws // an exception if (expr instanceof RexCall && !(((RexCall) expr).getOperator() instanceof SqlCastFunction)) { RexCall call = (RexCall) expr; expr = rexBuilder.makeCall(retType, call.getOperator(), RexUtil.flatten(call.getOperands(), call.getOperator())); } return expr; } private static boolean castExprUsingUDFBridge(GenericUDF gUDF) { boolean castExpr = false; if (gUDF instanceof GenericUDFBridge) { String udfClassName = ((GenericUDFBridge) gUDF).getUdfClassName(); if (udfClassName != null) { int sp = udfClassName.lastIndexOf('.'); // TODO: add method to UDFBridge to say if it is a cast func if (sp >= 0 & (sp + 1) < udfClassName.length()) { udfClassName = udfClassName.substring(sp + 1); if (udfClassName.equals("UDFToBoolean") || udfClassName.equals("UDFToByte") || udfClassName.equals("UDFToDouble") || udfClassName.equals("UDFToInteger") || udfClassName.equals("UDFToLong") || udfClassName.equals("UDFToShort") || udfClassName.equals("UDFToFloat")) { castExpr = true; } } } } return castExpr; } public static RexNode handleExplicitCast(GenericUDF udf, RelDataType returnType, List<RexNode> childRexNodeLst, RexBuilder rexBuilder) { RexNode castExpr = null; if (childRexNodeLst != null && childRexNodeLst.size() == 1) { if ((udf instanceof GenericUDFToChar) || (udf instanceof GenericUDFToVarchar) || (udf instanceof 
GenericUDFToString)
          || (udf instanceof GenericUDFToDecimal) || (udf instanceof GenericUDFToDate)
          || (udf instanceof GenericUDFTimestamp) || (udf instanceof GenericUDFToTimestampLocalTZ)
          || (udf instanceof GenericUDFToBinary) || castExprUsingUDFBridge(udf)
          || (udf instanceof GenericUDFToMap) || (udf instanceof GenericUDFToArray)
          || (udf instanceof GenericUDFToStruct)) {
        castExpr = rexBuilder.makeAbstractCast(returnType, childRexNodeLst.get(0));
      }
    }
    return castExpr;
  }

  /*
   * Hive syntax allows to define CASE expressions in two ways:
   * - CASE a WHEN b THEN c [WHEN d THEN e]* [ELSE f] END (translated into the
   * "case" function, ELSE clause is optional)
   * - CASE WHEN a THEN b [WHEN c THEN d]* [ELSE e] END (translated into the
   * "when" function, ELSE clause is optional)
   * The first type is transformed to the second one at parsing time.
   * Calcite only has the equivalent to the "when" Hive function and ELSE clause is
   * not optional.
   *
   * See parser rule caseExpression in IdentifiersParser.g
   */
  public static List<RexNode> rewriteCaseChildren(List<RexNode> childRexNodeLst,
      RexBuilder rexBuilder) {
    List<RexNode> newChildRexNodeLst = new ArrayList<>();
    for (int i = 0; i < childRexNodeLst.size(); i++) {
      RexNode child = childRexNodeLst.get(i);
      if (RexUtil.isNull(child)) {
        if (i % 2 == 0 && i != childRexNodeLst.size() - 1) {
          // NULL in a WHEN (condition) position: give it BOOLEAN type so
          // Calcite accepts it as a condition.
          if (SqlTypeName.NULL.equals(child.getType().getSqlTypeName())) {
            child = rexBuilder.makeNullLiteral(
                rexBuilder.getTypeFactory().createSqlType(SqlTypeName.BOOLEAN));
          }
        } else {
          // this is needed to provide typed NULLs which were working before
          // example: IF(false, array(1,2,3), NULL)
          if (!RexUtil.isNull(childRexNodeLst.get(1))) {
            child = rexBuilder.makeCast(childRexNodeLst.get(1).getType(), child);
          }
        }
      }
      newChildRexNodeLst.add(child);
    }
    // Calcite always needs the else clause to be defined explicitly
    if (newChildRexNodeLst.size() % 2 == 0) {
      newChildRexNodeLst.add(rexBuilder.makeNullLiteral(
          newChildRexNodeLst.get(newChildRexNodeLst.size() - 1).getType()));
    }
return newChildRexNodeLst;
  }

  /**
   * Adds explicit casts if Calcite's type system could not resolve the CASE
   * branches to a common type.
   *
   * Calcite is stricter than Hive w.r.t. type conversions: if a CASE has
   * branches with string/int/boolean branch types, there is no common type.
   */
  public static List<RexNode> adjustCaseBranchTypes(List<RexNode> nodes, RelDataType retType,
      RexBuilder rexBuilder) {
    List<RexNode> newNodes = new ArrayList<>();
    for (int i = 0; i < nodes.size(); i++) {
      RexNode node = nodes.get(i);
      // Odd indexes are THEN branches; the last node is the ELSE branch.
      if ((i % 2 == 1 || i == nodes.size() - 1)
          && !node.getType().getSqlTypeName().equals(retType.getSqlTypeName())) {
        newNodes.add(rexBuilder.makeCast(retType, node));
      } else {
        newNodes.add(node);
      }
    }
    return newNodes;
  }

  /**
   * Rewrites EXTRACT-family children to (time-unit flag, datetime operand);
   * sub-day units require a TIMESTAMP operand (body continues on the next
   * chunk).
   */
  public static List<RexNode> rewriteExtractDateChildren(SqlOperator op,
      List<RexNode> childRexNodeLst, RexBuilder rexBuilder) {
    List<RexNode> newChildRexNodeLst = new ArrayList<>(2);
    final boolean isTimestampLevel;
    if (op == HiveExtractDate.YEAR) {
      newChildRexNodeLst.add(rexBuilder.makeFlag(TimeUnitRange.YEAR));
      isTimestampLevel = false;
    } else if (op == HiveExtractDate.QUARTER) {
      newChildRexNodeLst.add(rexBuilder.makeFlag(TimeUnitRange.QUARTER));
      isTimestampLevel = false;
    } else if (op == HiveExtractDate.MONTH) {
      newChildRexNodeLst.add(rexBuilder.makeFlag(TimeUnitRange.MONTH));
      isTimestampLevel = false;
    } else if (op == HiveExtractDate.WEEK) {
      newChildRexNodeLst.add(rexBuilder.makeFlag(TimeUnitRange.WEEK));
      isTimestampLevel = false;
    } else if (op == HiveExtractDate.DAY) {
      newChildRexNodeLst.add(rexBuilder.makeFlag(TimeUnitRange.DAY));
      isTimestampLevel = false;
    } else if (op == HiveExtractDate.HOUR) {
      newChildRexNodeLst.add(rexBuilder.makeFlag(TimeUnitRange.HOUR));
      isTimestampLevel = true;
    } else if (op == HiveExtractDate.MINUTE) {
      newChildRexNodeLst.add(rexBuilder.makeFlag(TimeUnitRange.MINUTE));
      isTimestampLevel = true;
    } else if (op == HiveExtractDate.SECOND) {
      newChildRexNodeLst.add(rexBuilder.makeFlag(TimeUnitRange.SECOND));
isTimestampLevel = true;
  } else {
    // Operator not matched above: no time-unit flag added; treat as date-level.
    isTimestampLevel = false;
  }
  final RexNode child = Iterables.getOnlyElement(childRexNodeLst);
  if (SqlTypeUtil.isDatetime(child.getType()) || SqlTypeUtil.isInterval(child.getType())) {
    newChildRexNodeLst.add(child);
  } else {
    // We need to add a cast to DATETIME Family
    if (isTimestampLevel) {
      newChildRexNodeLst.add(makeCast(SqlTypeName.TIMESTAMP, child, rexBuilder));
    } else {
      newChildRexNodeLst.add(makeCast(SqlTypeName.DATE, child, rexBuilder));
    }
  }
  return newChildRexNodeLst;
}

/** Builds a CAST of {@code child} to a nullable version of the given SQL type. */
private static RexNode makeCast(SqlTypeName typeName, final RexNode child, RexBuilder rexBuilder) {
  RelDataType sqlType = rexBuilder.getTypeFactory().createSqlType(typeName);
  RelDataType nullableType = rexBuilder.getTypeFactory().createTypeWithNullability(sqlType, true);
  return rexBuilder.makeCast(nullableType, child);
}

/** Rewrites FLOOR(x TO unit) children into Calcite's (operand, time-unit flag) form. */
public static List<RexNode> rewriteFloorDateChildren(SqlOperator op, List<RexNode> childRexNodeLst,
    RexBuilder rexBuilder) {
  List<RexNode> newChildRexNodeLst = new ArrayList<>();
  assert childRexNodeLst.size() == 1;
  newChildRexNodeLst.add(childRexNodeLst.get(0));
  if (op == HiveFloorDate.YEAR) {
    newChildRexNodeLst.add(rexBuilder.makeFlag(TimeUnitRange.YEAR));
  } else if (op == HiveFloorDate.QUARTER) {
    newChildRexNodeLst.add(rexBuilder.makeFlag(TimeUnitRange.QUARTER));
  } else if (op == HiveFloorDate.MONTH) {
    newChildRexNodeLst.add(rexBuilder.makeFlag(TimeUnitRange.MONTH));
  } else if (op == HiveFloorDate.WEEK) {
    newChildRexNodeLst.add(rexBuilder.makeFlag(TimeUnitRange.WEEK));
  } else if (op == HiveFloorDate.DAY) {
    newChildRexNodeLst.add(rexBuilder.makeFlag(TimeUnitRange.DAY));
  } else if (op == HiveFloorDate.HOUR) {
    newChildRexNodeLst.add(rexBuilder.makeFlag(TimeUnitRange.HOUR));
  } else if (op == HiveFloorDate.MINUTE) {
    newChildRexNodeLst.add(rexBuilder.makeFlag(TimeUnitRange.MINUTE));
  } else if (op == HiveFloorDate.SECOND) {
    newChildRexNodeLst.add(rexBuilder.makeFlag(TimeUnitRange.SECOND));
  }
  return newChildRexNodeLst;
}

/**
 * Rewrites a single to-date child: datetime/interval operands pass through unchanged,
 * anything else is first cast to TIMESTAMP.
 */
public static List<RexNode>
    rewriteToDateChildren(List<RexNode> childRexNodeLst, RexBuilder rexBuilder) {
  List<RexNode> newChildRexNodeLst = new ArrayList<>();
  assert childRexNodeLst.size() == 1;
  RexNode child = childRexNodeLst.get(0);
  if (SqlTypeUtil.isDatetime(child.getType()) || SqlTypeUtil.isInterval(child.getType())) {
    newChildRexNodeLst.add(child);
  } else {
    newChildRexNodeLst.add(makeCast(SqlTypeName.TIMESTAMP, child, rexBuilder));
  }
  return newChildRexNodeLst;
}

/**
 * The method tries to rewrite the operands of an IN function call into
 * the operands for an OR function call.
 * For instance:
 * <pre>
 * (c) IN ( v1, v2, ...) =&gt; c=v1 || c=v2 || ...
 * Input: (c, v1, v2, ...)
 * Output: (c=v1, c=v2, ...)
 * </pre>
 * Or:
 * <pre>
 * (c,d) IN ( (v1,v2), (v3,v4), ...) =&gt; (c=v1 &amp;&amp; d=v2) || (c=v3 &amp;&amp; d=v4) || ...
 * Input: ((c,d), (v1,v2), (v3,v4), ...)
 * Output: (c=v1 &amp;&amp; d=v2, c=v3 &amp;&amp; d=v4, ...)
 * </pre>
 *
 * Returns null if the transformation fails, e.g., when non-deterministic
 * calls are found in the expressions.
*/
public static List<RexNode> transformInToOrOperands(List<RexNode> operands, RexBuilder rexBuilder) {
  final List<RexNode> disjuncts = new ArrayList<>(operands.size() - 2);
  if (operands.get(0).getKind() != SqlKind.ROW) {
    // Single-column IN: c IN (v1, v2, ...) => (c=v1, c=v2, ...)
    final RexNode columnExpression = operands.get(0);
    if (!HiveCalciteUtil.isDeterministic(columnExpression)) {
      // Bail out
      return null;
    }
    for (int i = 1; i < operands.size(); i++) {
      final RexNode valueExpression = operands.get(i);
      if (!HiveCalciteUtil.isDeterministic(valueExpression)) {
        // Bail out
        return null;
      }
      disjuncts.add(rexBuilder.makeCall(
          SqlStdOperatorTable.EQUALS,
          columnExpression,
          valueExpression));
    }
  } else {
    // Multi-column IN: (c,d) IN ((v1,v2), ...) => (c=v1 AND d=v2, ...)
    final RexCall columnExpressions = (RexCall) operands.get(0);
    if (!HiveCalciteUtil.isDeterministic(columnExpressions)) {
      // Bail out
      return null;
    }
    for (int i = 1; i < operands.size(); i++) {
      List<RexNode> conjuncts = new ArrayList<>(columnExpressions.getOperands().size() - 1);
      // Each value tuple may arrive either as a structured literal or as a ROW call.
      List<?> valueExpressions = null;
      if (operands.get(i) instanceof RexLiteral) {
        RexLiteral literal = (RexLiteral) operands.get(i);
        valueExpressions = literal.getValueAs(List.class);
      } else if (operands.get(i) instanceof RexCall) {
        RexCall call = (RexCall) operands.get(i);
        if (HiveCalciteUtil.isDeterministic(call)) {
          valueExpressions = call.getOperands();
        }
      }
      if (valueExpressions == null) {
        // Bail out: value shape not understood, or a non-deterministic call was found.
        return null;
      }
      for (int j = 0; j < columnExpressions.getOperands().size(); j++) {
        conjuncts.add(rexBuilder.makeCall(
            SqlStdOperatorTable.EQUALS,
            columnExpressions.getOperands().get(j),
            (RexNode) valueExpressions.get(j)));
      }
      if (conjuncts.size() > 1) {
        disjuncts.add(rexBuilder.makeCall(
            SqlStdOperatorTable.AND,
            conjuncts));
      } else {
        disjuncts.add(conjuncts.get(0));
      }
    }
  }
  return disjuncts;
}

/** Rewrites IN(c, v1, v2, ...) children into equality predicates (c=v1, c=v2, ...). */
public static List<RexNode> rewriteInClauseChildren(SqlOperator op, List<RexNode> childRexNodeLst,
    RexBuilder rexBuilder) throws SemanticException {
  assert op == HiveIn.INSTANCE;
  RexNode firstPred = childRexNodeLst.get(0);
  List<RexNode> newChildRexNodeLst = new ArrayList<RexNode>();
  for (int i = 1; i < childRexNodeLst.size(); i++) {
    newChildRexNodeLst.add(
        rexBuilder.makeCall(
            SqlStdOperatorTable.EQUALS, firstPred, childRexNodeLst.get(i)));
  }
  return newChildRexNodeLst;
}

/**
 * Rewrites COALESCE(a, ..., z) children into CASE form:
 * (a IS NOT NULL, a, ..., z) — condition/value pairs plus the last child as ELSE.
 */
public static List<RexNode> rewriteCoalesceChildren(
    List<RexNode> childRexNodeLst, RexBuilder rexBuilder) {
  final List<RexNode> convertedChildList = Lists.newArrayList();
  assert childRexNodeLst.size() > 0;
  int i=0;
  for (; i < childRexNodeLst.size()-1; ++i) {
    // WHEN child not null THEN child
    final RexNode child = childRexNodeLst.get(i);
    RexNode childCond = rexBuilder.makeCall(
        SqlStdOperatorTable.IS_NOT_NULL, child);
    convertedChildList.add(childCond);
    convertedChildList.add(child);
  }
  // Add the last child as the ELSE element
  convertedChildList.add(childRexNodeLst.get(i));
  return convertedChildList;
}

/**
 * Rewrites BETWEEN children (invert-flag, operand, low, high) into two comparisons.
 * The first child encodes inversion (TRUE presumably means NOT BETWEEN — the
 * comparison operator flips accordingly).
 */
public static List<RexNode> rewriteBetweenChildren(List<RexNode> childRexNodeLst,
    RexBuilder rexBuilder) {
  final List<RexNode> convertedChildList = Lists.newArrayList();
  SqlBinaryOperator cmpOp;
  if (childRexNodeLst.get(0).isAlwaysTrue()) {
    cmpOp = SqlStdOperatorTable.GREATER_THAN;
  } else {
    cmpOp = SqlStdOperatorTable.LESS_THAN_OR_EQUAL;
  }
  RexNode op = childRexNodeLst.get(1);
  RexNode rangeL = childRexNodeLst.get(2);
  RexNode rangeH = childRexNodeLst.get(3);
  convertedChildList.add(rexBuilder.makeCall(cmpOp, rangeL, op));
  convertedChildList.add(rexBuilder.makeCall(cmpOp, op, rangeH));
  return convertedChildList;
}

/** Returns true if any generic UDF in the expression trees is stateful. */
private static boolean checkForStatefulFunctions(List<ExprNodeDesc> list) {
  for (ExprNodeDesc node : list) {
    if (node instanceof ExprNodeGenericFuncDesc) {
      GenericUDF nodeUDF = ((ExprNodeGenericFuncDesc) node).getGenericUDF();
      // Stateful?
if (FunctionRegistry.isStateful(nodeUDF)) {
        return true;
      }
      // Recurse into the argument expressions as well.
      if (node.getChildren() != null && !node.getChildren().isEmpty()
          && checkForStatefulFunctions(node.getChildren())) {
        return true;
      }
    }
  }
  return false;
}

/**
 * Converts a Hive constant expression into a Calcite literal {@link RexNode}.
 *
 * @throws CalciteSemanticException for values Calcite cannot represent
 *         (invalid decimal literals, NaN doubles, unsupported categories are a
 *         RuntimeException)
 */
protected RexNode convert(ExprNodeConstantDesc literal) throws CalciteSemanticException {
  final RelDataTypeFactory dtFactory = rexBuilder.getTypeFactory();
  final PrimitiveTypeInfo hiveType = (PrimitiveTypeInfo) literal.getTypeInfo();
  final RelDataType calciteDataType = TypeConverter.convert(hiveType, dtFactory);

  PrimitiveCategory hiveTypeCategory = hiveType.getPrimitiveCategory();

  ConstantObjectInspector coi = literal.getWritableObjectInspector();
  Object value = ObjectInspectorUtils.copyToStandardJavaObject(coi.getWritableConstantValue(),
      coi);

  RexNode calciteLiteral = null;
  // If value is null, the type should also be VOID.
  if (value == null) {
    hiveTypeCategory = PrimitiveCategory.VOID;
  }

  // TODO: Verify if we need to use ConstantObjectInspector to unwrap data
  switch (hiveTypeCategory) {
  case BOOLEAN:
    calciteLiteral = rexBuilder.makeLiteral(((Boolean) value).booleanValue());
    break;
  case BYTE:
    calciteLiteral = rexBuilder.makeExactLiteral(new BigDecimal((Byte) value), calciteDataType);
    break;
  case SHORT:
    calciteLiteral = rexBuilder.makeExactLiteral(new BigDecimal((Short) value), calciteDataType);
    break;
  case INT:
    calciteLiteral = rexBuilder.makeExactLiteral(new BigDecimal((Integer) value));
    break;
  case LONG:
    calciteLiteral = rexBuilder.makeBigintLiteral(new BigDecimal((Long) value));
    break;
  case DECIMAL:
    if (value instanceof HiveDecimal) {
      value = ((HiveDecimal) value).bigDecimalValue();
    } else if (value instanceof Decimal128) {
      value = ((Decimal128) value).toBigDecimal();
    }
    if (value == null) {
      // We have found an invalid decimal value while enforcing precision and
      // scale. Ideally, we would replace it with null here, which is what Hive
      // does. However, we need to plumb this thru up somehow, because otherwise
      // having different expression type in AST causes the plan generation to
      // fail after CBO, probably due to some residual state in SA/QB.
      // For now, we will not run CBO in the presence of invalid decimal literals.
      throw new CalciteSemanticException("Expression " + literal.getExprString()
          + " is not a valid decimal", UnsupportedFeature.Invalid_decimal);
      // TODO: return createNullLiteral(literal);
    }
    calciteLiteral = rexBuilder.makeExactLiteral((BigDecimal) value, calciteDataType);
    break;
  case FLOAT:
    calciteLiteral = rexBuilder.makeApproxLiteral(
        new BigDecimal(Float.toString((Float)value)), calciteDataType);
    break;
  case DOUBLE:
    // TODO: The best solution is to support NaN in expression reduction.
    if (Double.isNaN((Double) value)) {
      throw new CalciteSemanticException("NaN", UnsupportedFeature.Invalid_decimal);
    }
    calciteLiteral = rexBuilder.makeApproxLiteral(
        new BigDecimal(Double.toString((Double)value)), calciteDataType);
    break;
  case CHAR:
    if (value instanceof HiveChar) {
      value = ((HiveChar) value).getValue();
    }
    final int lengthChar = TypeInfoUtils.getCharacterLengthForType(hiveType);
    RelDataType charType = rexBuilder.getTypeFactory().createTypeWithCharsetAndCollation(
        rexBuilder.getTypeFactory().createSqlType(SqlTypeName.CHAR, lengthChar),
        Charset.forName(ConversionUtil.NATIVE_UTF16_CHARSET_NAME), SqlCollation.IMPLICIT);
    calciteLiteral = rexBuilder.makeLiteral(
        RexNodeExprFactory.makeHiveUnicodeString((String) value), charType, false);
    break;
  case VARCHAR:
    if (value instanceof HiveVarchar) {
      value = ((HiveVarchar) value).getValue();
    }
    final int lengthVarchar = TypeInfoUtils.getCharacterLengthForType(hiveType);
    RelDataType varcharType = rexBuilder.getTypeFactory().createTypeWithCharsetAndCollation(
        rexBuilder.getTypeFactory().createSqlType(SqlTypeName.VARCHAR, lengthVarchar),
        Charset.forName(ConversionUtil.NATIVE_UTF16_CHARSET_NAME), SqlCollation.IMPLICIT);
    calciteLiteral = rexBuilder.makeLiteral(
        RexNodeExprFactory.makeHiveUnicodeString((String) value), varcharType, true);
    break;
  case STRING:
    // Hive STRING maps to an unbounded VARCHAR with UTF-16 charset.
    RelDataType stringType = rexBuilder.getTypeFactory().createTypeWithCharsetAndCollation(
        rexBuilder.getTypeFactory().createSqlType(SqlTypeName.VARCHAR, Integer.MAX_VALUE),
        Charset.forName(ConversionUtil.NATIVE_UTF16_CHARSET_NAME), SqlCollation.IMPLICIT);
    calciteLiteral = rexBuilder.makeLiteral(
        RexNodeExprFactory.makeHiveUnicodeString((String) value), stringType, true);
    break;
  case DATE:
    final Date date = (Date) value;
    calciteLiteral = rexBuilder.makeDateLiteral(
        DateString.fromDaysSinceEpoch(date.toEpochDay()));
    break;
  case TIMESTAMP:
    final TimestampString tsString;
    if (value instanceof Calendar) {
      tsString = TimestampString.fromCalendarFields((Calendar) value);
    } else {
      final Timestamp ts = (Timestamp) value;
      tsString = TimestampString.fromMillisSinceEpoch(ts.toEpochMilli()).withNanos(ts.getNanos());
    }
    // Must call makeLiteral, not makeTimestampLiteral
    // to have the RexBuilder.roundTime logic kick in
    calciteLiteral = rexBuilder.makeLiteral(
        tsString,
        rexBuilder.getTypeFactory().createSqlType(
            SqlTypeName.TIMESTAMP,
            rexBuilder.getTypeFactory().getTypeSystem().getDefaultPrecision(SqlTypeName.TIMESTAMP)),
        false);
    break;
  case TIMESTAMPLOCALTZ:
    final TimestampString tsLocalTZString;
    Instant i = ((TimestampTZ)value).getZonedDateTime().toInstant();
    tsLocalTZString = TimestampString
        .fromMillisSinceEpoch(i.toEpochMilli())
        .withNanos(i.getNano());
    calciteLiteral = rexBuilder.makeTimestampWithLocalTimeZoneLiteral(
        tsLocalTZString,
        rexBuilder.getTypeFactory().getTypeSystem().getDefaultPrecision(SqlTypeName.TIMESTAMP_WITH_LOCAL_TIME_ZONE));
    break;
  case INTERVAL_YEAR_MONTH:
    // Calcite year-month literal value is months as BigDecimal
    BigDecimal totalMonths = BigDecimal.valueOf(((HiveIntervalYearMonth) value).getTotalMonths());
    calciteLiteral = rexBuilder.makeIntervalLiteral(totalMonths,
        new SqlIntervalQualifier(TimeUnit.YEAR, TimeUnit.MONTH, new SqlParserPos(1, 1)));
    break;
  case INTERVAL_DAY_TIME:
    // Calcite day-time interval is millis value as BigDecimal
    // Seconds converted to millis
    BigDecimal secsValueBd = BigDecimal
        .valueOf(((HiveIntervalDayTime) value).getTotalSeconds() * 1000);
    // Nanos converted to millis
    BigDecimal nanosValueBd = BigDecimal.valueOf(((HiveIntervalDayTime) value).getNanos(), 6);
    calciteLiteral = rexBuilder.makeIntervalLiteral(secsValueBd.add(nanosValueBd),
        new SqlIntervalQualifier(TimeUnit.MILLISECOND, null, new SqlParserPos(1, 1)));
    break;
  case VOID:
    calciteLiteral = rexBuilder.makeLiteral(null, calciteDataType, true);
    break;
  case BINARY:
  case UNKNOWN:
  default:
    throw new RuntimeException("Unsupported Literal");
  }
  return calciteLiteral;
}
}
apache/ignite-3
36,749
modules/catalog/src/test/java/org/apache/ignite/internal/catalog/CatalogIndexTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.catalog; import static java.util.stream.Collectors.toList; import static org.apache.ignite.internal.catalog.commands.CatalogUtils.pkIndexName; import static org.apache.ignite.internal.catalog.descriptors.CatalogColumnCollation.ASC_NULLS_LAST; import static org.apache.ignite.internal.catalog.descriptors.CatalogColumnCollation.DESC_NULLS_FIRST; import static org.apache.ignite.internal.catalog.descriptors.CatalogIndexStatus.AVAILABLE; import static org.apache.ignite.internal.catalog.descriptors.CatalogIndexStatus.BUILDING; import static org.apache.ignite.internal.catalog.descriptors.CatalogIndexStatus.REGISTERED; import static org.apache.ignite.internal.catalog.descriptors.CatalogIndexStatus.STOPPING; import static org.apache.ignite.internal.testframework.matchers.CompletableFutureExceptionMatcher.willThrow; import static org.apache.ignite.internal.testframework.matchers.CompletableFutureExceptionMatcher.willThrowFast; import static org.apache.ignite.internal.testframework.matchers.CompletableFutureMatcher.willCompleteSuccessfully; import static org.apache.ignite.internal.util.CollectionUtils.view; import static 
org.apache.ignite.internal.util.CompletableFutures.falseCompletedFuture; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasItems; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNotEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertNotSame; import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertSame; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.clearInvocations; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoInteractions; import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; import java.util.List; import java.util.concurrent.CompletableFuture; import org.apache.ignite.internal.catalog.commands.DropIndexCommand; import org.apache.ignite.internal.catalog.commands.MakeIndexAvailableCommand; import org.apache.ignite.internal.catalog.commands.RemoveIndexCommand; import org.apache.ignite.internal.catalog.commands.RenameIndexCommand; import org.apache.ignite.internal.catalog.commands.StartBuildingIndexCommand; import org.apache.ignite.internal.catalog.descriptors.CatalogHashIndexDescriptor; import org.apache.ignite.internal.catalog.descriptors.CatalogIndexColumnDescriptor; import 
org.apache.ignite.internal.catalog.descriptors.CatalogIndexDescriptor; import org.apache.ignite.internal.catalog.descriptors.CatalogIndexDescriptor.CatalogIndexDescriptorType; import org.apache.ignite.internal.catalog.descriptors.CatalogIndexStatus; import org.apache.ignite.internal.catalog.descriptors.CatalogObjectDescriptor; import org.apache.ignite.internal.catalog.descriptors.CatalogSortedIndexDescriptor; import org.apache.ignite.internal.catalog.descriptors.CatalogTableDescriptor; import org.apache.ignite.internal.catalog.events.CatalogEvent; import org.apache.ignite.internal.catalog.events.CatalogEventParameters; import org.apache.ignite.internal.catalog.events.CreateIndexEventParameters; import org.apache.ignite.internal.catalog.events.MakeIndexAvailableEventParameters; import org.apache.ignite.internal.catalog.events.RemoveIndexEventParameters; import org.apache.ignite.internal.catalog.events.StartBuildingIndexEventParameters; import org.apache.ignite.internal.catalog.events.StoppingIndexEventParameters; import org.apache.ignite.internal.event.EventListener; import org.jetbrains.annotations.Nullable; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.EnumSource; import org.junit.jupiter.params.provider.EnumSource.Mode; import org.mockito.ArgumentCaptor; /** Tests for index related commands. */ public class CatalogIndexTest extends BaseCatalogManagerTest { @Test public void testCreateHashIndex() { int tableCreationVersion = tryApplyAndExpectApplied(simpleTable(TABLE_NAME)).getCatalogVersion(); int indexCreationVersion = tryApplyAndExpectApplied(createHashIndexCommand(INDEX_NAME, List.of("VAL", "ID"))).getCatalogVersion(); // Validate catalog version from the past. 
Catalog catalog = manager.catalog(tableCreationVersion); assertNotNull(catalog); assertNotNull(catalog.table(SCHEMA_NAME, TABLE_NAME)); assertNull(catalog.aliveIndex(SCHEMA_NAME, INDEX_NAME)); assertNull(catalog.schema(SCHEMA_NAME).aliveIndex(INDEX_NAME)); // Validate actual catalog catalog = manager.catalog(indexCreationVersion); assertNotNull(catalog); CatalogTableDescriptor table = catalog.table(SCHEMA_NAME, TABLE_NAME); CatalogHashIndexDescriptor index = (CatalogHashIndexDescriptor) catalog.aliveIndex(SCHEMA_NAME, INDEX_NAME); assertNotNull(table); assertNotNull(index); assertSame(index, catalog.schema(SCHEMA_NAME).aliveIndex(INDEX_NAME)); assertSame(index, catalog.index(index.id())); // Validate newly created hash index assertEquals(INDEX_NAME, index.name()); assertEquals(CatalogIndexDescriptorType.HASH, index.indexType()); assertEquals(table.id(), index.tableId()); assertEquals(List.of("VAL", "ID"), index.columns()); assertFalse(index.unique()); assertEquals(REGISTERED, index.status()); } /** The index created with the table must be in the {@link CatalogIndexStatus#AVAILABLE} state. */ @Test public void testCreateHashIndexWithTable() { int catalogVersion = tryApplyAndCheckExpect( List.of( simpleTable(TABLE_NAME), createHashIndexCommand(INDEX_NAME, List.of("VAL", "ID"))), true, true).getCatalogVersion(); Catalog catalog = manager.catalog(catalogVersion); assertNotNull(catalog); // Validate newly created hash index. 
CatalogHashIndexDescriptor index = (CatalogHashIndexDescriptor) catalog.aliveIndex(SCHEMA_NAME, INDEX_NAME); assertEquals(INDEX_NAME, index.name()); assertEquals(catalog.table(SCHEMA_NAME, TABLE_NAME).id(), index.tableId()); assertEquals(List.of("VAL", "ID"), index.columns()); assertFalse(index.unique()); assertEquals(AVAILABLE, index.status()); } @Test public void testCreateSortedIndex() { int tableCreationVersion = tryApplyAndExpectApplied(simpleTable(TABLE_NAME)).getCatalogVersion(); int indexCreationVersion = tryApplyAndExpectApplied( createSortedIndexCommand( INDEX_NAME, true, List.of("VAL", "ID"), List.of(DESC_NULLS_FIRST, ASC_NULLS_LAST) )).getCatalogVersion(); // Validate catalog version from the past. Catalog catalog = manager.catalog(tableCreationVersion); assertNotNull(catalog); assertNotNull(catalog.table(SCHEMA_NAME, TABLE_NAME)); assertNull(catalog.aliveIndex(SCHEMA_NAME, INDEX_NAME)); assertNull(catalog.schema(SCHEMA_NAME).aliveIndex(INDEX_NAME)); // Validate actual catalog catalog = manager.catalog(indexCreationVersion); assertNotNull(catalog); CatalogTableDescriptor table = catalog.table(SCHEMA_NAME, TABLE_NAME); CatalogSortedIndexDescriptor index = (CatalogSortedIndexDescriptor) catalog.aliveIndex(SCHEMA_NAME, INDEX_NAME); assertNotNull(table); assertNotNull(index); assertSame(index, catalog.schema(SCHEMA_NAME).aliveIndex(INDEX_NAME)); assertSame(index, catalog.index(index.id())); // Validate newly created sorted index assertEquals(INDEX_NAME, index.name()); assertEquals(CatalogIndexDescriptorType.SORTED, index.indexType()); assertEquals(table.id(), index.tableId()); assertEquals(List.of("VAL", "ID"), view(index.columns(), CatalogIndexColumnDescriptor::name)); assertEquals(List.of(DESC_NULLS_FIRST, ASC_NULLS_LAST), view(index.columns(), CatalogIndexColumnDescriptor::collation)); assertTrue(index.unique()); assertEquals(REGISTERED, index.status()); } /** The index created with the table must be in the {@link CatalogIndexStatus#AVAILABLE} state. 
*/ @Test public void testCreateSortedIndexWithTable() { int catalogVersion = tryApplyAndCheckExpect( List.of( simpleTable(TABLE_NAME), createSortedIndexCommand( INDEX_NAME, true, List.of("VAL", "ID"), List.of(DESC_NULLS_FIRST, ASC_NULLS_LAST) )), true, true).getCatalogVersion(); Catalog catalog = manager.catalog(catalogVersion); assertNotNull(catalog); // Validate newly created sorted index. CatalogSortedIndexDescriptor index = (CatalogSortedIndexDescriptor) catalog.aliveIndex(SCHEMA_NAME, INDEX_NAME); assertEquals(INDEX_NAME, index.name()); assertEquals(catalog.table(SCHEMA_NAME, TABLE_NAME).id(), index.tableId()); assertEquals("VAL", index.columns().get(0).name()); assertEquals("ID", index.columns().get(1).name()); assertEquals(DESC_NULLS_FIRST, index.columns().get(0).collation()); assertEquals(ASC_NULLS_LAST, index.columns().get(1).collation()); assertTrue(index.unique()); assertEquals(AVAILABLE, index.status()); } @Test public void testDropTableWithIndex() { createTableWithIndex(TABLE_NAME, INDEX_NAME); long beforeDropTimestamp = clock.nowLong(); int beforeDropVersion = manager.latestCatalogVersion(); tryApplyAndExpectApplied(dropTableCommand(TABLE_NAME)); // Validate catalog version from the past. 
Catalog catalog = manager.catalog(beforeDropVersion); assertNotNull(catalog); assertSame(catalog, manager.activeCatalog(beforeDropTimestamp)); CatalogTableDescriptor table = catalog.table(SCHEMA_NAME, TABLE_NAME); CatalogIndexDescriptor index = catalog.aliveIndex(SCHEMA_NAME, INDEX_NAME); assertNotNull(table); assertNotNull(index); assertSame(table, catalog.table(table.id())); assertSame(index, catalog.index(index.id())); assertThat(index.status(), is(AVAILABLE)); // Validate actual catalog Catalog latestCatalog = manager.catalog(manager.latestCatalogVersion()); assertNotNull(latestCatalog); assertSame(latestCatalog, manager.activeCatalog(clock.nowLong())); assertNotSame(catalog, latestCatalog); assertNull(latestCatalog.table(SCHEMA_NAME, TABLE_NAME)); assertNull(latestCatalog.table(table.id())); assertNull(latestCatalog.aliveIndex(SCHEMA_NAME, INDEX_NAME)); assertNull(latestCatalog.index(index.id())); } @Test public void testGetTableIdOnDropIndexEvent() { createTableWithIndex(TABLE_NAME, INDEX_NAME); Catalog catalog = manager.activeCatalog(clock.nowLong()); CatalogTableDescriptor table = catalog.table(SCHEMA_NAME, TABLE_NAME); CatalogIndexDescriptor pkIndex = catalog.aliveIndex(SCHEMA_NAME, pkIndexName(TABLE_NAME)); CatalogIndexDescriptor index = catalog.aliveIndex(SCHEMA_NAME, INDEX_NAME); assertNotNull(table); assertNotNull(pkIndex); assertNotNull(index); assertThat(index.status(), is(AVAILABLE)); assertThat(index.indexType(), is(CatalogIndexDescriptorType.HASH)); assertNotEquals(pkIndex.id(), index.id()); EventListener<StoppingIndexEventParameters> stoppingListener = mock(EventListener.class); EventListener<RemoveIndexEventParameters> removedListener = mock(EventListener.class); ArgumentCaptor<StoppingIndexEventParameters> stoppingCaptor = ArgumentCaptor.forClass(StoppingIndexEventParameters.class); ArgumentCaptor<RemoveIndexEventParameters> removingCaptor = ArgumentCaptor.forClass(RemoveIndexEventParameters.class); 
doReturn(falseCompletedFuture()).when(stoppingListener).notify(stoppingCaptor.capture()); doReturn(falseCompletedFuture()).when(removedListener).notify(removingCaptor.capture()); manager.listen(CatalogEvent.INDEX_STOPPING, stoppingListener); manager.listen(CatalogEvent.INDEX_REMOVED, removedListener); // Let's drop the index. tryApplyAndExpectApplied(DropIndexCommand.builder().schemaName(SCHEMA_NAME).indexName(INDEX_NAME).build()); StoppingIndexEventParameters stoppingEventParameters = stoppingCaptor.getValue(); assertEquals(index.id(), stoppingEventParameters.indexId()); // Let's drop the table. tryApplyAndExpectApplied(dropTableCommand(TABLE_NAME)); // Let's make sure that the PK index has been removed. RemoveIndexEventParameters pkRemovedEventParameters = removingCaptor.getAllValues().get(0); assertEquals(pkIndex.id(), pkRemovedEventParameters.indexId()); } @Test public void testReCreateIndexWithSameName() { createTableWithIndex(TABLE_NAME, INDEX_NAME); int beforeDropVersion = manager.latestCatalogVersion(); CatalogIndexDescriptor index1 = index(beforeDropVersion, INDEX_NAME); assertNotNull(index1); int indexId1 = index1.id(); // Drop index. dropIndex(INDEX_NAME); removeIndex(indexId1); assertNull(index(manager.latestCatalogVersion(), INDEX_NAME)); // Re-create index with same name. createSomeSortedIndex(TABLE_NAME, INDEX_NAME); CatalogIndexDescriptor index2 = index(manager.latestCatalogVersion(), INDEX_NAME); assertNotNull(index2); assertThat(index2.indexType(), equalTo(CatalogIndexDescriptorType.SORTED)); // Ensure these are different indexes. int indexId2 = index2.id(); assertNotEquals(indexId1, indexId2); // Ensure dropped index is available for historical queries. 
assertSame(index1, manager.catalog(beforeDropVersion).index(indexId1));
    assertNull(manager.catalog(beforeDropVersion).index(indexId2));
}

@Test
public void droppingAnAvailableIndexMovesItToStoppingState() {
    createSomeTable(TABLE_NAME);
    createSomeIndex(TABLE_NAME, INDEX_NAME);

    int indexId = indexId(INDEX_NAME);
    rollIndexStatusTo(AVAILABLE, indexId);

    dropIndex(INDEX_NAME);

    // An AVAILABLE index is not removed immediately; it transitions to STOPPING first.
    CatalogIndexDescriptor index = manager.activeCatalog(clock.nowLong()).index(indexId);
    assertThat(index, is(notNullValue()));
    assertThat(index.status(), is(STOPPING));
}

@ParameterizedTest
@EnumSource(value = CatalogIndexStatus.class, names = {"REGISTERED", "BUILDING"}, mode = Mode.INCLUDE)
public void droppingNotAvailableIndexRemovesIt(CatalogIndexStatus status) {
    createSomeTable(TABLE_NAME);
    createSomeIndex(TABLE_NAME, INDEX_NAME);

    rollIndexStatusTo(status, indexId(INDEX_NAME));

    dropIndex(INDEX_NAME);

    // Indexes dropped before becoming AVAILABLE are removed outright.
    CatalogIndexDescriptor index = index(manager.latestCatalogVersion(), INDEX_NAME);
    assertThat(index, is(nullValue()));
}

// Moves a REGISTERED index to the BUILDING status.
private void startBuildingIndex(int indexId) {
    tryApplyAndExpectApplied(StartBuildingIndexCommand.builder().indexId(indexId).build());
}

@Test
public void removingStoppedIndexRemovesItFromCatalog() {
    createSomeTable(TABLE_NAME);
    createSomeIndex(TABLE_NAME, INDEX_NAME);

    int indexId = indexId(INDEX_NAME);
    rollIndexStatusTo(STOPPING, indexId);

    assertThat(manager.activeCatalog(clock.nowLong()).index(indexId).status(), is(STOPPING));
    // Stopping index can't be resolved by name.
    assertNull(manager.activeCatalog(clock.nowLong()).aliveIndex(SCHEMA_NAME, INDEX_NAME));

    removeIndex(indexId);

    assertNull(manager.activeCatalog(clock.nowLong()).index(indexId));
    assertNull(manager.activeCatalog(clock.nowLong()).aliveIndex(SCHEMA_NAME, INDEX_NAME));
}

// Advances the index through the lifecycle REGISTERED -> BUILDING -> AVAILABLE -> STOPPING,
// applying the transition command for each status strictly before the requested one.
private void rollIndexStatusTo(CatalogIndexStatus status, int indexId) {
    for (CatalogIndexStatus currentStatus : List.of(REGISTERED, BUILDING, AVAILABLE, STOPPING)) {
        if (currentStatus == status) {
            break;
        }

        switch (currentStatus) {
            case REGISTERED:
                startBuildingIndex(indexId);
                break;
            case BUILDING:
                makeIndexAvailable(indexId);
                break;
            case AVAILABLE:
                dropIndex(indexId);
                break;
            case STOPPING:
                removeIndex(indexId);
                break;
            default:
                fail("Unsupported state: " + currentStatus);
                break;
        }
    }
}

// Removes a STOPPING index from the catalog entirely.
private void removeIndex(int indexId) {
    tryApplyAndExpectApplied(RemoveIndexCommand.builder().indexId(indexId).build());
}

private void dropIndex(String indexName) {
    tryApplyAndExpectApplied(DropIndexCommand.builder().indexName(indexName).schemaName(SCHEMA_NAME).build());
}

// Drop-by-id convenience: resolves the current name, then drops by name.
private void dropIndex(int indexId) {
    CatalogIndexDescriptor index = manager.activeCatalog(Long.MAX_VALUE).index(indexId);

    assertThat(index, is(notNullValue()));

    dropIndex(index.name());
}

@Test
public void testDropNotExistingIndex() {
    assertNull(manager.activeCatalog(clock.nowLong()).aliveIndex(SCHEMA_NAME, INDEX_NAME));

    assertThat(
            manager.execute(DropIndexCommand.builder().schemaName(SCHEMA_NAME).indexName(INDEX_NAME).build()),
            willThrowFast(CatalogValidationException.class, "Index with name 'PUBLIC.myIndex' not found.")
    );
}

@Test
public void testStartHashIndexBuilding() {
    createSomeTable(TABLE_NAME);

    tryApplyAndExpectApplied(createHashIndexCommand(INDEX_NAME, List.of("key1")));

    tryApplyAndExpectApplied(StartBuildingIndexCommand.builder().indexId(indexId(INDEX_NAME)).build());

    CatalogHashIndexDescriptor index = (CatalogHashIndexDescriptor) index(manager.latestCatalogVersion(), INDEX_NAME);

    assertEquals(BUILDING, index.status());
}

@Test
public void
testStartSortedIndexBuilding() { createSomeTable(TABLE_NAME); tryApplyAndExpectApplied(createSortedIndexCommand(INDEX_NAME, List.of("key1"), List.of(ASC_NULLS_LAST))); tryApplyAndExpectApplied(StartBuildingIndexCommand.builder().indexId(indexId(INDEX_NAME)).build()); CatalogSortedIndexDescriptor index = (CatalogSortedIndexDescriptor) index(manager.latestCatalogVersion(), INDEX_NAME); assertEquals(BUILDING, index.status()); } @Test public void testStartBuildingIndexEvent() { createSomeTable(TABLE_NAME); tryApplyAndExpectApplied(createHashIndexCommand(INDEX_NAME, List.of("key1"))); int indexId = index(manager.latestCatalogVersion(), INDEX_NAME).id(); var fireEventFuture = new CompletableFuture<Void>(); manager.listen(CatalogEvent.INDEX_BUILDING, fromConsumer(fireEventFuture, (StartBuildingIndexEventParameters parameters) -> { assertEquals(indexId, parameters.indexId()); })); tryApplyAndExpectApplied(startBuildingIndexCommand(indexId)); assertThat(fireEventFuture, willCompleteSuccessfully()); } @Test public void testIndexEvents() { CatalogCommand createIndexCmd = createHashIndexCommand(INDEX_NAME, List.of("ID")); CatalogCommand dropIndexCmd = DropIndexCommand.builder().schemaName(SCHEMA_NAME).indexName(INDEX_NAME).build(); EventListener<CatalogEventParameters> eventListener = mock(EventListener.class); when(eventListener.notify(any())).thenReturn(falseCompletedFuture()); manager.listen(CatalogEvent.INDEX_CREATE, eventListener); manager.listen(CatalogEvent.INDEX_BUILDING, eventListener); manager.listen(CatalogEvent.INDEX_AVAILABLE, eventListener); manager.listen(CatalogEvent.INDEX_STOPPING, eventListener); manager.listen(CatalogEvent.INDEX_REMOVED, eventListener); // Try to create index without table. assertThat(manager.execute(createIndexCmd), willThrow(CatalogValidationException.class, "Table with name 'PUBLIC.test_table' not found")); verifyNoInteractions(eventListener); // Create table with PK index. 
assertThat(manager.execute(simpleTable(TABLE_NAME)), willCompleteSuccessfully()); verify(eventListener).notify(any(CreateIndexEventParameters.class)); verifyNoMoreInteractions(eventListener); clearInvocations(eventListener); // Create index. assertThat(manager.execute(createIndexCmd), willCompleteSuccessfully()); verify(eventListener).notify(any(CreateIndexEventParameters.class)); int indexId = indexId(INDEX_NAME); startBuildingIndex(indexId); verify(eventListener).notify(any(StartBuildingIndexEventParameters.class)); makeIndexAvailable(indexId); verify(eventListener).notify(any(MakeIndexAvailableEventParameters.class)); verifyNoMoreInteractions(eventListener); clearInvocations(eventListener); // Drop index. assertThat(manager.execute(dropIndexCmd), willCompleteSuccessfully()); verify(eventListener).notify(any(StoppingIndexEventParameters.class)); // Remove index. removeIndex(indexId); verify(eventListener).notify(any(RemoveIndexEventParameters.class)); verifyNoMoreInteractions(eventListener); clearInvocations(eventListener); // Drop table with pk index. tryApplyAndExpectApplied(dropTableCommand(TABLE_NAME)); // Try drop index once again. 
assertThat(manager.execute(dropIndexCmd), willThrowFast(CatalogValidationException.class, "Index with name 'PUBLIC.myIndex' not found.")); verify(eventListener).notify(any(RemoveIndexEventParameters.class)); verifyNoMoreInteractions(eventListener); clearInvocations(eventListener); } @Test public void testMakeHashIndexAvailable() { createSomeTable(TABLE_NAME); tryApplyAndExpectApplied(createHashIndexCommand(INDEX_NAME, List.of("key1"))); int indexId = indexId(INDEX_NAME); startBuildingIndex(indexId); makeIndexAvailable(indexId); CatalogHashIndexDescriptor index = (CatalogHashIndexDescriptor) index(manager.latestCatalogVersion(), INDEX_NAME); assertEquals(AVAILABLE, index.status()); } private void makeIndexAvailable(int indexId) { tryApplyAndExpectApplied(MakeIndexAvailableCommand.builder().indexId(indexId).build()); } @Test public void testMakeSortedIndexAvailable() { createSomeTable(TABLE_NAME); tryApplyAndExpectApplied(createSortedIndexCommand(INDEX_NAME, List.of("key1"), List.of(ASC_NULLS_LAST))); int indexId = indexId(INDEX_NAME); startBuildingIndex(indexId); makeIndexAvailable(indexId); CatalogSortedIndexDescriptor index = (CatalogSortedIndexDescriptor) index(manager.latestCatalogVersion(), INDEX_NAME); assertEquals(AVAILABLE, index.status()); } @Test public void testAvailableIndexEvent() { createSomeTable(TABLE_NAME); tryApplyAndExpectApplied(createHashIndexCommand(INDEX_NAME, List.of("key1"))); int indexId = index(manager.latestCatalogVersion(), INDEX_NAME).id(); var fireEventFuture = new CompletableFuture<Void>(); manager.listen(CatalogEvent.INDEX_AVAILABLE, fromConsumer(fireEventFuture, (MakeIndexAvailableEventParameters parameters) -> { assertEquals(indexId, parameters.indexId()); })); tryApplyAndExpectApplied(startBuildingIndexCommand(indexId)); makeIndexAvailable(indexId); assertThat(fireEventFuture, willCompleteSuccessfully()); } @Test public void testPkAvailableOnCreateIndexEvent() { var fireEventFuture = new CompletableFuture<Void>(); 
manager.listen(CatalogEvent.INDEX_CREATE, fromConsumer(fireEventFuture, (CreateIndexEventParameters parameters) -> { assertEquals(pkIndexName(TABLE_NAME), parameters.indexDescriptor().name()); assertEquals(CatalogIndexDescriptorType.HASH, parameters.indexDescriptor().indexType()); assertEquals(AVAILABLE, parameters.indexDescriptor().status()); assertTrue(parameters.indexDescriptor().unique()); assertTrue(parameters.indexDescriptor().isCreatedWithTable()); })); createSomeTable(TABLE_NAME); assertThat(fireEventFuture, willCompleteSuccessfully()); } @Test public void testCreateIndexWithAlreadyExistingName() { tryApplyAndExpectApplied(simpleTable(TABLE_NAME)); tryApplyAndExpectApplied(simpleIndex()); assertThat( manager.execute(createHashIndexCommand(INDEX_NAME, List.of("VAL"))), willThrowFast(CatalogValidationException.class, "Index with name 'PUBLIC.myIndex' already exists.") ); assertThat( manager.execute(createSortedIndexCommand(INDEX_NAME, List.of("VAL"), List.of(ASC_NULLS_LAST))), willThrowFast(CatalogValidationException.class, "Index with name 'PUBLIC.myIndex' already exists.") ); } @Test public void testCreateIndexWithSameNameAsExistingTable() { tryApplyAndExpectApplied(simpleTable(TABLE_NAME)); assertThat( manager.execute(createHashIndexCommand(TABLE_NAME, List.of("VAL"))), willThrowFast(CatalogValidationException.class, "Table with name 'PUBLIC.test_table' already exists.") ); assertThat( manager.execute(createSortedIndexCommand(TABLE_NAME, List.of("VAL"), List.of(ASC_NULLS_LAST))), willThrowFast(CatalogValidationException.class, "Table with name 'PUBLIC.test_table' already exists.") ); } @Test public void testCreateIndexWithNotExistingTable() { assertThat( manager.execute(createHashIndexCommand(TABLE_NAME, List.of("VAL"))), willThrowFast(CatalogValidationException.class, "Table with name 'PUBLIC.test_table' not found.") ); assertThat( manager.execute(createSortedIndexCommand(TABLE_NAME, List.of("VAL"), List.of(ASC_NULLS_LAST))), 
willThrowFast(CatalogValidationException.class, "Table with name 'PUBLIC.test_table' not found.") ); } @Test public void testCreateIndexWithMissingTableColumns() { tryApplyAndExpectApplied(simpleTable(TABLE_NAME)); assertThat( manager.execute(createHashIndexCommand(INDEX_NAME, List.of("fake"))), willThrowFast(CatalogValidationException.class, "Column with name 'fake' not found in table 'PUBLIC.test_table'.") ); assertThat( manager.execute(createSortedIndexCommand(INDEX_NAME, List.of("fake"), List.of(ASC_NULLS_LAST))), willThrowFast(CatalogValidationException.class, "Column with name 'fake' not found in table 'PUBLIC.test_table'.") ); } @Test public void testCreateUniqIndexWithMissingTableColocationColumns() { tryApplyAndExpectApplied(simpleTable(TABLE_NAME)); assertThat( manager.execute(createHashIndexCommand(INDEX_NAME, true, List.of("VAL"))), willThrowFast(CatalogValidationException.class, "Unique index must include all colocation columns") ); assertThat( manager.execute(createSortedIndexCommand(INDEX_NAME, true, List.of("VAL"), List.of(ASC_NULLS_LAST))), willThrowFast(CatalogValidationException.class, "Unique index must include all colocation columns") ); } @Test public void testIndexes() { int initialVersion = manager.latestCatalogVersion(); tryApplyAndExpectApplied(simpleTable(TABLE_NAME)); int afterTableCreated = manager.latestCatalogVersion(); tryApplyAndExpectApplied(simpleIndex()); assertThat(manager.catalog(initialVersion).indexes(), empty()); assertThat( manager.catalog(afterTableCreated).indexes(), hasItems(index(afterTableCreated, pkIndexName(TABLE_NAME))) ); int latest = manager.latestCatalogVersion(); assertThat( manager.catalog(latest).indexes(), hasItems(index(latest, pkIndexName(TABLE_NAME)), index(latest, INDEX_NAME)) ); } @Test public void testGetIndexesForTables() { String tableName0 = TABLE_NAME + 0; String tableName1 = TABLE_NAME + 1; createSomeTable(tableName0); createSomeTable(tableName1); createSomeIndex(tableName1, INDEX_NAME); int 
catalogVersion = manager.latestCatalogVersion(); // Let's check for a non-existent table. assertThat(tableIndexIds(catalogVersion, Integer.MAX_VALUE), empty()); // Let's check for an existing tables. int tableId0 = tableId(tableName0); int tableId1 = tableId(tableName1); assertThat(tableIndexIds(catalogVersion, tableId0), hasItems(indexId(pkIndexName(tableName0)))); assertThat(tableIndexIds(catalogVersion, tableId1), hasItems(indexId(pkIndexName(tableName1)), indexId(INDEX_NAME))); } @Test public void testGetIndexesForTableInSortedOrderById() { createSomeTable(TABLE_NAME); String indexName0 = INDEX_NAME + 0; String indexName1 = INDEX_NAME + 1; createSomeIndex(TABLE_NAME, indexName0); createSomeIndex(TABLE_NAME, indexName1); int indexId0 = indexId(pkIndexName(TABLE_NAME)); int indexId1 = indexId(indexName0); int indexId2 = indexId(indexName1); int catalogVersion = manager.latestCatalogVersion(); assertThat(tableIndexIds(catalogVersion, tableId(TABLE_NAME)), equalTo(List.of(indexId0, indexId1, indexId2))); } @Test public void testRenameIndex() { createSomeTable(TABLE_NAME); createSomeIndex(TABLE_NAME, INDEX_NAME); long beforeRename = clock.nowLong(); CatalogIndexDescriptor index = manager.activeCatalog(beforeRename).aliveIndex(SCHEMA_NAME, INDEX_NAME); assertThat(index, is(notNullValue())); int indexId = index.id(); // Rename index. renameIndex(INDEX_NAME, INDEX_NAME_2); // Ensure index is available by new name. assertThat(manager.activeCatalog(clock.nowLong()).aliveIndex(SCHEMA_NAME, INDEX_NAME), is(nullValue())); index = manager.activeCatalog(clock.nowLong()).aliveIndex(SCHEMA_NAME, INDEX_NAME_2); assertThat(index, is(notNullValue())); assertThat(index.id(), is(indexId)); assertThat(index.name(), is(INDEX_NAME_2)); // Ensure renamed index is available for historical queries. 
CatalogIndexDescriptor oldDescriptor = manager.activeCatalog(beforeRename).aliveIndex(SCHEMA_NAME, INDEX_NAME); assertThat(oldDescriptor, is(notNullValue())); assertThat(oldDescriptor.id(), is(indexId)); assertThat(manager.activeCatalog(beforeRename).aliveIndex(SCHEMA_NAME, INDEX_NAME_2), is(nullValue())); // Ensure can create new index with same name. createSomeIndex(TABLE_NAME, INDEX_NAME); index = manager.activeCatalog(clock.nowLong()).aliveIndex(SCHEMA_NAME, INDEX_NAME); assertThat(index, is(notNullValue())); assertThat(index.id(), not(indexId)); } @Test public void testRenamePkIndex() { createSomeTable(TABLE_NAME); Catalog catalog = manager.activeCatalog(clock.nowLong()); CatalogTableDescriptor table = catalog.table(SCHEMA_NAME, TABLE_NAME); assertThat(table, is(notNullValue())); assertThat(catalog.aliveIndex(SCHEMA_NAME, pkIndexName(TABLE_NAME)), is(notNullValue())); int primaryKeyIndexId = table.primaryKeyIndexId(); // Rename index. renameIndex(pkIndexName(TABLE_NAME), INDEX_NAME); catalog = manager.activeCatalog(clock.nowLong()); CatalogIndexDescriptor index = catalog.aliveIndex(SCHEMA_NAME, INDEX_NAME); assertThat(index, is(notNullValue())); assertThat(index.id(), is(primaryKeyIndexId)); assertThat(index.name(), is(INDEX_NAME)); assertThat(catalog.aliveIndex(SCHEMA_NAME, pkIndexName(TABLE_NAME)), is(nullValue())); } @Test public void testRenameNonExistingIndex() { createSomeTable(TABLE_NAME); assertThat( manager.execute(RenameIndexCommand.builder().schemaName(SCHEMA_NAME).indexName(INDEX_NAME).newIndexName("TEST").build()), willThrowFast(CatalogValidationException.class) ); } private @Nullable CatalogIndexDescriptor index(int catalogVersion, String indexName) { return manager.catalog(catalogVersion).aliveIndex(SCHEMA_NAME, indexName); } private int indexId(String indexName) { Catalog catalog = manager.activeCatalog(clock.nowLong()); CatalogIndexDescriptor index = catalog.aliveIndex(SCHEMA_NAME, indexName); assertNotNull(index, indexName); return 
index.id(); } private List<Integer> tableIndexIds(int catalogVersion, int tableId) { Catalog catalog = manager.catalog(catalogVersion); assert catalog != null; return catalog.indexes(tableId).stream().map(CatalogObjectDescriptor::id).collect(toList()); } private int tableId(String tableName) { Catalog catalog = manager.activeCatalog(clock.nowLong()); CatalogTableDescriptor table = catalog.table(SCHEMA_NAME, tableName); assertNotNull(table, tableName); return table.id(); } private void createSomeIndex(String tableName, String indexName) { tryApplyAndExpectApplied(createHashIndexCommand(tableName, indexName, false, List.of("key1"))); } private void createSomeSortedIndex(String tableName, String indexName) { CatalogCommand newSortedIndexCommand = createSortedIndexCommand( SCHEMA_NAME, tableName, indexName, false, List.of("key1"), List.of(ASC_NULLS_LAST)); tryApplyAndExpectApplied(newSortedIndexCommand); } private void renameIndex(String indexName, String newIndexName) { tryApplyAndExpectApplied(renameIndexCommand(indexName, newIndexName)); } private void createTableWithIndex(String tableName, String indexName) { createSomeTable(tableName); createSomeIndex(tableName, indexName); int indexId = indexId(indexName); rollIndexStatusTo(AVAILABLE, indexId); } }
googleapis/google-cloud-java
36,523
java-document-ai/proto-google-cloud-document-ai-v1/src/main/java/com/google/cloud/documentai/v1/ListEvaluationsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/documentai/v1/document_processor_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.documentai.v1; /** * * * <pre> * The response from `ListEvaluations`. * </pre> * * Protobuf type {@code google.cloud.documentai.v1.ListEvaluationsResponse} */ public final class ListEvaluationsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.documentai.v1.ListEvaluationsResponse) ListEvaluationsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListEvaluationsResponse.newBuilder() to construct. 
private ListEvaluationsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListEvaluationsResponse() { evaluations_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListEvaluationsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.documentai.v1.DocumentAiProcessorService .internal_static_google_cloud_documentai_v1_ListEvaluationsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.documentai.v1.DocumentAiProcessorService .internal_static_google_cloud_documentai_v1_ListEvaluationsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.documentai.v1.ListEvaluationsResponse.class, com.google.cloud.documentai.v1.ListEvaluationsResponse.Builder.class); } public static final int EVALUATIONS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.documentai.v1.Evaluation> evaluations_; /** * * * <pre> * The evaluations requested. * </pre> * * <code>repeated .google.cloud.documentai.v1.Evaluation evaluations = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.documentai.v1.Evaluation> getEvaluationsList() { return evaluations_; } /** * * * <pre> * The evaluations requested. * </pre> * * <code>repeated .google.cloud.documentai.v1.Evaluation evaluations = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.documentai.v1.EvaluationOrBuilder> getEvaluationsOrBuilderList() { return evaluations_; } /** * * * <pre> * The evaluations requested. 
* </pre> * * <code>repeated .google.cloud.documentai.v1.Evaluation evaluations = 1;</code> */ @java.lang.Override public int getEvaluationsCount() { return evaluations_.size(); } /** * * * <pre> * The evaluations requested. * </pre> * * <code>repeated .google.cloud.documentai.v1.Evaluation evaluations = 1;</code> */ @java.lang.Override public com.google.cloud.documentai.v1.Evaluation getEvaluations(int index) { return evaluations_.get(index); } /** * * * <pre> * The evaluations requested. * </pre> * * <code>repeated .google.cloud.documentai.v1.Evaluation evaluations = 1;</code> */ @java.lang.Override public com.google.cloud.documentai.v1.EvaluationOrBuilder getEvaluationsOrBuilder(int index) { return evaluations_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < evaluations_.size(); i++) { output.writeMessage(1, evaluations_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < evaluations_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, evaluations_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.documentai.v1.ListEvaluationsResponse)) { return super.equals(obj); } com.google.cloud.documentai.v1.ListEvaluationsResponse other = (com.google.cloud.documentai.v1.ListEvaluationsResponse) obj; if (!getEvaluationsList().equals(other.getEvaluationsList())) return false; if 
(!getNextPageToken().equals(other.getNextPageToken())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getEvaluationsCount() > 0) { hash = (37 * hash) + EVALUATIONS_FIELD_NUMBER; hash = (53 * hash) + getEvaluationsList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.documentai.v1.ListEvaluationsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.documentai.v1.ListEvaluationsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.documentai.v1.ListEvaluationsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.documentai.v1.ListEvaluationsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.documentai.v1.ListEvaluationsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.documentai.v1.ListEvaluationsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.documentai.v1.ListEvaluationsResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.documentai.v1.ListEvaluationsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.documentai.v1.ListEvaluationsResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.documentai.v1.ListEvaluationsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.documentai.v1.ListEvaluationsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.documentai.v1.ListEvaluationsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.documentai.v1.ListEvaluationsResponse prototype) 
{ return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The response from `ListEvaluations`. * </pre> * * Protobuf type {@code google.cloud.documentai.v1.ListEvaluationsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.documentai.v1.ListEvaluationsResponse) com.google.cloud.documentai.v1.ListEvaluationsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.documentai.v1.DocumentAiProcessorService .internal_static_google_cloud_documentai_v1_ListEvaluationsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.documentai.v1.DocumentAiProcessorService .internal_static_google_cloud_documentai_v1_ListEvaluationsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.documentai.v1.ListEvaluationsResponse.class, com.google.cloud.documentai.v1.ListEvaluationsResponse.Builder.class); } // Construct using com.google.cloud.documentai.v1.ListEvaluationsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (evaluationsBuilder_ == null) { evaluations_ = java.util.Collections.emptyList(); } else { evaluations_ = null; evaluationsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public 
com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.documentai.v1.DocumentAiProcessorService .internal_static_google_cloud_documentai_v1_ListEvaluationsResponse_descriptor; } @java.lang.Override public com.google.cloud.documentai.v1.ListEvaluationsResponse getDefaultInstanceForType() { return com.google.cloud.documentai.v1.ListEvaluationsResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.documentai.v1.ListEvaluationsResponse build() { com.google.cloud.documentai.v1.ListEvaluationsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.documentai.v1.ListEvaluationsResponse buildPartial() { com.google.cloud.documentai.v1.ListEvaluationsResponse result = new com.google.cloud.documentai.v1.ListEvaluationsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.documentai.v1.ListEvaluationsResponse result) { if (evaluationsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { evaluations_ = java.util.Collections.unmodifiableList(evaluations_); bitField0_ = (bitField0_ & ~0x00000001); } result.evaluations_ = evaluations_; } else { result.evaluations_ = evaluationsBuilder_.build(); } } private void buildPartial0(com.google.cloud.documentai.v1.ListEvaluationsResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return 
super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.documentai.v1.ListEvaluationsResponse) { return mergeFrom((com.google.cloud.documentai.v1.ListEvaluationsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.documentai.v1.ListEvaluationsResponse other) { if (other == com.google.cloud.documentai.v1.ListEvaluationsResponse.getDefaultInstance()) return this; if (evaluationsBuilder_ == null) { if (!other.evaluations_.isEmpty()) { if (evaluations_.isEmpty()) { evaluations_ = other.evaluations_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureEvaluationsIsMutable(); evaluations_.addAll(other.evaluations_); } onChanged(); } } else { if (!other.evaluations_.isEmpty()) { if (evaluationsBuilder_.isEmpty()) { evaluationsBuilder_.dispose(); evaluationsBuilder_ = null; evaluations_ = other.evaluations_; bitField0_ = (bitField0_ & ~0x00000001); evaluationsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getEvaluationsFieldBuilder() : null; } else { evaluationsBuilder_.addAllMessages(other.evaluations_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.documentai.v1.Evaluation m = input.readMessage( com.google.cloud.documentai.v1.Evaluation.parser(), extensionRegistry); if (evaluationsBuilder_ == null) { ensureEvaluationsIsMutable(); evaluations_.add(m); } else { evaluationsBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.documentai.v1.Evaluation> evaluations_ = java.util.Collections.emptyList(); private void ensureEvaluationsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { evaluations_ = new java.util.ArrayList<com.google.cloud.documentai.v1.Evaluation>(evaluations_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.documentai.v1.Evaluation, com.google.cloud.documentai.v1.Evaluation.Builder, 
com.google.cloud.documentai.v1.EvaluationOrBuilder> evaluationsBuilder_; /** * * * <pre> * The evaluations requested. * </pre> * * <code>repeated .google.cloud.documentai.v1.Evaluation evaluations = 1;</code> */ public java.util.List<com.google.cloud.documentai.v1.Evaluation> getEvaluationsList() { if (evaluationsBuilder_ == null) { return java.util.Collections.unmodifiableList(evaluations_); } else { return evaluationsBuilder_.getMessageList(); } } /** * * * <pre> * The evaluations requested. * </pre> * * <code>repeated .google.cloud.documentai.v1.Evaluation evaluations = 1;</code> */ public int getEvaluationsCount() { if (evaluationsBuilder_ == null) { return evaluations_.size(); } else { return evaluationsBuilder_.getCount(); } } /** * * * <pre> * The evaluations requested. * </pre> * * <code>repeated .google.cloud.documentai.v1.Evaluation evaluations = 1;</code> */ public com.google.cloud.documentai.v1.Evaluation getEvaluations(int index) { if (evaluationsBuilder_ == null) { return evaluations_.get(index); } else { return evaluationsBuilder_.getMessage(index); } } /** * * * <pre> * The evaluations requested. * </pre> * * <code>repeated .google.cloud.documentai.v1.Evaluation evaluations = 1;</code> */ public Builder setEvaluations(int index, com.google.cloud.documentai.v1.Evaluation value) { if (evaluationsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureEvaluationsIsMutable(); evaluations_.set(index, value); onChanged(); } else { evaluationsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The evaluations requested. 
* </pre> * * <code>repeated .google.cloud.documentai.v1.Evaluation evaluations = 1;</code> */ public Builder setEvaluations( int index, com.google.cloud.documentai.v1.Evaluation.Builder builderForValue) { if (evaluationsBuilder_ == null) { ensureEvaluationsIsMutable(); evaluations_.set(index, builderForValue.build()); onChanged(); } else { evaluationsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The evaluations requested. * </pre> * * <code>repeated .google.cloud.documentai.v1.Evaluation evaluations = 1;</code> */ public Builder addEvaluations(com.google.cloud.documentai.v1.Evaluation value) { if (evaluationsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureEvaluationsIsMutable(); evaluations_.add(value); onChanged(); } else { evaluationsBuilder_.addMessage(value); } return this; } /** * * * <pre> * The evaluations requested. * </pre> * * <code>repeated .google.cloud.documentai.v1.Evaluation evaluations = 1;</code> */ public Builder addEvaluations(int index, com.google.cloud.documentai.v1.Evaluation value) { if (evaluationsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureEvaluationsIsMutable(); evaluations_.add(index, value); onChanged(); } else { evaluationsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The evaluations requested. * </pre> * * <code>repeated .google.cloud.documentai.v1.Evaluation evaluations = 1;</code> */ public Builder addEvaluations( com.google.cloud.documentai.v1.Evaluation.Builder builderForValue) { if (evaluationsBuilder_ == null) { ensureEvaluationsIsMutable(); evaluations_.add(builderForValue.build()); onChanged(); } else { evaluationsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The evaluations requested. 
* </pre> * * <code>repeated .google.cloud.documentai.v1.Evaluation evaluations = 1;</code> */ public Builder addEvaluations( int index, com.google.cloud.documentai.v1.Evaluation.Builder builderForValue) { if (evaluationsBuilder_ == null) { ensureEvaluationsIsMutable(); evaluations_.add(index, builderForValue.build()); onChanged(); } else { evaluationsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The evaluations requested. * </pre> * * <code>repeated .google.cloud.documentai.v1.Evaluation evaluations = 1;</code> */ public Builder addAllEvaluations( java.lang.Iterable<? extends com.google.cloud.documentai.v1.Evaluation> values) { if (evaluationsBuilder_ == null) { ensureEvaluationsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, evaluations_); onChanged(); } else { evaluationsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The evaluations requested. * </pre> * * <code>repeated .google.cloud.documentai.v1.Evaluation evaluations = 1;</code> */ public Builder clearEvaluations() { if (evaluationsBuilder_ == null) { evaluations_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { evaluationsBuilder_.clear(); } return this; } /** * * * <pre> * The evaluations requested. * </pre> * * <code>repeated .google.cloud.documentai.v1.Evaluation evaluations = 1;</code> */ public Builder removeEvaluations(int index) { if (evaluationsBuilder_ == null) { ensureEvaluationsIsMutable(); evaluations_.remove(index); onChanged(); } else { evaluationsBuilder_.remove(index); } return this; } /** * * * <pre> * The evaluations requested. * </pre> * * <code>repeated .google.cloud.documentai.v1.Evaluation evaluations = 1;</code> */ public com.google.cloud.documentai.v1.Evaluation.Builder getEvaluationsBuilder(int index) { return getEvaluationsFieldBuilder().getBuilder(index); } /** * * * <pre> * The evaluations requested. 
* </pre> * * <code>repeated .google.cloud.documentai.v1.Evaluation evaluations = 1;</code> */ public com.google.cloud.documentai.v1.EvaluationOrBuilder getEvaluationsOrBuilder(int index) { if (evaluationsBuilder_ == null) { return evaluations_.get(index); } else { return evaluationsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The evaluations requested. * </pre> * * <code>repeated .google.cloud.documentai.v1.Evaluation evaluations = 1;</code> */ public java.util.List<? extends com.google.cloud.documentai.v1.EvaluationOrBuilder> getEvaluationsOrBuilderList() { if (evaluationsBuilder_ != null) { return evaluationsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(evaluations_); } } /** * * * <pre> * The evaluations requested. * </pre> * * <code>repeated .google.cloud.documentai.v1.Evaluation evaluations = 1;</code> */ public com.google.cloud.documentai.v1.Evaluation.Builder addEvaluationsBuilder() { return getEvaluationsFieldBuilder() .addBuilder(com.google.cloud.documentai.v1.Evaluation.getDefaultInstance()); } /** * * * <pre> * The evaluations requested. * </pre> * * <code>repeated .google.cloud.documentai.v1.Evaluation evaluations = 1;</code> */ public com.google.cloud.documentai.v1.Evaluation.Builder addEvaluationsBuilder(int index) { return getEvaluationsFieldBuilder() .addBuilder(index, com.google.cloud.documentai.v1.Evaluation.getDefaultInstance()); } /** * * * <pre> * The evaluations requested. 
* </pre> * * <code>repeated .google.cloud.documentai.v1.Evaluation evaluations = 1;</code> */ public java.util.List<com.google.cloud.documentai.v1.Evaluation.Builder> getEvaluationsBuilderList() { return getEvaluationsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.documentai.v1.Evaluation, com.google.cloud.documentai.v1.Evaluation.Builder, com.google.cloud.documentai.v1.EvaluationOrBuilder> getEvaluationsFieldBuilder() { if (evaluationsBuilder_ == null) { evaluationsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.documentai.v1.Evaluation, com.google.cloud.documentai.v1.Evaluation.Builder, com.google.cloud.documentai.v1.EvaluationOrBuilder>( evaluations_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); evaluations_ = null; } return evaluationsBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.documentai.v1.ListEvaluationsResponse) } // @@protoc_insertion_point(class_scope:google.cloud.documentai.v1.ListEvaluationsResponse) private static final com.google.cloud.documentai.v1.ListEvaluationsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.documentai.v1.ListEvaluationsResponse(); } public static com.google.cloud.documentai.v1.ListEvaluationsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListEvaluationsResponse> PARSER = new com.google.protobuf.AbstractParser<ListEvaluationsResponse>() { @java.lang.Override public ListEvaluationsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; 
/**
 * Returns the shared {@link com.google.protobuf.Parser} used to decode
 * {@code ListEvaluationsResponse} messages from their wire format.
 *
 * <p>NOTE(review): this file is protoc-generated ("DO NOT EDIT"); comments here
 * will be lost on regeneration — prefer documenting the .proto source instead.
 */
public static com.google.protobuf.Parser<ListEvaluationsResponse> parser() {
  return PARSER;
}

/** Returns the parser for this message type — the same singleton as {@link #parser()}. */
@java.lang.Override
public com.google.protobuf.Parser<ListEvaluationsResponse> getParserForType() {
  return PARSER;
}

/** Returns the singleton default instance (all fields unset/empty) for this message type. */
@java.lang.Override
public com.google.cloud.documentai.v1.ListEvaluationsResponse getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
googleapis/google-cloud-java
36,533
java-iam-admin/proto-google-iam-admin-v1/src/main/java/com/google/iam/admin/v1/QueryTestablePermissionsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/iam/admin/v1/iam.proto // Protobuf Java Version: 3.25.8 package com.google.iam.admin.v1; /** * * * <pre> * The response containing permissions which can be tested on a resource. * </pre> * * Protobuf type {@code google.iam.admin.v1.QueryTestablePermissionsResponse} */ public final class QueryTestablePermissionsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.iam.admin.v1.QueryTestablePermissionsResponse) QueryTestablePermissionsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use QueryTestablePermissionsResponse.newBuilder() to construct. 
private QueryTestablePermissionsResponse( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private QueryTestablePermissionsResponse() { permissions_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new QueryTestablePermissionsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.iam.admin.v1.Iam .internal_static_google_iam_admin_v1_QueryTestablePermissionsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.iam.admin.v1.Iam .internal_static_google_iam_admin_v1_QueryTestablePermissionsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.iam.admin.v1.QueryTestablePermissionsResponse.class, com.google.iam.admin.v1.QueryTestablePermissionsResponse.Builder.class); } public static final int PERMISSIONS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.iam.admin.v1.Permission> permissions_; /** * * * <pre> * The Permissions testable on the requested resource. * </pre> * * <code>repeated .google.iam.admin.v1.Permission permissions = 1;</code> */ @java.lang.Override public java.util.List<com.google.iam.admin.v1.Permission> getPermissionsList() { return permissions_; } /** * * * <pre> * The Permissions testable on the requested resource. * </pre> * * <code>repeated .google.iam.admin.v1.Permission permissions = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.iam.admin.v1.PermissionOrBuilder> getPermissionsOrBuilderList() { return permissions_; } /** * * * <pre> * The Permissions testable on the requested resource. 
* </pre> * * <code>repeated .google.iam.admin.v1.Permission permissions = 1;</code> */ @java.lang.Override public int getPermissionsCount() { return permissions_.size(); } /** * * * <pre> * The Permissions testable on the requested resource. * </pre> * * <code>repeated .google.iam.admin.v1.Permission permissions = 1;</code> */ @java.lang.Override public com.google.iam.admin.v1.Permission getPermissions(int index) { return permissions_.get(index); } /** * * * <pre> * The Permissions testable on the requested resource. * </pre> * * <code>repeated .google.iam.admin.v1.Permission permissions = 1;</code> */ @java.lang.Override public com.google.iam.admin.v1.PermissionOrBuilder getPermissionsOrBuilder(int index) { return permissions_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * To retrieve the next page of results, set * `QueryTestableRolesRequest.page_token` to this value. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * To retrieve the next page of results, set * `QueryTestableRolesRequest.page_token` to this value. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < permissions_.size(); i++) { output.writeMessage(1, permissions_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < permissions_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, permissions_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.iam.admin.v1.QueryTestablePermissionsResponse)) { return super.equals(obj); } com.google.iam.admin.v1.QueryTestablePermissionsResponse other = (com.google.iam.admin.v1.QueryTestablePermissionsResponse) obj; if (!getPermissionsList().equals(other.getPermissionsList())) return false; if 
(!getNextPageToken().equals(other.getNextPageToken())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getPermissionsCount() > 0) { hash = (37 * hash) + PERMISSIONS_FIELD_NUMBER; hash = (53 * hash) + getPermissionsList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.iam.admin.v1.QueryTestablePermissionsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.iam.admin.v1.QueryTestablePermissionsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.iam.admin.v1.QueryTestablePermissionsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.iam.admin.v1.QueryTestablePermissionsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.iam.admin.v1.QueryTestablePermissionsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.iam.admin.v1.QueryTestablePermissionsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.iam.admin.v1.QueryTestablePermissionsResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.iam.admin.v1.QueryTestablePermissionsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.iam.admin.v1.QueryTestablePermissionsResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.iam.admin.v1.QueryTestablePermissionsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.iam.admin.v1.QueryTestablePermissionsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.iam.admin.v1.QueryTestablePermissionsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( 
com.google.iam.admin.v1.QueryTestablePermissionsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The response containing permissions which can be tested on a resource. * </pre> * * Protobuf type {@code google.iam.admin.v1.QueryTestablePermissionsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.iam.admin.v1.QueryTestablePermissionsResponse) com.google.iam.admin.v1.QueryTestablePermissionsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.iam.admin.v1.Iam .internal_static_google_iam_admin_v1_QueryTestablePermissionsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.iam.admin.v1.Iam .internal_static_google_iam_admin_v1_QueryTestablePermissionsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.iam.admin.v1.QueryTestablePermissionsResponse.class, com.google.iam.admin.v1.QueryTestablePermissionsResponse.Builder.class); } // Construct using com.google.iam.admin.v1.QueryTestablePermissionsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (permissionsBuilder_ == null) { permissions_ = java.util.Collections.emptyList(); } else { permissions_ = null; permissionsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ 
= ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.iam.admin.v1.Iam .internal_static_google_iam_admin_v1_QueryTestablePermissionsResponse_descriptor; } @java.lang.Override public com.google.iam.admin.v1.QueryTestablePermissionsResponse getDefaultInstanceForType() { return com.google.iam.admin.v1.QueryTestablePermissionsResponse.getDefaultInstance(); } @java.lang.Override public com.google.iam.admin.v1.QueryTestablePermissionsResponse build() { com.google.iam.admin.v1.QueryTestablePermissionsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.iam.admin.v1.QueryTestablePermissionsResponse buildPartial() { com.google.iam.admin.v1.QueryTestablePermissionsResponse result = new com.google.iam.admin.v1.QueryTestablePermissionsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.iam.admin.v1.QueryTestablePermissionsResponse result) { if (permissionsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { permissions_ = java.util.Collections.unmodifiableList(permissions_); bitField0_ = (bitField0_ & ~0x00000001); } result.permissions_ = permissions_; } else { result.permissions_ = permissionsBuilder_.build(); } } private void buildPartial0(com.google.iam.admin.v1.QueryTestablePermissionsResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor 
field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.iam.admin.v1.QueryTestablePermissionsResponse) { return mergeFrom((com.google.iam.admin.v1.QueryTestablePermissionsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.iam.admin.v1.QueryTestablePermissionsResponse other) { if (other == com.google.iam.admin.v1.QueryTestablePermissionsResponse.getDefaultInstance()) return this; if (permissionsBuilder_ == null) { if (!other.permissions_.isEmpty()) { if (permissions_.isEmpty()) { permissions_ = other.permissions_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensurePermissionsIsMutable(); permissions_.addAll(other.permissions_); } onChanged(); } } else { if (!other.permissions_.isEmpty()) { if (permissionsBuilder_.isEmpty()) { permissionsBuilder_.dispose(); permissionsBuilder_ = null; permissions_ = other.permissions_; bitField0_ = (bitField0_ & ~0x00000001); permissionsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getPermissionsFieldBuilder() : null; } else { permissionsBuilder_.addAllMessages(other.permissions_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.iam.admin.v1.Permission m = input.readMessage( com.google.iam.admin.v1.Permission.parser(), extensionRegistry); if (permissionsBuilder_ == null) { ensurePermissionsIsMutable(); permissions_.add(m); } else { permissionsBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.iam.admin.v1.Permission> permissions_ = java.util.Collections.emptyList(); private void ensurePermissionsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { permissions_ = new java.util.ArrayList<com.google.iam.admin.v1.Permission>(permissions_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.iam.admin.v1.Permission, com.google.iam.admin.v1.Permission.Builder, com.google.iam.admin.v1.PermissionOrBuilder> 
permissionsBuilder_; /** * * * <pre> * The Permissions testable on the requested resource. * </pre> * * <code>repeated .google.iam.admin.v1.Permission permissions = 1;</code> */ public java.util.List<com.google.iam.admin.v1.Permission> getPermissionsList() { if (permissionsBuilder_ == null) { return java.util.Collections.unmodifiableList(permissions_); } else { return permissionsBuilder_.getMessageList(); } } /** * * * <pre> * The Permissions testable on the requested resource. * </pre> * * <code>repeated .google.iam.admin.v1.Permission permissions = 1;</code> */ public int getPermissionsCount() { if (permissionsBuilder_ == null) { return permissions_.size(); } else { return permissionsBuilder_.getCount(); } } /** * * * <pre> * The Permissions testable on the requested resource. * </pre> * * <code>repeated .google.iam.admin.v1.Permission permissions = 1;</code> */ public com.google.iam.admin.v1.Permission getPermissions(int index) { if (permissionsBuilder_ == null) { return permissions_.get(index); } else { return permissionsBuilder_.getMessage(index); } } /** * * * <pre> * The Permissions testable on the requested resource. * </pre> * * <code>repeated .google.iam.admin.v1.Permission permissions = 1;</code> */ public Builder setPermissions(int index, com.google.iam.admin.v1.Permission value) { if (permissionsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensurePermissionsIsMutable(); permissions_.set(index, value); onChanged(); } else { permissionsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The Permissions testable on the requested resource. 
* </pre> * * <code>repeated .google.iam.admin.v1.Permission permissions = 1;</code> */ public Builder setPermissions( int index, com.google.iam.admin.v1.Permission.Builder builderForValue) { if (permissionsBuilder_ == null) { ensurePermissionsIsMutable(); permissions_.set(index, builderForValue.build()); onChanged(); } else { permissionsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The Permissions testable on the requested resource. * </pre> * * <code>repeated .google.iam.admin.v1.Permission permissions = 1;</code> */ public Builder addPermissions(com.google.iam.admin.v1.Permission value) { if (permissionsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensurePermissionsIsMutable(); permissions_.add(value); onChanged(); } else { permissionsBuilder_.addMessage(value); } return this; } /** * * * <pre> * The Permissions testable on the requested resource. * </pre> * * <code>repeated .google.iam.admin.v1.Permission permissions = 1;</code> */ public Builder addPermissions(int index, com.google.iam.admin.v1.Permission value) { if (permissionsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensurePermissionsIsMutable(); permissions_.add(index, value); onChanged(); } else { permissionsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The Permissions testable on the requested resource. * </pre> * * <code>repeated .google.iam.admin.v1.Permission permissions = 1;</code> */ public Builder addPermissions(com.google.iam.admin.v1.Permission.Builder builderForValue) { if (permissionsBuilder_ == null) { ensurePermissionsIsMutable(); permissions_.add(builderForValue.build()); onChanged(); } else { permissionsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The Permissions testable on the requested resource. 
* </pre> * * <code>repeated .google.iam.admin.v1.Permission permissions = 1;</code> */ public Builder addPermissions( int index, com.google.iam.admin.v1.Permission.Builder builderForValue) { if (permissionsBuilder_ == null) { ensurePermissionsIsMutable(); permissions_.add(index, builderForValue.build()); onChanged(); } else { permissionsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The Permissions testable on the requested resource. * </pre> * * <code>repeated .google.iam.admin.v1.Permission permissions = 1;</code> */ public Builder addAllPermissions( java.lang.Iterable<? extends com.google.iam.admin.v1.Permission> values) { if (permissionsBuilder_ == null) { ensurePermissionsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, permissions_); onChanged(); } else { permissionsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The Permissions testable on the requested resource. * </pre> * * <code>repeated .google.iam.admin.v1.Permission permissions = 1;</code> */ public Builder clearPermissions() { if (permissionsBuilder_ == null) { permissions_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { permissionsBuilder_.clear(); } return this; } /** * * * <pre> * The Permissions testable on the requested resource. * </pre> * * <code>repeated .google.iam.admin.v1.Permission permissions = 1;</code> */ public Builder removePermissions(int index) { if (permissionsBuilder_ == null) { ensurePermissionsIsMutable(); permissions_.remove(index); onChanged(); } else { permissionsBuilder_.remove(index); } return this; } /** * * * <pre> * The Permissions testable on the requested resource. 
* </pre> * * <code>repeated .google.iam.admin.v1.Permission permissions = 1;</code> */ public com.google.iam.admin.v1.Permission.Builder getPermissionsBuilder(int index) { return getPermissionsFieldBuilder().getBuilder(index); } /** * * * <pre> * The Permissions testable on the requested resource. * </pre> * * <code>repeated .google.iam.admin.v1.Permission permissions = 1;</code> */ public com.google.iam.admin.v1.PermissionOrBuilder getPermissionsOrBuilder(int index) { if (permissionsBuilder_ == null) { return permissions_.get(index); } else { return permissionsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The Permissions testable on the requested resource. * </pre> * * <code>repeated .google.iam.admin.v1.Permission permissions = 1;</code> */ public java.util.List<? extends com.google.iam.admin.v1.PermissionOrBuilder> getPermissionsOrBuilderList() { if (permissionsBuilder_ != null) { return permissionsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(permissions_); } } /** * * * <pre> * The Permissions testable on the requested resource. * </pre> * * <code>repeated .google.iam.admin.v1.Permission permissions = 1;</code> */ public com.google.iam.admin.v1.Permission.Builder addPermissionsBuilder() { return getPermissionsFieldBuilder() .addBuilder(com.google.iam.admin.v1.Permission.getDefaultInstance()); } /** * * * <pre> * The Permissions testable on the requested resource. * </pre> * * <code>repeated .google.iam.admin.v1.Permission permissions = 1;</code> */ public com.google.iam.admin.v1.Permission.Builder addPermissionsBuilder(int index) { return getPermissionsFieldBuilder() .addBuilder(index, com.google.iam.admin.v1.Permission.getDefaultInstance()); } /** * * * <pre> * The Permissions testable on the requested resource. 
* </pre> * * <code>repeated .google.iam.admin.v1.Permission permissions = 1;</code> */ public java.util.List<com.google.iam.admin.v1.Permission.Builder> getPermissionsBuilderList() { return getPermissionsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.iam.admin.v1.Permission, com.google.iam.admin.v1.Permission.Builder, com.google.iam.admin.v1.PermissionOrBuilder> getPermissionsFieldBuilder() { if (permissionsBuilder_ == null) { permissionsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.iam.admin.v1.Permission, com.google.iam.admin.v1.Permission.Builder, com.google.iam.admin.v1.PermissionOrBuilder>( permissions_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); permissions_ = null; } return permissionsBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * To retrieve the next page of results, set * `QueryTestableRolesRequest.page_token` to this value. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * To retrieve the next page of results, set * `QueryTestableRolesRequest.page_token` to this value. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * To retrieve the next page of results, set * `QueryTestableRolesRequest.page_token` to this value. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * To retrieve the next page of results, set * `QueryTestableRolesRequest.page_token` to this value. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * To retrieve the next page of results, set * `QueryTestableRolesRequest.page_token` to this value. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.iam.admin.v1.QueryTestablePermissionsResponse) } // @@protoc_insertion_point(class_scope:google.iam.admin.v1.QueryTestablePermissionsResponse) private static final com.google.iam.admin.v1.QueryTestablePermissionsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.iam.admin.v1.QueryTestablePermissionsResponse(); } public static com.google.iam.admin.v1.QueryTestablePermissionsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<QueryTestablePermissionsResponse> PARSER = new com.google.protobuf.AbstractParser<QueryTestablePermissionsResponse>() { @java.lang.Override public QueryTestablePermissionsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } 
return builder.buildPartial(); } }; public static com.google.protobuf.Parser<QueryTestablePermissionsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<QueryTestablePermissionsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.iam.admin.v1.QueryTestablePermissionsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleads/google-ads-java
36,712
google-ads-stubs-v19/src/main/java/com/google/ads/googleads/v19/services/ConversionGoalCampaignConfigOperation.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v19/services/conversion_goal_campaign_config_service.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v19.services; /** * <pre> * A single operation (update) on a conversion goal campaign config. * </pre> * * Protobuf type {@code google.ads.googleads.v19.services.ConversionGoalCampaignConfigOperation} */ public final class ConversionGoalCampaignConfigOperation extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v19.services.ConversionGoalCampaignConfigOperation) ConversionGoalCampaignConfigOperationOrBuilder { private static final long serialVersionUID = 0L; // Use ConversionGoalCampaignConfigOperation.newBuilder() to construct. private ConversionGoalCampaignConfigOperation(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ConversionGoalCampaignConfigOperation() { } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new ConversionGoalCampaignConfigOperation(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v19.services.ConversionGoalCampaignConfigServiceProto.internal_static_google_ads_googleads_v19_services_ConversionGoalCampaignConfigOperation_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v19.services.ConversionGoalCampaignConfigServiceProto.internal_static_google_ads_googleads_v19_services_ConversionGoalCampaignConfigOperation_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v19.services.ConversionGoalCampaignConfigOperation.class, com.google.ads.googleads.v19.services.ConversionGoalCampaignConfigOperation.Builder.class); } private int bitField0_; 
private int operationCase_ = 0; @SuppressWarnings("serial") private java.lang.Object operation_; public enum OperationCase implements com.google.protobuf.Internal.EnumLite, com.google.protobuf.AbstractMessage.InternalOneOfEnum { UPDATE(1), OPERATION_NOT_SET(0); private final int value; private OperationCase(int value) { this.value = value; } /** * @param value The number of the enum to look for. * @return The enum associated with the given number. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static OperationCase valueOf(int value) { return forNumber(value); } public static OperationCase forNumber(int value) { switch (value) { case 1: return UPDATE; case 0: return OPERATION_NOT_SET; default: return null; } } public int getNumber() { return this.value; } }; public OperationCase getOperationCase() { return OperationCase.forNumber( operationCase_); } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000001) != 0); } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? 
com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } public static final int UPDATE_FIELD_NUMBER = 1; /** * <pre> * Update operation: The conversion goal campaign config is expected to have * a valid resource name. * </pre> * * <code>.google.ads.googleads.v19.resources.ConversionGoalCampaignConfig update = 1;</code> * @return Whether the update field is set. */ @java.lang.Override public boolean hasUpdate() { return operationCase_ == 1; } /** * <pre> * Update operation: The conversion goal campaign config is expected to have * a valid resource name. * </pre> * * <code>.google.ads.googleads.v19.resources.ConversionGoalCampaignConfig update = 1;</code> * @return The update. */ @java.lang.Override public com.google.ads.googleads.v19.resources.ConversionGoalCampaignConfig getUpdate() { if (operationCase_ == 1) { return (com.google.ads.googleads.v19.resources.ConversionGoalCampaignConfig) operation_; } return com.google.ads.googleads.v19.resources.ConversionGoalCampaignConfig.getDefaultInstance(); } /** * <pre> * Update operation: The conversion goal campaign config is expected to have * a valid resource name. 
* </pre> * * <code>.google.ads.googleads.v19.resources.ConversionGoalCampaignConfig update = 1;</code> */ @java.lang.Override public com.google.ads.googleads.v19.resources.ConversionGoalCampaignConfigOrBuilder getUpdateOrBuilder() { if (operationCase_ == 1) { return (com.google.ads.googleads.v19.resources.ConversionGoalCampaignConfig) operation_; } return com.google.ads.googleads.v19.resources.ConversionGoalCampaignConfig.getDefaultInstance(); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (operationCase_ == 1) { output.writeMessage(1, (com.google.ads.googleads.v19.resources.ConversionGoalCampaignConfig) operation_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (operationCase_ == 1) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, (com.google.ads.googleads.v19.resources.ConversionGoalCampaignConfig) operation_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v19.services.ConversionGoalCampaignConfigOperation)) { return super.equals(obj); } com.google.ads.googleads.v19.services.ConversionGoalCampaignConfigOperation other = (com.google.ads.googleads.v19.services.ConversionGoalCampaignConfigOperation) obj; if 
(hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask() .equals(other.getUpdateMask())) return false; } if (!getOperationCase().equals(other.getOperationCase())) return false; switch (operationCase_) { case 1: if (!getUpdate() .equals(other.getUpdate())) return false; break; case 0: default: } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } switch (operationCase_) { case 1: hash = (37 * hash) + UPDATE_FIELD_NUMBER; hash = (53 * hash) + getUpdate().hashCode(); break; case 0: default: } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v19.services.ConversionGoalCampaignConfigOperation parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.services.ConversionGoalCampaignConfigOperation parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v19.services.ConversionGoalCampaignConfigOperation parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.services.ConversionGoalCampaignConfigOperation parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, 
extensionRegistry); } public static com.google.ads.googleads.v19.services.ConversionGoalCampaignConfigOperation parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.services.ConversionGoalCampaignConfigOperation parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v19.services.ConversionGoalCampaignConfigOperation parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.services.ConversionGoalCampaignConfigOperation parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v19.services.ConversionGoalCampaignConfigOperation parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.services.ConversionGoalCampaignConfigOperation parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v19.services.ConversionGoalCampaignConfigOperation parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static 
com.google.ads.googleads.v19.services.ConversionGoalCampaignConfigOperation parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v19.services.ConversionGoalCampaignConfigOperation prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * A single operation (update) on a conversion goal campaign config. 
* </pre> * * Protobuf type {@code google.ads.googleads.v19.services.ConversionGoalCampaignConfigOperation} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v19.services.ConversionGoalCampaignConfigOperation) com.google.ads.googleads.v19.services.ConversionGoalCampaignConfigOperationOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v19.services.ConversionGoalCampaignConfigServiceProto.internal_static_google_ads_googleads_v19_services_ConversionGoalCampaignConfigOperation_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v19.services.ConversionGoalCampaignConfigServiceProto.internal_static_google_ads_googleads_v19_services_ConversionGoalCampaignConfigOperation_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v19.services.ConversionGoalCampaignConfigOperation.class, com.google.ads.googleads.v19.services.ConversionGoalCampaignConfigOperation.Builder.class); } // Construct using com.google.ads.googleads.v19.services.ConversionGoalCampaignConfigOperation.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } if (updateBuilder_ != null) { updateBuilder_.clear(); } operationCase_ = 0; operation_ = null; return this; } @java.lang.Override public 
com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.googleads.v19.services.ConversionGoalCampaignConfigServiceProto.internal_static_google_ads_googleads_v19_services_ConversionGoalCampaignConfigOperation_descriptor; } @java.lang.Override public com.google.ads.googleads.v19.services.ConversionGoalCampaignConfigOperation getDefaultInstanceForType() { return com.google.ads.googleads.v19.services.ConversionGoalCampaignConfigOperation.getDefaultInstance(); } @java.lang.Override public com.google.ads.googleads.v19.services.ConversionGoalCampaignConfigOperation build() { com.google.ads.googleads.v19.services.ConversionGoalCampaignConfigOperation result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v19.services.ConversionGoalCampaignConfigOperation buildPartial() { com.google.ads.googleads.v19.services.ConversionGoalCampaignConfigOperation result = new com.google.ads.googleads.v19.services.ConversionGoalCampaignConfigOperation(this); if (bitField0_ != 0) { buildPartial0(result); } buildPartialOneofs(result); onBuilt(); return result; } private void buildPartial0(com.google.ads.googleads.v19.services.ConversionGoalCampaignConfigOperation result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? 
updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } private void buildPartialOneofs(com.google.ads.googleads.v19.services.ConversionGoalCampaignConfigOperation result) { result.operationCase_ = operationCase_; result.operation_ = this.operation_; if (operationCase_ == 1 && updateBuilder_ != null) { result.operation_ = updateBuilder_.build(); } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v19.services.ConversionGoalCampaignConfigOperation) { return mergeFrom((com.google.ads.googleads.v19.services.ConversionGoalCampaignConfigOperation)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v19.services.ConversionGoalCampaignConfigOperation other) { if (other == com.google.ads.googleads.v19.services.ConversionGoalCampaignConfigOperation.getDefaultInstance()) return this; if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } switch (other.getOperationCase()) { case UPDATE: { 
mergeUpdate(other.getUpdate()); break; } case OPERATION_NOT_SET: { break; } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage( getUpdateFieldBuilder().getBuilder(), extensionRegistry); operationCase_ = 1; break; } // case 10 case 18: { input.readMessage( getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int operationCase_ = 0; private java.lang.Object operation_; public OperationCase getOperationCase() { return OperationCase.forNumber( operationCase_); } public Builder clearOperation() { operationCase_ = 0; operation_ = null; onChanged(); return this; } private int bitField0_; private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * @return Whether the updateMask field is set. 
*/ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000001) != 0); } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder setUpdateMask( com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * <pre> * FieldMask that determines which resource fields are modified in an update. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000001); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000001; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * <pre> * FieldMask that determines which resource fields are modified in an update. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } private com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v19.resources.ConversionGoalCampaignConfig, com.google.ads.googleads.v19.resources.ConversionGoalCampaignConfig.Builder, com.google.ads.googleads.v19.resources.ConversionGoalCampaignConfigOrBuilder> updateBuilder_; /** * <pre> * Update operation: The conversion goal campaign config is expected to have * a valid resource name. * </pre> * * <code>.google.ads.googleads.v19.resources.ConversionGoalCampaignConfig update = 1;</code> * @return Whether the update field is set. */ @java.lang.Override public boolean hasUpdate() { return operationCase_ == 1; } /** * <pre> * Update operation: The conversion goal campaign config is expected to have * a valid resource name. * </pre> * * <code>.google.ads.googleads.v19.resources.ConversionGoalCampaignConfig update = 1;</code> * @return The update. 
*/ @java.lang.Override public com.google.ads.googleads.v19.resources.ConversionGoalCampaignConfig getUpdate() { if (updateBuilder_ == null) { if (operationCase_ == 1) { return (com.google.ads.googleads.v19.resources.ConversionGoalCampaignConfig) operation_; } return com.google.ads.googleads.v19.resources.ConversionGoalCampaignConfig.getDefaultInstance(); } else { if (operationCase_ == 1) { return updateBuilder_.getMessage(); } return com.google.ads.googleads.v19.resources.ConversionGoalCampaignConfig.getDefaultInstance(); } } /** * <pre> * Update operation: The conversion goal campaign config is expected to have * a valid resource name. * </pre> * * <code>.google.ads.googleads.v19.resources.ConversionGoalCampaignConfig update = 1;</code> */ public Builder setUpdate(com.google.ads.googleads.v19.resources.ConversionGoalCampaignConfig value) { if (updateBuilder_ == null) { if (value == null) { throw new NullPointerException(); } operation_ = value; onChanged(); } else { updateBuilder_.setMessage(value); } operationCase_ = 1; return this; } /** * <pre> * Update operation: The conversion goal campaign config is expected to have * a valid resource name. * </pre> * * <code>.google.ads.googleads.v19.resources.ConversionGoalCampaignConfig update = 1;</code> */ public Builder setUpdate( com.google.ads.googleads.v19.resources.ConversionGoalCampaignConfig.Builder builderForValue) { if (updateBuilder_ == null) { operation_ = builderForValue.build(); onChanged(); } else { updateBuilder_.setMessage(builderForValue.build()); } operationCase_ = 1; return this; } /** * <pre> * Update operation: The conversion goal campaign config is expected to have * a valid resource name. 
* </pre> * * <code>.google.ads.googleads.v19.resources.ConversionGoalCampaignConfig update = 1;</code> */ public Builder mergeUpdate(com.google.ads.googleads.v19.resources.ConversionGoalCampaignConfig value) { if (updateBuilder_ == null) { if (operationCase_ == 1 && operation_ != com.google.ads.googleads.v19.resources.ConversionGoalCampaignConfig.getDefaultInstance()) { operation_ = com.google.ads.googleads.v19.resources.ConversionGoalCampaignConfig.newBuilder((com.google.ads.googleads.v19.resources.ConversionGoalCampaignConfig) operation_) .mergeFrom(value).buildPartial(); } else { operation_ = value; } onChanged(); } else { if (operationCase_ == 1) { updateBuilder_.mergeFrom(value); } else { updateBuilder_.setMessage(value); } } operationCase_ = 1; return this; } /** * <pre> * Update operation: The conversion goal campaign config is expected to have * a valid resource name. * </pre> * * <code>.google.ads.googleads.v19.resources.ConversionGoalCampaignConfig update = 1;</code> */ public Builder clearUpdate() { if (updateBuilder_ == null) { if (operationCase_ == 1) { operationCase_ = 0; operation_ = null; onChanged(); } } else { if (operationCase_ == 1) { operationCase_ = 0; operation_ = null; } updateBuilder_.clear(); } return this; } /** * <pre> * Update operation: The conversion goal campaign config is expected to have * a valid resource name. * </pre> * * <code>.google.ads.googleads.v19.resources.ConversionGoalCampaignConfig update = 1;</code> */ public com.google.ads.googleads.v19.resources.ConversionGoalCampaignConfig.Builder getUpdateBuilder() { return getUpdateFieldBuilder().getBuilder(); } /** * <pre> * Update operation: The conversion goal campaign config is expected to have * a valid resource name. 
* </pre> * * <code>.google.ads.googleads.v19.resources.ConversionGoalCampaignConfig update = 1;</code> */ @java.lang.Override public com.google.ads.googleads.v19.resources.ConversionGoalCampaignConfigOrBuilder getUpdateOrBuilder() { if ((operationCase_ == 1) && (updateBuilder_ != null)) { return updateBuilder_.getMessageOrBuilder(); } else { if (operationCase_ == 1) { return (com.google.ads.googleads.v19.resources.ConversionGoalCampaignConfig) operation_; } return com.google.ads.googleads.v19.resources.ConversionGoalCampaignConfig.getDefaultInstance(); } } /** * <pre> * Update operation: The conversion goal campaign config is expected to have * a valid resource name. * </pre> * * <code>.google.ads.googleads.v19.resources.ConversionGoalCampaignConfig update = 1;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v19.resources.ConversionGoalCampaignConfig, com.google.ads.googleads.v19.resources.ConversionGoalCampaignConfig.Builder, com.google.ads.googleads.v19.resources.ConversionGoalCampaignConfigOrBuilder> getUpdateFieldBuilder() { if (updateBuilder_ == null) { if (!(operationCase_ == 1)) { operation_ = com.google.ads.googleads.v19.resources.ConversionGoalCampaignConfig.getDefaultInstance(); } updateBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v19.resources.ConversionGoalCampaignConfig, com.google.ads.googleads.v19.resources.ConversionGoalCampaignConfig.Builder, com.google.ads.googleads.v19.resources.ConversionGoalCampaignConfigOrBuilder>( (com.google.ads.googleads.v19.resources.ConversionGoalCampaignConfig) operation_, getParentForChildren(), isClean()); operation_ = null; } operationCase_ = 1; onChanged(); return updateBuilder_; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final 
com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v19.services.ConversionGoalCampaignConfigOperation) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v19.services.ConversionGoalCampaignConfigOperation) private static final com.google.ads.googleads.v19.services.ConversionGoalCampaignConfigOperation DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v19.services.ConversionGoalCampaignConfigOperation(); } public static com.google.ads.googleads.v19.services.ConversionGoalCampaignConfigOperation getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ConversionGoalCampaignConfigOperation> PARSER = new com.google.protobuf.AbstractParser<ConversionGoalCampaignConfigOperation>() { @java.lang.Override public ConversionGoalCampaignConfigOperation parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ConversionGoalCampaignConfigOperation> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ConversionGoalCampaignConfigOperation> getParserForType() { return PARSER; } @java.lang.Override public 
com.google.ads.googleads.v19.services.ConversionGoalCampaignConfigOperation getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
// NOTE(review): the following three lines are extraction artifacts — dataset
// row fields (repository id, file size, file path) leaked into the source at
// the seam between two different generated files (v19 tail above, v20 file
// below). They are not Java and must be removed; preserved here as comments:
// googleads/google-ads-java
// 36,712
// google-ads-stubs-v20/src/main/java/com/google/ads/googleads/v20/services/ConversionGoalCampaignConfigOperation.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v20/services/conversion_goal_campaign_config_service.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v20.services; /** * <pre> * A single operation (update) on a conversion goal campaign config. * </pre> * * Protobuf type {@code google.ads.googleads.v20.services.ConversionGoalCampaignConfigOperation} */ public final class ConversionGoalCampaignConfigOperation extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v20.services.ConversionGoalCampaignConfigOperation) ConversionGoalCampaignConfigOperationOrBuilder { private static final long serialVersionUID = 0L; // Use ConversionGoalCampaignConfigOperation.newBuilder() to construct. private ConversionGoalCampaignConfigOperation(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ConversionGoalCampaignConfigOperation() { } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new ConversionGoalCampaignConfigOperation(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v20.services.ConversionGoalCampaignConfigServiceProto.internal_static_google_ads_googleads_v20_services_ConversionGoalCampaignConfigOperation_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v20.services.ConversionGoalCampaignConfigServiceProto.internal_static_google_ads_googleads_v20_services_ConversionGoalCampaignConfigOperation_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v20.services.ConversionGoalCampaignConfigOperation.class, com.google.ads.googleads.v20.services.ConversionGoalCampaignConfigOperation.Builder.class); } private int bitField0_; 
private int operationCase_ = 0; @SuppressWarnings("serial") private java.lang.Object operation_; public enum OperationCase implements com.google.protobuf.Internal.EnumLite, com.google.protobuf.AbstractMessage.InternalOneOfEnum { UPDATE(1), OPERATION_NOT_SET(0); private final int value; private OperationCase(int value) { this.value = value; } /** * @param value The number of the enum to look for. * @return The enum associated with the given number. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static OperationCase valueOf(int value) { return forNumber(value); } public static OperationCase forNumber(int value) { switch (value) { case 1: return UPDATE; case 0: return OPERATION_NOT_SET; default: return null; } } public int getNumber() { return this.value; } }; public OperationCase getOperationCase() { return OperationCase.forNumber( operationCase_); } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000001) != 0); } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? 
com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } public static final int UPDATE_FIELD_NUMBER = 1; /** * <pre> * Update operation: The conversion goal campaign config is expected to have * a valid resource name. * </pre> * * <code>.google.ads.googleads.v20.resources.ConversionGoalCampaignConfig update = 1;</code> * @return Whether the update field is set. */ @java.lang.Override public boolean hasUpdate() { return operationCase_ == 1; } /** * <pre> * Update operation: The conversion goal campaign config is expected to have * a valid resource name. * </pre> * * <code>.google.ads.googleads.v20.resources.ConversionGoalCampaignConfig update = 1;</code> * @return The update. */ @java.lang.Override public com.google.ads.googleads.v20.resources.ConversionGoalCampaignConfig getUpdate() { if (operationCase_ == 1) { return (com.google.ads.googleads.v20.resources.ConversionGoalCampaignConfig) operation_; } return com.google.ads.googleads.v20.resources.ConversionGoalCampaignConfig.getDefaultInstance(); } /** * <pre> * Update operation: The conversion goal campaign config is expected to have * a valid resource name. 
* </pre> * * <code>.google.ads.googleads.v20.resources.ConversionGoalCampaignConfig update = 1;</code> */ @java.lang.Override public com.google.ads.googleads.v20.resources.ConversionGoalCampaignConfigOrBuilder getUpdateOrBuilder() { if (operationCase_ == 1) { return (com.google.ads.googleads.v20.resources.ConversionGoalCampaignConfig) operation_; } return com.google.ads.googleads.v20.resources.ConversionGoalCampaignConfig.getDefaultInstance(); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (operationCase_ == 1) { output.writeMessage(1, (com.google.ads.googleads.v20.resources.ConversionGoalCampaignConfig) operation_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (operationCase_ == 1) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, (com.google.ads.googleads.v20.resources.ConversionGoalCampaignConfig) operation_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v20.services.ConversionGoalCampaignConfigOperation)) { return super.equals(obj); } com.google.ads.googleads.v20.services.ConversionGoalCampaignConfigOperation other = (com.google.ads.googleads.v20.services.ConversionGoalCampaignConfigOperation) obj; if 
(hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask() .equals(other.getUpdateMask())) return false; } if (!getOperationCase().equals(other.getOperationCase())) return false; switch (operationCase_) { case 1: if (!getUpdate() .equals(other.getUpdate())) return false; break; case 0: default: } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } switch (operationCase_) { case 1: hash = (37 * hash) + UPDATE_FIELD_NUMBER; hash = (53 * hash) + getUpdate().hashCode(); break; case 0: default: } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v20.services.ConversionGoalCampaignConfigOperation parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.services.ConversionGoalCampaignConfigOperation parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v20.services.ConversionGoalCampaignConfigOperation parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.services.ConversionGoalCampaignConfigOperation parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, 
extensionRegistry); } public static com.google.ads.googleads.v20.services.ConversionGoalCampaignConfigOperation parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.services.ConversionGoalCampaignConfigOperation parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v20.services.ConversionGoalCampaignConfigOperation parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.services.ConversionGoalCampaignConfigOperation parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v20.services.ConversionGoalCampaignConfigOperation parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.services.ConversionGoalCampaignConfigOperation parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v20.services.ConversionGoalCampaignConfigOperation parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static 
com.google.ads.googleads.v20.services.ConversionGoalCampaignConfigOperation parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v20.services.ConversionGoalCampaignConfigOperation prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * A single operation (update) on a conversion goal campaign config. 
* </pre> * * Protobuf type {@code google.ads.googleads.v20.services.ConversionGoalCampaignConfigOperation} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v20.services.ConversionGoalCampaignConfigOperation) com.google.ads.googleads.v20.services.ConversionGoalCampaignConfigOperationOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v20.services.ConversionGoalCampaignConfigServiceProto.internal_static_google_ads_googleads_v20_services_ConversionGoalCampaignConfigOperation_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v20.services.ConversionGoalCampaignConfigServiceProto.internal_static_google_ads_googleads_v20_services_ConversionGoalCampaignConfigOperation_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v20.services.ConversionGoalCampaignConfigOperation.class, com.google.ads.googleads.v20.services.ConversionGoalCampaignConfigOperation.Builder.class); } // Construct using com.google.ads.googleads.v20.services.ConversionGoalCampaignConfigOperation.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } if (updateBuilder_ != null) { updateBuilder_.clear(); } operationCase_ = 0; operation_ = null; return this; } @java.lang.Override public 
com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.googleads.v20.services.ConversionGoalCampaignConfigServiceProto.internal_static_google_ads_googleads_v20_services_ConversionGoalCampaignConfigOperation_descriptor; } @java.lang.Override public com.google.ads.googleads.v20.services.ConversionGoalCampaignConfigOperation getDefaultInstanceForType() { return com.google.ads.googleads.v20.services.ConversionGoalCampaignConfigOperation.getDefaultInstance(); } @java.lang.Override public com.google.ads.googleads.v20.services.ConversionGoalCampaignConfigOperation build() { com.google.ads.googleads.v20.services.ConversionGoalCampaignConfigOperation result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v20.services.ConversionGoalCampaignConfigOperation buildPartial() { com.google.ads.googleads.v20.services.ConversionGoalCampaignConfigOperation result = new com.google.ads.googleads.v20.services.ConversionGoalCampaignConfigOperation(this); if (bitField0_ != 0) { buildPartial0(result); } buildPartialOneofs(result); onBuilt(); return result; } private void buildPartial0(com.google.ads.googleads.v20.services.ConversionGoalCampaignConfigOperation result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? 
updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } private void buildPartialOneofs(com.google.ads.googleads.v20.services.ConversionGoalCampaignConfigOperation result) { result.operationCase_ = operationCase_; result.operation_ = this.operation_; if (operationCase_ == 1 && updateBuilder_ != null) { result.operation_ = updateBuilder_.build(); } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v20.services.ConversionGoalCampaignConfigOperation) { return mergeFrom((com.google.ads.googleads.v20.services.ConversionGoalCampaignConfigOperation)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v20.services.ConversionGoalCampaignConfigOperation other) { if (other == com.google.ads.googleads.v20.services.ConversionGoalCampaignConfigOperation.getDefaultInstance()) return this; if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } switch (other.getOperationCase()) { case UPDATE: { 
mergeUpdate(other.getUpdate()); break; } case OPERATION_NOT_SET: { break; } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage( getUpdateFieldBuilder().getBuilder(), extensionRegistry); operationCase_ = 1; break; } // case 10 case 18: { input.readMessage( getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int operationCase_ = 0; private java.lang.Object operation_; public OperationCase getOperationCase() { return OperationCase.forNumber( operationCase_); } public Builder clearOperation() { operationCase_ = 0; operation_ = null; onChanged(); return this; } private int bitField0_; private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * @return Whether the updateMask field is set. 
*/ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000001) != 0); } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder setUpdateMask( com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * <pre> * FieldMask that determines which resource fields are modified in an update. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000001); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000001; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * <pre> * FieldMask that determines which resource fields are modified in an update. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } private com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v20.resources.ConversionGoalCampaignConfig, com.google.ads.googleads.v20.resources.ConversionGoalCampaignConfig.Builder, com.google.ads.googleads.v20.resources.ConversionGoalCampaignConfigOrBuilder> updateBuilder_; /** * <pre> * Update operation: The conversion goal campaign config is expected to have * a valid resource name. * </pre> * * <code>.google.ads.googleads.v20.resources.ConversionGoalCampaignConfig update = 1;</code> * @return Whether the update field is set. */ @java.lang.Override public boolean hasUpdate() { return operationCase_ == 1; } /** * <pre> * Update operation: The conversion goal campaign config is expected to have * a valid resource name. * </pre> * * <code>.google.ads.googleads.v20.resources.ConversionGoalCampaignConfig update = 1;</code> * @return The update. 
*/ @java.lang.Override public com.google.ads.googleads.v20.resources.ConversionGoalCampaignConfig getUpdate() { if (updateBuilder_ == null) { if (operationCase_ == 1) { return (com.google.ads.googleads.v20.resources.ConversionGoalCampaignConfig) operation_; } return com.google.ads.googleads.v20.resources.ConversionGoalCampaignConfig.getDefaultInstance(); } else { if (operationCase_ == 1) { return updateBuilder_.getMessage(); } return com.google.ads.googleads.v20.resources.ConversionGoalCampaignConfig.getDefaultInstance(); } } /** * <pre> * Update operation: The conversion goal campaign config is expected to have * a valid resource name. * </pre> * * <code>.google.ads.googleads.v20.resources.ConversionGoalCampaignConfig update = 1;</code> */ public Builder setUpdate(com.google.ads.googleads.v20.resources.ConversionGoalCampaignConfig value) { if (updateBuilder_ == null) { if (value == null) { throw new NullPointerException(); } operation_ = value; onChanged(); } else { updateBuilder_.setMessage(value); } operationCase_ = 1; return this; } /** * <pre> * Update operation: The conversion goal campaign config is expected to have * a valid resource name. * </pre> * * <code>.google.ads.googleads.v20.resources.ConversionGoalCampaignConfig update = 1;</code> */ public Builder setUpdate( com.google.ads.googleads.v20.resources.ConversionGoalCampaignConfig.Builder builderForValue) { if (updateBuilder_ == null) { operation_ = builderForValue.build(); onChanged(); } else { updateBuilder_.setMessage(builderForValue.build()); } operationCase_ = 1; return this; } /** * <pre> * Update operation: The conversion goal campaign config is expected to have * a valid resource name. 
* </pre> * * <code>.google.ads.googleads.v20.resources.ConversionGoalCampaignConfig update = 1;</code> */ public Builder mergeUpdate(com.google.ads.googleads.v20.resources.ConversionGoalCampaignConfig value) { if (updateBuilder_ == null) { if (operationCase_ == 1 && operation_ != com.google.ads.googleads.v20.resources.ConversionGoalCampaignConfig.getDefaultInstance()) { operation_ = com.google.ads.googleads.v20.resources.ConversionGoalCampaignConfig.newBuilder((com.google.ads.googleads.v20.resources.ConversionGoalCampaignConfig) operation_) .mergeFrom(value).buildPartial(); } else { operation_ = value; } onChanged(); } else { if (operationCase_ == 1) { updateBuilder_.mergeFrom(value); } else { updateBuilder_.setMessage(value); } } operationCase_ = 1; return this; } /** * <pre> * Update operation: The conversion goal campaign config is expected to have * a valid resource name. * </pre> * * <code>.google.ads.googleads.v20.resources.ConversionGoalCampaignConfig update = 1;</code> */ public Builder clearUpdate() { if (updateBuilder_ == null) { if (operationCase_ == 1) { operationCase_ = 0; operation_ = null; onChanged(); } } else { if (operationCase_ == 1) { operationCase_ = 0; operation_ = null; } updateBuilder_.clear(); } return this; } /** * <pre> * Update operation: The conversion goal campaign config is expected to have * a valid resource name. * </pre> * * <code>.google.ads.googleads.v20.resources.ConversionGoalCampaignConfig update = 1;</code> */ public com.google.ads.googleads.v20.resources.ConversionGoalCampaignConfig.Builder getUpdateBuilder() { return getUpdateFieldBuilder().getBuilder(); } /** * <pre> * Update operation: The conversion goal campaign config is expected to have * a valid resource name. 
* </pre> * * <code>.google.ads.googleads.v20.resources.ConversionGoalCampaignConfig update = 1;</code> */ @java.lang.Override public com.google.ads.googleads.v20.resources.ConversionGoalCampaignConfigOrBuilder getUpdateOrBuilder() { if ((operationCase_ == 1) && (updateBuilder_ != null)) { return updateBuilder_.getMessageOrBuilder(); } else { if (operationCase_ == 1) { return (com.google.ads.googleads.v20.resources.ConversionGoalCampaignConfig) operation_; } return com.google.ads.googleads.v20.resources.ConversionGoalCampaignConfig.getDefaultInstance(); } } /** * <pre> * Update operation: The conversion goal campaign config is expected to have * a valid resource name. * </pre> * * <code>.google.ads.googleads.v20.resources.ConversionGoalCampaignConfig update = 1;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v20.resources.ConversionGoalCampaignConfig, com.google.ads.googleads.v20.resources.ConversionGoalCampaignConfig.Builder, com.google.ads.googleads.v20.resources.ConversionGoalCampaignConfigOrBuilder> getUpdateFieldBuilder() { if (updateBuilder_ == null) { if (!(operationCase_ == 1)) { operation_ = com.google.ads.googleads.v20.resources.ConversionGoalCampaignConfig.getDefaultInstance(); } updateBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v20.resources.ConversionGoalCampaignConfig, com.google.ads.googleads.v20.resources.ConversionGoalCampaignConfig.Builder, com.google.ads.googleads.v20.resources.ConversionGoalCampaignConfigOrBuilder>( (com.google.ads.googleads.v20.resources.ConversionGoalCampaignConfig) operation_, getParentForChildren(), isClean()); operation_ = null; } operationCase_ = 1; onChanged(); return updateBuilder_; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final 
com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v20.services.ConversionGoalCampaignConfigOperation) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v20.services.ConversionGoalCampaignConfigOperation) private static final com.google.ads.googleads.v20.services.ConversionGoalCampaignConfigOperation DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v20.services.ConversionGoalCampaignConfigOperation(); } public static com.google.ads.googleads.v20.services.ConversionGoalCampaignConfigOperation getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ConversionGoalCampaignConfigOperation> PARSER = new com.google.protobuf.AbstractParser<ConversionGoalCampaignConfigOperation>() { @java.lang.Override public ConversionGoalCampaignConfigOperation parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ConversionGoalCampaignConfigOperation> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ConversionGoalCampaignConfigOperation> getParserForType() { return PARSER; } @java.lang.Override public 
com.google.ads.googleads.v20.services.ConversionGoalCampaignConfigOperation getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleads/google-ads-java
36,712
google-ads-stubs-v21/src/main/java/com/google/ads/googleads/v21/services/ConversionGoalCampaignConfigOperation.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v21/services/conversion_goal_campaign_config_service.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v21.services; /** * <pre> * A single operation (update) on a conversion goal campaign config. * </pre> * * Protobuf type {@code google.ads.googleads.v21.services.ConversionGoalCampaignConfigOperation} */ public final class ConversionGoalCampaignConfigOperation extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v21.services.ConversionGoalCampaignConfigOperation) ConversionGoalCampaignConfigOperationOrBuilder { private static final long serialVersionUID = 0L; // Use ConversionGoalCampaignConfigOperation.newBuilder() to construct. private ConversionGoalCampaignConfigOperation(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ConversionGoalCampaignConfigOperation() { } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new ConversionGoalCampaignConfigOperation(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v21.services.ConversionGoalCampaignConfigServiceProto.internal_static_google_ads_googleads_v21_services_ConversionGoalCampaignConfigOperation_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v21.services.ConversionGoalCampaignConfigServiceProto.internal_static_google_ads_googleads_v21_services_ConversionGoalCampaignConfigOperation_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v21.services.ConversionGoalCampaignConfigOperation.class, com.google.ads.googleads.v21.services.ConversionGoalCampaignConfigOperation.Builder.class); } private int bitField0_; 
private int operationCase_ = 0; @SuppressWarnings("serial") private java.lang.Object operation_; public enum OperationCase implements com.google.protobuf.Internal.EnumLite, com.google.protobuf.AbstractMessage.InternalOneOfEnum { UPDATE(1), OPERATION_NOT_SET(0); private final int value; private OperationCase(int value) { this.value = value; } /** * @param value The number of the enum to look for. * @return The enum associated with the given number. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static OperationCase valueOf(int value) { return forNumber(value); } public static OperationCase forNumber(int value) { switch (value) { case 1: return UPDATE; case 0: return OPERATION_NOT_SET; default: return null; } } public int getNumber() { return this.value; } }; public OperationCase getOperationCase() { return OperationCase.forNumber( operationCase_); } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000001) != 0); } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? 
com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } public static final int UPDATE_FIELD_NUMBER = 1; /** * <pre> * Update operation: The conversion goal campaign config is expected to have * a valid resource name. * </pre> * * <code>.google.ads.googleads.v21.resources.ConversionGoalCampaignConfig update = 1;</code> * @return Whether the update field is set. */ @java.lang.Override public boolean hasUpdate() { return operationCase_ == 1; } /** * <pre> * Update operation: The conversion goal campaign config is expected to have * a valid resource name. * </pre> * * <code>.google.ads.googleads.v21.resources.ConversionGoalCampaignConfig update = 1;</code> * @return The update. */ @java.lang.Override public com.google.ads.googleads.v21.resources.ConversionGoalCampaignConfig getUpdate() { if (operationCase_ == 1) { return (com.google.ads.googleads.v21.resources.ConversionGoalCampaignConfig) operation_; } return com.google.ads.googleads.v21.resources.ConversionGoalCampaignConfig.getDefaultInstance(); } /** * <pre> * Update operation: The conversion goal campaign config is expected to have * a valid resource name. 
* </pre> * * <code>.google.ads.googleads.v21.resources.ConversionGoalCampaignConfig update = 1;</code> */ @java.lang.Override public com.google.ads.googleads.v21.resources.ConversionGoalCampaignConfigOrBuilder getUpdateOrBuilder() { if (operationCase_ == 1) { return (com.google.ads.googleads.v21.resources.ConversionGoalCampaignConfig) operation_; } return com.google.ads.googleads.v21.resources.ConversionGoalCampaignConfig.getDefaultInstance(); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (operationCase_ == 1) { output.writeMessage(1, (com.google.ads.googleads.v21.resources.ConversionGoalCampaignConfig) operation_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (operationCase_ == 1) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, (com.google.ads.googleads.v21.resources.ConversionGoalCampaignConfig) operation_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v21.services.ConversionGoalCampaignConfigOperation)) { return super.equals(obj); } com.google.ads.googleads.v21.services.ConversionGoalCampaignConfigOperation other = (com.google.ads.googleads.v21.services.ConversionGoalCampaignConfigOperation) obj; if 
(hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask() .equals(other.getUpdateMask())) return false; } if (!getOperationCase().equals(other.getOperationCase())) return false; switch (operationCase_) { case 1: if (!getUpdate() .equals(other.getUpdate())) return false; break; case 0: default: } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } switch (operationCase_) { case 1: hash = (37 * hash) + UPDATE_FIELD_NUMBER; hash = (53 * hash) + getUpdate().hashCode(); break; case 0: default: } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v21.services.ConversionGoalCampaignConfigOperation parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v21.services.ConversionGoalCampaignConfigOperation parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v21.services.ConversionGoalCampaignConfigOperation parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v21.services.ConversionGoalCampaignConfigOperation parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, 
extensionRegistry); } public static com.google.ads.googleads.v21.services.ConversionGoalCampaignConfigOperation parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v21.services.ConversionGoalCampaignConfigOperation parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v21.services.ConversionGoalCampaignConfigOperation parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v21.services.ConversionGoalCampaignConfigOperation parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v21.services.ConversionGoalCampaignConfigOperation parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v21.services.ConversionGoalCampaignConfigOperation parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v21.services.ConversionGoalCampaignConfigOperation parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static 
com.google.ads.googleads.v21.services.ConversionGoalCampaignConfigOperation parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v21.services.ConversionGoalCampaignConfigOperation prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * A single operation (update) on a conversion goal campaign config. 
* </pre> * * Protobuf type {@code google.ads.googleads.v21.services.ConversionGoalCampaignConfigOperation} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v21.services.ConversionGoalCampaignConfigOperation) com.google.ads.googleads.v21.services.ConversionGoalCampaignConfigOperationOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v21.services.ConversionGoalCampaignConfigServiceProto.internal_static_google_ads_googleads_v21_services_ConversionGoalCampaignConfigOperation_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v21.services.ConversionGoalCampaignConfigServiceProto.internal_static_google_ads_googleads_v21_services_ConversionGoalCampaignConfigOperation_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v21.services.ConversionGoalCampaignConfigOperation.class, com.google.ads.googleads.v21.services.ConversionGoalCampaignConfigOperation.Builder.class); } // Construct using com.google.ads.googleads.v21.services.ConversionGoalCampaignConfigOperation.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } if (updateBuilder_ != null) { updateBuilder_.clear(); } operationCase_ = 0; operation_ = null; return this; } @java.lang.Override public 
com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.googleads.v21.services.ConversionGoalCampaignConfigServiceProto.internal_static_google_ads_googleads_v21_services_ConversionGoalCampaignConfigOperation_descriptor; } @java.lang.Override public com.google.ads.googleads.v21.services.ConversionGoalCampaignConfigOperation getDefaultInstanceForType() { return com.google.ads.googleads.v21.services.ConversionGoalCampaignConfigOperation.getDefaultInstance(); } @java.lang.Override public com.google.ads.googleads.v21.services.ConversionGoalCampaignConfigOperation build() { com.google.ads.googleads.v21.services.ConversionGoalCampaignConfigOperation result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v21.services.ConversionGoalCampaignConfigOperation buildPartial() { com.google.ads.googleads.v21.services.ConversionGoalCampaignConfigOperation result = new com.google.ads.googleads.v21.services.ConversionGoalCampaignConfigOperation(this); if (bitField0_ != 0) { buildPartial0(result); } buildPartialOneofs(result); onBuilt(); return result; } private void buildPartial0(com.google.ads.googleads.v21.services.ConversionGoalCampaignConfigOperation result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? 
updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } private void buildPartialOneofs(com.google.ads.googleads.v21.services.ConversionGoalCampaignConfigOperation result) { result.operationCase_ = operationCase_; result.operation_ = this.operation_; if (operationCase_ == 1 && updateBuilder_ != null) { result.operation_ = updateBuilder_.build(); } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v21.services.ConversionGoalCampaignConfigOperation) { return mergeFrom((com.google.ads.googleads.v21.services.ConversionGoalCampaignConfigOperation)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v21.services.ConversionGoalCampaignConfigOperation other) { if (other == com.google.ads.googleads.v21.services.ConversionGoalCampaignConfigOperation.getDefaultInstance()) return this; if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } switch (other.getOperationCase()) { case UPDATE: { 
mergeUpdate(other.getUpdate()); break; } case OPERATION_NOT_SET: { break; } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage( getUpdateFieldBuilder().getBuilder(), extensionRegistry); operationCase_ = 1; break; } // case 10 case 18: { input.readMessage( getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int operationCase_ = 0; private java.lang.Object operation_; public OperationCase getOperationCase() { return OperationCase.forNumber( operationCase_); } public Builder clearOperation() { operationCase_ = 0; operation_ = null; onChanged(); return this; } private int bitField0_; private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * @return Whether the updateMask field is set. 
*/ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000001) != 0); } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder setUpdateMask( com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * <pre> * FieldMask that determines which resource fields are modified in an update. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000001); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000001; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * <pre> * FieldMask that determines which resource fields are modified in an update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * <pre> * FieldMask that determines which resource fields are modified in an update. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } private com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v21.resources.ConversionGoalCampaignConfig, com.google.ads.googleads.v21.resources.ConversionGoalCampaignConfig.Builder, com.google.ads.googleads.v21.resources.ConversionGoalCampaignConfigOrBuilder> updateBuilder_; /** * <pre> * Update operation: The conversion goal campaign config is expected to have * a valid resource name. * </pre> * * <code>.google.ads.googleads.v21.resources.ConversionGoalCampaignConfig update = 1;</code> * @return Whether the update field is set. */ @java.lang.Override public boolean hasUpdate() { return operationCase_ == 1; } /** * <pre> * Update operation: The conversion goal campaign config is expected to have * a valid resource name. * </pre> * * <code>.google.ads.googleads.v21.resources.ConversionGoalCampaignConfig update = 1;</code> * @return The update. 
*/ @java.lang.Override public com.google.ads.googleads.v21.resources.ConversionGoalCampaignConfig getUpdate() { if (updateBuilder_ == null) { if (operationCase_ == 1) { return (com.google.ads.googleads.v21.resources.ConversionGoalCampaignConfig) operation_; } return com.google.ads.googleads.v21.resources.ConversionGoalCampaignConfig.getDefaultInstance(); } else { if (operationCase_ == 1) { return updateBuilder_.getMessage(); } return com.google.ads.googleads.v21.resources.ConversionGoalCampaignConfig.getDefaultInstance(); } } /** * <pre> * Update operation: The conversion goal campaign config is expected to have * a valid resource name. * </pre> * * <code>.google.ads.googleads.v21.resources.ConversionGoalCampaignConfig update = 1;</code> */ public Builder setUpdate(com.google.ads.googleads.v21.resources.ConversionGoalCampaignConfig value) { if (updateBuilder_ == null) { if (value == null) { throw new NullPointerException(); } operation_ = value; onChanged(); } else { updateBuilder_.setMessage(value); } operationCase_ = 1; return this; } /** * <pre> * Update operation: The conversion goal campaign config is expected to have * a valid resource name. * </pre> * * <code>.google.ads.googleads.v21.resources.ConversionGoalCampaignConfig update = 1;</code> */ public Builder setUpdate( com.google.ads.googleads.v21.resources.ConversionGoalCampaignConfig.Builder builderForValue) { if (updateBuilder_ == null) { operation_ = builderForValue.build(); onChanged(); } else { updateBuilder_.setMessage(builderForValue.build()); } operationCase_ = 1; return this; } /** * <pre> * Update operation: The conversion goal campaign config is expected to have * a valid resource name. 
* </pre> * * <code>.google.ads.googleads.v21.resources.ConversionGoalCampaignConfig update = 1;</code> */ public Builder mergeUpdate(com.google.ads.googleads.v21.resources.ConversionGoalCampaignConfig value) { if (updateBuilder_ == null) { if (operationCase_ == 1 && operation_ != com.google.ads.googleads.v21.resources.ConversionGoalCampaignConfig.getDefaultInstance()) { operation_ = com.google.ads.googleads.v21.resources.ConversionGoalCampaignConfig.newBuilder((com.google.ads.googleads.v21.resources.ConversionGoalCampaignConfig) operation_) .mergeFrom(value).buildPartial(); } else { operation_ = value; } onChanged(); } else { if (operationCase_ == 1) { updateBuilder_.mergeFrom(value); } else { updateBuilder_.setMessage(value); } } operationCase_ = 1; return this; } /** * <pre> * Update operation: The conversion goal campaign config is expected to have * a valid resource name. * </pre> * * <code>.google.ads.googleads.v21.resources.ConversionGoalCampaignConfig update = 1;</code> */ public Builder clearUpdate() { if (updateBuilder_ == null) { if (operationCase_ == 1) { operationCase_ = 0; operation_ = null; onChanged(); } } else { if (operationCase_ == 1) { operationCase_ = 0; operation_ = null; } updateBuilder_.clear(); } return this; } /** * <pre> * Update operation: The conversion goal campaign config is expected to have * a valid resource name. * </pre> * * <code>.google.ads.googleads.v21.resources.ConversionGoalCampaignConfig update = 1;</code> */ public com.google.ads.googleads.v21.resources.ConversionGoalCampaignConfig.Builder getUpdateBuilder() { return getUpdateFieldBuilder().getBuilder(); } /** * <pre> * Update operation: The conversion goal campaign config is expected to have * a valid resource name. 
* </pre> * * <code>.google.ads.googleads.v21.resources.ConversionGoalCampaignConfig update = 1;</code> */ @java.lang.Override public com.google.ads.googleads.v21.resources.ConversionGoalCampaignConfigOrBuilder getUpdateOrBuilder() { if ((operationCase_ == 1) && (updateBuilder_ != null)) { return updateBuilder_.getMessageOrBuilder(); } else { if (operationCase_ == 1) { return (com.google.ads.googleads.v21.resources.ConversionGoalCampaignConfig) operation_; } return com.google.ads.googleads.v21.resources.ConversionGoalCampaignConfig.getDefaultInstance(); } } /** * <pre> * Update operation: The conversion goal campaign config is expected to have * a valid resource name. * </pre> * * <code>.google.ads.googleads.v21.resources.ConversionGoalCampaignConfig update = 1;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v21.resources.ConversionGoalCampaignConfig, com.google.ads.googleads.v21.resources.ConversionGoalCampaignConfig.Builder, com.google.ads.googleads.v21.resources.ConversionGoalCampaignConfigOrBuilder> getUpdateFieldBuilder() { if (updateBuilder_ == null) { if (!(operationCase_ == 1)) { operation_ = com.google.ads.googleads.v21.resources.ConversionGoalCampaignConfig.getDefaultInstance(); } updateBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v21.resources.ConversionGoalCampaignConfig, com.google.ads.googleads.v21.resources.ConversionGoalCampaignConfig.Builder, com.google.ads.googleads.v21.resources.ConversionGoalCampaignConfigOrBuilder>( (com.google.ads.googleads.v21.resources.ConversionGoalCampaignConfig) operation_, getParentForChildren(), isClean()); operation_ = null; } operationCase_ = 1; onChanged(); return updateBuilder_; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final 
com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v21.services.ConversionGoalCampaignConfigOperation) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v21.services.ConversionGoalCampaignConfigOperation) private static final com.google.ads.googleads.v21.services.ConversionGoalCampaignConfigOperation DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v21.services.ConversionGoalCampaignConfigOperation(); } public static com.google.ads.googleads.v21.services.ConversionGoalCampaignConfigOperation getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ConversionGoalCampaignConfigOperation> PARSER = new com.google.protobuf.AbstractParser<ConversionGoalCampaignConfigOperation>() { @java.lang.Override public ConversionGoalCampaignConfigOperation parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ConversionGoalCampaignConfigOperation> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ConversionGoalCampaignConfigOperation> getParserForType() { return PARSER; } @java.lang.Override public 
com.google.ads.googleads.v21.services.ConversionGoalCampaignConfigOperation getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
36,563
java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/CreateFeatureMonitorJobRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/aiplatform/v1beta1/feature_registry_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.aiplatform.v1beta1; /** * * * <pre> * Request message for * [FeatureRegistryService.CreateFeatureMonitorJobRequest][]. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1beta1.CreateFeatureMonitorJobRequest} */ public final class CreateFeatureMonitorJobRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.CreateFeatureMonitorJobRequest) CreateFeatureMonitorJobRequestOrBuilder { private static final long serialVersionUID = 0L; // Use CreateFeatureMonitorJobRequest.newBuilder() to construct. 
private CreateFeatureMonitorJobRequest( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CreateFeatureMonitorJobRequest() { parent_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new CreateFeatureMonitorJobRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1beta1.FeatureRegistryServiceProto .internal_static_google_cloud_aiplatform_v1beta1_CreateFeatureMonitorJobRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1beta1.FeatureRegistryServiceProto .internal_static_google_cloud_aiplatform_v1beta1_CreateFeatureMonitorJobRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1beta1.CreateFeatureMonitorJobRequest.class, com.google.cloud.aiplatform.v1beta1.CreateFeatureMonitorJobRequest.Builder.class); } private int bitField0_; public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. The resource name of FeatureMonitor to create FeatureMonitorJob. * Format: * `projects/{project}/locations/{location}/featureGroups/{feature_group}/featureMonitors/{feature_monitor}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. 
The resource name of FeatureMonitor to create FeatureMonitorJob. * Format: * `projects/{project}/locations/{location}/featureGroups/{feature_group}/featureMonitors/{feature_monitor}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int FEATURE_MONITOR_JOB_FIELD_NUMBER = 2; private com.google.cloud.aiplatform.v1beta1.FeatureMonitorJob featureMonitorJob_; /** * * * <pre> * Required. The Monitor to create. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.FeatureMonitorJob feature_monitor_job = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the featureMonitorJob field is set. */ @java.lang.Override public boolean hasFeatureMonitorJob() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The Monitor to create. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.FeatureMonitorJob feature_monitor_job = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The featureMonitorJob. */ @java.lang.Override public com.google.cloud.aiplatform.v1beta1.FeatureMonitorJob getFeatureMonitorJob() { return featureMonitorJob_ == null ? com.google.cloud.aiplatform.v1beta1.FeatureMonitorJob.getDefaultInstance() : featureMonitorJob_; } /** * * * <pre> * Required. The Monitor to create. 
* </pre> * * <code> * .google.cloud.aiplatform.v1beta1.FeatureMonitorJob feature_monitor_job = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.aiplatform.v1beta1.FeatureMonitorJobOrBuilder getFeatureMonitorJobOrBuilder() { return featureMonitorJob_ == null ? com.google.cloud.aiplatform.v1beta1.FeatureMonitorJob.getDefaultInstance() : featureMonitorJob_; } public static final int FEATURE_MONITOR_JOB_ID_FIELD_NUMBER = 3; private long featureMonitorJobId_ = 0L; /** * * * <pre> * Optional. Output only. System-generated ID for feature monitor job. * </pre> * * <code> * int64 feature_monitor_job_id = 3 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The featureMonitorJobId. */ @java.lang.Override public long getFeatureMonitorJobId() { return featureMonitorJobId_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getFeatureMonitorJob()); } if (featureMonitorJobId_ != 0L) { output.writeInt64(3, featureMonitorJobId_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, 
getFeatureMonitorJob()); } if (featureMonitorJobId_ != 0L) { size += com.google.protobuf.CodedOutputStream.computeInt64Size(3, featureMonitorJobId_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.CreateFeatureMonitorJobRequest)) { return super.equals(obj); } com.google.cloud.aiplatform.v1beta1.CreateFeatureMonitorJobRequest other = (com.google.cloud.aiplatform.v1beta1.CreateFeatureMonitorJobRequest) obj; if (!getParent().equals(other.getParent())) return false; if (hasFeatureMonitorJob() != other.hasFeatureMonitorJob()) return false; if (hasFeatureMonitorJob()) { if (!getFeatureMonitorJob().equals(other.getFeatureMonitorJob())) return false; } if (getFeatureMonitorJobId() != other.getFeatureMonitorJobId()) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); if (hasFeatureMonitorJob()) { hash = (37 * hash) + FEATURE_MONITOR_JOB_FIELD_NUMBER; hash = (53 * hash) + getFeatureMonitorJob().hashCode(); } hash = (37 * hash) + FEATURE_MONITOR_JOB_ID_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getFeatureMonitorJobId()); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.aiplatform.v1beta1.CreateFeatureMonitorJobRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.CreateFeatureMonitorJobRequest parseFrom( java.nio.ByteBuffer data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.CreateFeatureMonitorJobRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.CreateFeatureMonitorJobRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.CreateFeatureMonitorJobRequest parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.CreateFeatureMonitorJobRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.CreateFeatureMonitorJobRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.CreateFeatureMonitorJobRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.CreateFeatureMonitorJobRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static 
com.google.cloud.aiplatform.v1beta1.CreateFeatureMonitorJobRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.CreateFeatureMonitorJobRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.CreateFeatureMonitorJobRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.aiplatform.v1beta1.CreateFeatureMonitorJobRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for * [FeatureRegistryService.CreateFeatureMonitorJobRequest][]. 
* </pre> * * Protobuf type {@code google.cloud.aiplatform.v1beta1.CreateFeatureMonitorJobRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.CreateFeatureMonitorJobRequest) com.google.cloud.aiplatform.v1beta1.CreateFeatureMonitorJobRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1beta1.FeatureRegistryServiceProto .internal_static_google_cloud_aiplatform_v1beta1_CreateFeatureMonitorJobRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1beta1.FeatureRegistryServiceProto .internal_static_google_cloud_aiplatform_v1beta1_CreateFeatureMonitorJobRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1beta1.CreateFeatureMonitorJobRequest.class, com.google.cloud.aiplatform.v1beta1.CreateFeatureMonitorJobRequest.Builder.class); } // Construct using // com.google.cloud.aiplatform.v1beta1.CreateFeatureMonitorJobRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getFeatureMonitorJobFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; featureMonitorJob_ = null; if (featureMonitorJobBuilder_ != null) { featureMonitorJobBuilder_.dispose(); featureMonitorJobBuilder_ = null; } featureMonitorJobId_ = 0L; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
com.google.cloud.aiplatform.v1beta1.FeatureRegistryServiceProto .internal_static_google_cloud_aiplatform_v1beta1_CreateFeatureMonitorJobRequest_descriptor; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.CreateFeatureMonitorJobRequest getDefaultInstanceForType() { return com.google.cloud.aiplatform.v1beta1.CreateFeatureMonitorJobRequest .getDefaultInstance(); } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.CreateFeatureMonitorJobRequest build() { com.google.cloud.aiplatform.v1beta1.CreateFeatureMonitorJobRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.CreateFeatureMonitorJobRequest buildPartial() { com.google.cloud.aiplatform.v1beta1.CreateFeatureMonitorJobRequest result = new com.google.cloud.aiplatform.v1beta1.CreateFeatureMonitorJobRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.aiplatform.v1beta1.CreateFeatureMonitorJobRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.featureMonitorJob_ = featureMonitorJobBuilder_ == null ? 
featureMonitorJob_ : featureMonitorJobBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000004) != 0)) { result.featureMonitorJobId_ = featureMonitorJobId_; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.aiplatform.v1beta1.CreateFeatureMonitorJobRequest) { return mergeFrom( (com.google.cloud.aiplatform.v1beta1.CreateFeatureMonitorJobRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.aiplatform.v1beta1.CreateFeatureMonitorJobRequest other) { if (other == com.google.cloud.aiplatform.v1beta1.CreateFeatureMonitorJobRequest .getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasFeatureMonitorJob()) { mergeFeatureMonitorJob(other.getFeatureMonitorJob()); } if (other.getFeatureMonitorJobId() != 0L) { setFeatureMonitorJobId(other.getFeatureMonitorJobId()); } this.mergeUnknownFields(other.getUnknownFields()); 
onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage( getFeatureMonitorJobFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 case 24: { featureMonitorJobId_ = input.readInt64(); bitField0_ |= 0x00000004; break; } // case 24 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The resource name of FeatureMonitor to create FeatureMonitorJob. * Format: * `projects/{project}/locations/{location}/featureGroups/{feature_group}/featureMonitors/{feature_monitor}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The resource name of FeatureMonitor to create FeatureMonitorJob. 
* Format: * `projects/{project}/locations/{location}/featureGroups/{feature_group}/featureMonitors/{feature_monitor}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The resource name of FeatureMonitor to create FeatureMonitorJob. * Format: * `projects/{project}/locations/{location}/featureGroups/{feature_group}/featureMonitors/{feature_monitor}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The resource name of FeatureMonitor to create FeatureMonitorJob. * Format: * `projects/{project}/locations/{location}/featureGroups/{feature_group}/featureMonitors/{feature_monitor}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The resource name of FeatureMonitor to create FeatureMonitorJob. 
* Format: * `projects/{project}/locations/{location}/featureGroups/{feature_group}/featureMonitors/{feature_monitor}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private com.google.cloud.aiplatform.v1beta1.FeatureMonitorJob featureMonitorJob_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.FeatureMonitorJob, com.google.cloud.aiplatform.v1beta1.FeatureMonitorJob.Builder, com.google.cloud.aiplatform.v1beta1.FeatureMonitorJobOrBuilder> featureMonitorJobBuilder_; /** * * * <pre> * Required. The Monitor to create. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.FeatureMonitorJob feature_monitor_job = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the featureMonitorJob field is set. */ public boolean hasFeatureMonitorJob() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The Monitor to create. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.FeatureMonitorJob feature_monitor_job = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The featureMonitorJob. */ public com.google.cloud.aiplatform.v1beta1.FeatureMonitorJob getFeatureMonitorJob() { if (featureMonitorJobBuilder_ == null) { return featureMonitorJob_ == null ? com.google.cloud.aiplatform.v1beta1.FeatureMonitorJob.getDefaultInstance() : featureMonitorJob_; } else { return featureMonitorJobBuilder_.getMessage(); } } /** * * * <pre> * Required. The Monitor to create. 
* </pre> * * <code> * .google.cloud.aiplatform.v1beta1.FeatureMonitorJob feature_monitor_job = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setFeatureMonitorJob( com.google.cloud.aiplatform.v1beta1.FeatureMonitorJob value) { if (featureMonitorJobBuilder_ == null) { if (value == null) { throw new NullPointerException(); } featureMonitorJob_ = value; } else { featureMonitorJobBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The Monitor to create. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.FeatureMonitorJob feature_monitor_job = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setFeatureMonitorJob( com.google.cloud.aiplatform.v1beta1.FeatureMonitorJob.Builder builderForValue) { if (featureMonitorJobBuilder_ == null) { featureMonitorJob_ = builderForValue.build(); } else { featureMonitorJobBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The Monitor to create. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.FeatureMonitorJob feature_monitor_job = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeFeatureMonitorJob( com.google.cloud.aiplatform.v1beta1.FeatureMonitorJob value) { if (featureMonitorJobBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && featureMonitorJob_ != null && featureMonitorJob_ != com.google.cloud.aiplatform.v1beta1.FeatureMonitorJob.getDefaultInstance()) { getFeatureMonitorJobBuilder().mergeFrom(value); } else { featureMonitorJob_ = value; } } else { featureMonitorJobBuilder_.mergeFrom(value); } if (featureMonitorJob_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. The Monitor to create. 
* </pre> * * <code> * .google.cloud.aiplatform.v1beta1.FeatureMonitorJob feature_monitor_job = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearFeatureMonitorJob() { bitField0_ = (bitField0_ & ~0x00000002); featureMonitorJob_ = null; if (featureMonitorJobBuilder_ != null) { featureMonitorJobBuilder_.dispose(); featureMonitorJobBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The Monitor to create. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.FeatureMonitorJob feature_monitor_job = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.aiplatform.v1beta1.FeatureMonitorJob.Builder getFeatureMonitorJobBuilder() { bitField0_ |= 0x00000002; onChanged(); return getFeatureMonitorJobFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The Monitor to create. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.FeatureMonitorJob feature_monitor_job = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.aiplatform.v1beta1.FeatureMonitorJobOrBuilder getFeatureMonitorJobOrBuilder() { if (featureMonitorJobBuilder_ != null) { return featureMonitorJobBuilder_.getMessageOrBuilder(); } else { return featureMonitorJob_ == null ? com.google.cloud.aiplatform.v1beta1.FeatureMonitorJob.getDefaultInstance() : featureMonitorJob_; } } /** * * * <pre> * Required. The Monitor to create. 
* </pre> * * <code> * .google.cloud.aiplatform.v1beta1.FeatureMonitorJob feature_monitor_job = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.FeatureMonitorJob, com.google.cloud.aiplatform.v1beta1.FeatureMonitorJob.Builder, com.google.cloud.aiplatform.v1beta1.FeatureMonitorJobOrBuilder> getFeatureMonitorJobFieldBuilder() { if (featureMonitorJobBuilder_ == null) { featureMonitorJobBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.FeatureMonitorJob, com.google.cloud.aiplatform.v1beta1.FeatureMonitorJob.Builder, com.google.cloud.aiplatform.v1beta1.FeatureMonitorJobOrBuilder>( getFeatureMonitorJob(), getParentForChildren(), isClean()); featureMonitorJob_ = null; } return featureMonitorJobBuilder_; } private long featureMonitorJobId_; /** * * * <pre> * Optional. Output only. System-generated ID for feature monitor job. * </pre> * * <code> * int64 feature_monitor_job_id = 3 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The featureMonitorJobId. */ @java.lang.Override public long getFeatureMonitorJobId() { return featureMonitorJobId_; } /** * * * <pre> * Optional. Output only. System-generated ID for feature monitor job. * </pre> * * <code> * int64 feature_monitor_job_id = 3 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @param value The featureMonitorJobId to set. * @return This builder for chaining. */ public Builder setFeatureMonitorJobId(long value) { featureMonitorJobId_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Optional. Output only. System-generated ID for feature monitor job. * </pre> * * <code> * int64 feature_monitor_job_id = 3 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return This builder for chaining. 
*/ public Builder clearFeatureMonitorJobId() { bitField0_ = (bitField0_ & ~0x00000004); featureMonitorJobId_ = 0L; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.CreateFeatureMonitorJobRequest) } // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.CreateFeatureMonitorJobRequest) private static final com.google.cloud.aiplatform.v1beta1.CreateFeatureMonitorJobRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.CreateFeatureMonitorJobRequest(); } public static com.google.cloud.aiplatform.v1beta1.CreateFeatureMonitorJobRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CreateFeatureMonitorJobRequest> PARSER = new com.google.protobuf.AbstractParser<CreateFeatureMonitorJobRequest>() { @java.lang.Override public CreateFeatureMonitorJobRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static 
com.google.protobuf.Parser<CreateFeatureMonitorJobRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CreateFeatureMonitorJobRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.CreateFeatureMonitorJobRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
36,434
java-shopping-merchant-quota/proto-google-shopping-merchant-quota-v1/src/main/java/com/google/shopping/merchant/quota/v1/MethodDetails.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/shopping/merchant/quota/v1/quota.proto // Protobuf Java Version: 3.25.8 package com.google.shopping.merchant.quota.v1; /** * * * <pre> * The method details per method in the Merchant API. * </pre> * * Protobuf type {@code google.shopping.merchant.quota.v1.MethodDetails} */ public final class MethodDetails extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.shopping.merchant.quota.v1.MethodDetails) MethodDetailsOrBuilder { private static final long serialVersionUID = 0L; // Use MethodDetails.newBuilder() to construct. 
private MethodDetails(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private MethodDetails() { method_ = ""; version_ = ""; subapi_ = ""; path_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new MethodDetails(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.shopping.merchant.quota.v1.QuotaProto .internal_static_google_shopping_merchant_quota_v1_MethodDetails_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.shopping.merchant.quota.v1.QuotaProto .internal_static_google_shopping_merchant_quota_v1_MethodDetails_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.shopping.merchant.quota.v1.MethodDetails.class, com.google.shopping.merchant.quota.v1.MethodDetails.Builder.class); } public static final int METHOD_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object method_ = ""; /** * * * <pre> * Output only. The name of the method for example `products.list`. * </pre> * * <code>string method = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The method. */ @java.lang.Override public java.lang.String getMethod() { java.lang.Object ref = method_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); method_ = s; return s; } } /** * * * <pre> * Output only. The name of the method for example `products.list`. * </pre> * * <code>string method = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The bytes for method. 
*/ @java.lang.Override public com.google.protobuf.ByteString getMethodBytes() { java.lang.Object ref = method_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); method_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int VERSION_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object version_ = ""; /** * * * <pre> * Output only. The API version that the method belongs to. * </pre> * * <code>string version = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The version. */ @java.lang.Override public java.lang.String getVersion() { java.lang.Object ref = version_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); version_ = s; return s; } } /** * * * <pre> * Output only. The API version that the method belongs to. * </pre> * * <code>string version = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The bytes for version. */ @java.lang.Override public com.google.protobuf.ByteString getVersionBytes() { java.lang.Object ref = version_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); version_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int SUBAPI_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object subapi_ = ""; /** * * * <pre> * Output only. The sub-API that the method belongs to. * </pre> * * <code>string subapi = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The subapi. 
*/ @java.lang.Override public java.lang.String getSubapi() { java.lang.Object ref = subapi_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); subapi_ = s; return s; } } /** * * * <pre> * Output only. The sub-API that the method belongs to. * </pre> * * <code>string subapi = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The bytes for subapi. */ @java.lang.Override public com.google.protobuf.ByteString getSubapiBytes() { java.lang.Object ref = subapi_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); subapi_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PATH_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object path_ = ""; /** * * * <pre> * Output only. The path for the method such as * `products/v1/productInputs.insert` * </pre> * * <code>string path = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The path. */ @java.lang.Override public java.lang.String getPath() { java.lang.Object ref = path_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); path_ = s; return s; } } /** * * * <pre> * Output only. The path for the method such as * `products/v1/productInputs.insert` * </pre> * * <code>string path = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The bytes for path. 
*/ @java.lang.Override public com.google.protobuf.ByteString getPathBytes() { java.lang.Object ref = path_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); path_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(method_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, method_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(version_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, version_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(subapi_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, subapi_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(path_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, path_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(method_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, method_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(version_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, version_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(subapi_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, subapi_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(path_)) { size += 
com.google.protobuf.GeneratedMessageV3.computeStringSize(4, path_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.shopping.merchant.quota.v1.MethodDetails)) { return super.equals(obj); } com.google.shopping.merchant.quota.v1.MethodDetails other = (com.google.shopping.merchant.quota.v1.MethodDetails) obj; if (!getMethod().equals(other.getMethod())) return false; if (!getVersion().equals(other.getVersion())) return false; if (!getSubapi().equals(other.getSubapi())) return false; if (!getPath().equals(other.getPath())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + METHOD_FIELD_NUMBER; hash = (53 * hash) + getMethod().hashCode(); hash = (37 * hash) + VERSION_FIELD_NUMBER; hash = (53 * hash) + getVersion().hashCode(); hash = (37 * hash) + SUBAPI_FIELD_NUMBER; hash = (53 * hash) + getSubapi().hashCode(); hash = (37 * hash) + PATH_FIELD_NUMBER; hash = (53 * hash) + getPath().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.shopping.merchant.quota.v1.MethodDetails parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.shopping.merchant.quota.v1.MethodDetails parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.shopping.merchant.quota.v1.MethodDetails parseFrom( com.google.protobuf.ByteString data) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.shopping.merchant.quota.v1.MethodDetails parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.shopping.merchant.quota.v1.MethodDetails parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.shopping.merchant.quota.v1.MethodDetails parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.shopping.merchant.quota.v1.MethodDetails parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.shopping.merchant.quota.v1.MethodDetails parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.shopping.merchant.quota.v1.MethodDetails parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.shopping.merchant.quota.v1.MethodDetails parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.shopping.merchant.quota.v1.MethodDetails parseFrom( 
com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.shopping.merchant.quota.v1.MethodDetails parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.shopping.merchant.quota.v1.MethodDetails prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The method details per method in the Merchant API. 
* </pre> * * Protobuf type {@code google.shopping.merchant.quota.v1.MethodDetails} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.shopping.merchant.quota.v1.MethodDetails) com.google.shopping.merchant.quota.v1.MethodDetailsOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.shopping.merchant.quota.v1.QuotaProto .internal_static_google_shopping_merchant_quota_v1_MethodDetails_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.shopping.merchant.quota.v1.QuotaProto .internal_static_google_shopping_merchant_quota_v1_MethodDetails_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.shopping.merchant.quota.v1.MethodDetails.class, com.google.shopping.merchant.quota.v1.MethodDetails.Builder.class); } // Construct using com.google.shopping.merchant.quota.v1.MethodDetails.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; method_ = ""; version_ = ""; subapi_ = ""; path_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.shopping.merchant.quota.v1.QuotaProto .internal_static_google_shopping_merchant_quota_v1_MethodDetails_descriptor; } @java.lang.Override public com.google.shopping.merchant.quota.v1.MethodDetails getDefaultInstanceForType() { return com.google.shopping.merchant.quota.v1.MethodDetails.getDefaultInstance(); } @java.lang.Override public com.google.shopping.merchant.quota.v1.MethodDetails build() { com.google.shopping.merchant.quota.v1.MethodDetails result = buildPartial(); if (!result.isInitialized()) { throw 
newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.shopping.merchant.quota.v1.MethodDetails buildPartial() { com.google.shopping.merchant.quota.v1.MethodDetails result = new com.google.shopping.merchant.quota.v1.MethodDetails(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.shopping.merchant.quota.v1.MethodDetails result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.method_ = method_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.version_ = version_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.subapi_ = subapi_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.path_ = path_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.shopping.merchant.quota.v1.MethodDetails) { return mergeFrom((com.google.shopping.merchant.quota.v1.MethodDetails) other); } else { super.mergeFrom(other); return this; } } public Builder 
mergeFrom(com.google.shopping.merchant.quota.v1.MethodDetails other) { if (other == com.google.shopping.merchant.quota.v1.MethodDetails.getDefaultInstance()) return this; if (!other.getMethod().isEmpty()) { method_ = other.method_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getVersion().isEmpty()) { version_ = other.version_; bitField0_ |= 0x00000002; onChanged(); } if (!other.getSubapi().isEmpty()) { subapi_ = other.subapi_; bitField0_ |= 0x00000004; onChanged(); } if (!other.getPath().isEmpty()) { path_ = other.path_; bitField0_ |= 0x00000008; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { method_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { version_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 26: { subapi_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 case 34: { path_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000008; break; } // case 34 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object method_ = ""; /** * * * <pre> * Output only. The name of the method for example `products.list`. 
* </pre> * * <code>string method = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The method. */ public java.lang.String getMethod() { java.lang.Object ref = method_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); method_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Output only. The name of the method for example `products.list`. * </pre> * * <code>string method = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The bytes for method. */ public com.google.protobuf.ByteString getMethodBytes() { java.lang.Object ref = method_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); method_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Output only. The name of the method for example `products.list`. * </pre> * * <code>string method = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @param value The method to set. * @return This builder for chaining. */ public Builder setMethod(java.lang.String value) { if (value == null) { throw new NullPointerException(); } method_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Output only. The name of the method for example `products.list`. * </pre> * * <code>string method = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return This builder for chaining. */ public Builder clearMethod() { method_ = getDefaultInstance().getMethod(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Output only. The name of the method for example `products.list`. * </pre> * * <code>string method = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @param value The bytes for method to set. * @return This builder for chaining. 
*/ public Builder setMethodBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); method_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object version_ = ""; /** * * * <pre> * Output only. The API version that the method belongs to. * </pre> * * <code>string version = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The version. */ public java.lang.String getVersion() { java.lang.Object ref = version_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); version_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Output only. The API version that the method belongs to. * </pre> * * <code>string version = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The bytes for version. */ public com.google.protobuf.ByteString getVersionBytes() { java.lang.Object ref = version_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); version_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Output only. The API version that the method belongs to. * </pre> * * <code>string version = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @param value The version to set. * @return This builder for chaining. */ public Builder setVersion(java.lang.String value) { if (value == null) { throw new NullPointerException(); } version_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Output only. The API version that the method belongs to. * </pre> * * <code>string version = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return This builder for chaining. 
*/ public Builder clearVersion() { version_ = getDefaultInstance().getVersion(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Output only. The API version that the method belongs to. * </pre> * * <code>string version = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @param value The bytes for version to set. * @return This builder for chaining. */ public Builder setVersionBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); version_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private java.lang.Object subapi_ = ""; /** * * * <pre> * Output only. The sub-API that the method belongs to. * </pre> * * <code>string subapi = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The subapi. */ public java.lang.String getSubapi() { java.lang.Object ref = subapi_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); subapi_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Output only. The sub-API that the method belongs to. * </pre> * * <code>string subapi = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The bytes for subapi. */ public com.google.protobuf.ByteString getSubapiBytes() { java.lang.Object ref = subapi_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); subapi_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Output only. The sub-API that the method belongs to. * </pre> * * <code>string subapi = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @param value The subapi to set. * @return This builder for chaining. 
*/ public Builder setSubapi(java.lang.String value) { if (value == null) { throw new NullPointerException(); } subapi_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Output only. The sub-API that the method belongs to. * </pre> * * <code>string subapi = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return This builder for chaining. */ public Builder clearSubapi() { subapi_ = getDefaultInstance().getSubapi(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * Output only. The sub-API that the method belongs to. * </pre> * * <code>string subapi = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @param value The bytes for subapi to set. * @return This builder for chaining. */ public Builder setSubapiBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); subapi_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } private java.lang.Object path_ = ""; /** * * * <pre> * Output only. The path for the method such as * `products/v1/productInputs.insert` * </pre> * * <code>string path = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The path. */ public java.lang.String getPath() { java.lang.Object ref = path_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); path_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Output only. The path for the method such as * `products/v1/productInputs.insert` * </pre> * * <code>string path = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The bytes for path. 
*/ public com.google.protobuf.ByteString getPathBytes() { java.lang.Object ref = path_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); path_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Output only. The path for the method such as * `products/v1/productInputs.insert` * </pre> * * <code>string path = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @param value The path to set. * @return This builder for chaining. */ public Builder setPath(java.lang.String value) { if (value == null) { throw new NullPointerException(); } path_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * Output only. The path for the method such as * `products/v1/productInputs.insert` * </pre> * * <code>string path = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return This builder for chaining. */ public Builder clearPath() { path_ = getDefaultInstance().getPath(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * * * <pre> * Output only. The path for the method such as * `products/v1/productInputs.insert` * </pre> * * <code>string path = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @param value The bytes for path to set. * @return This builder for chaining. 
*/ public Builder setPathBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); path_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.shopping.merchant.quota.v1.MethodDetails) } // @@protoc_insertion_point(class_scope:google.shopping.merchant.quota.v1.MethodDetails) private static final com.google.shopping.merchant.quota.v1.MethodDetails DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.shopping.merchant.quota.v1.MethodDetails(); } public static com.google.shopping.merchant.quota.v1.MethodDetails getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<MethodDetails> PARSER = new com.google.protobuf.AbstractParser<MethodDetails>() { @java.lang.Override public MethodDetails parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<MethodDetails> parser() { 
return PARSER; } @java.lang.Override public com.google.protobuf.Parser<MethodDetails> getParserForType() { return PARSER; } @java.lang.Override public com.google.shopping.merchant.quota.v1.MethodDetails getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
36,508
java-securityposture/proto-google-cloud-securityposture-v1/src/main/java/com/google/cloud/securityposture/v1/ListPostureRevisionsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/securityposture/v1/securityposture.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.securityposture.v1; /** * * * <pre> * Message for response to listing PostureRevisions. * </pre> * * Protobuf type {@code google.cloud.securityposture.v1.ListPostureRevisionsResponse} */ public final class ListPostureRevisionsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.securityposture.v1.ListPostureRevisionsResponse) ListPostureRevisionsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListPostureRevisionsResponse.newBuilder() to construct. 
private ListPostureRevisionsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListPostureRevisionsResponse() { revisions_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListPostureRevisionsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.securityposture.v1.V1mainProto .internal_static_google_cloud_securityposture_v1_ListPostureRevisionsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.securityposture.v1.V1mainProto .internal_static_google_cloud_securityposture_v1_ListPostureRevisionsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.securityposture.v1.ListPostureRevisionsResponse.class, com.google.cloud.securityposture.v1.ListPostureRevisionsResponse.Builder.class); } public static final int REVISIONS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.securityposture.v1.Posture> revisions_; /** * * * <pre> * The list of Posture revisions. * </pre> * * <code>repeated .google.cloud.securityposture.v1.Posture revisions = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.securityposture.v1.Posture> getRevisionsList() { return revisions_; } /** * * * <pre> * The list of Posture revisions. * </pre> * * <code>repeated .google.cloud.securityposture.v1.Posture revisions = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.securityposture.v1.PostureOrBuilder> getRevisionsOrBuilderList() { return revisions_; } /** * * * <pre> * The list of Posture revisions. 
* </pre> * * <code>repeated .google.cloud.securityposture.v1.Posture revisions = 1;</code> */ @java.lang.Override public int getRevisionsCount() { return revisions_.size(); } /** * * * <pre> * The list of Posture revisions. * </pre> * * <code>repeated .google.cloud.securityposture.v1.Posture revisions = 1;</code> */ @java.lang.Override public com.google.cloud.securityposture.v1.Posture getRevisions(int index) { return revisions_.get(index); } /** * * * <pre> * The list of Posture revisions. * </pre> * * <code>repeated .google.cloud.securityposture.v1.Posture revisions = 1;</code> */ @java.lang.Override public com.google.cloud.securityposture.v1.PostureOrBuilder getRevisionsOrBuilder(int index) { return revisions_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < revisions_.size(); i++) { output.writeMessage(1, revisions_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < revisions_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, revisions_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.securityposture.v1.ListPostureRevisionsResponse)) { return super.equals(obj); } com.google.cloud.securityposture.v1.ListPostureRevisionsResponse other = (com.google.cloud.securityposture.v1.ListPostureRevisionsResponse) obj; if (!getRevisionsList().equals(other.getRevisionsList())) return false; if 
(!getNextPageToken().equals(other.getNextPageToken())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getRevisionsCount() > 0) { hash = (37 * hash) + REVISIONS_FIELD_NUMBER; hash = (53 * hash) + getRevisionsList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.securityposture.v1.ListPostureRevisionsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securityposture.v1.ListPostureRevisionsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securityposture.v1.ListPostureRevisionsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securityposture.v1.ListPostureRevisionsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securityposture.v1.ListPostureRevisionsResponse parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securityposture.v1.ListPostureRevisionsResponse parseFrom( byte[] data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securityposture.v1.ListPostureRevisionsResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.securityposture.v1.ListPostureRevisionsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.securityposture.v1.ListPostureRevisionsResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.securityposture.v1.ListPostureRevisionsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.securityposture.v1.ListPostureRevisionsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.securityposture.v1.ListPostureRevisionsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return 
DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.securityposture.v1.ListPostureRevisionsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Message for response to listing PostureRevisions. * </pre> * * Protobuf type {@code google.cloud.securityposture.v1.ListPostureRevisionsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.securityposture.v1.ListPostureRevisionsResponse) com.google.cloud.securityposture.v1.ListPostureRevisionsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.securityposture.v1.V1mainProto .internal_static_google_cloud_securityposture_v1_ListPostureRevisionsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.securityposture.v1.V1mainProto .internal_static_google_cloud_securityposture_v1_ListPostureRevisionsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.securityposture.v1.ListPostureRevisionsResponse.class, com.google.cloud.securityposture.v1.ListPostureRevisionsResponse.Builder.class); } // Construct using com.google.cloud.securityposture.v1.ListPostureRevisionsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (revisionsBuilder_ == null) { revisions_ = 
java.util.Collections.emptyList(); } else { revisions_ = null; revisionsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.securityposture.v1.V1mainProto .internal_static_google_cloud_securityposture_v1_ListPostureRevisionsResponse_descriptor; } @java.lang.Override public com.google.cloud.securityposture.v1.ListPostureRevisionsResponse getDefaultInstanceForType() { return com.google.cloud.securityposture.v1.ListPostureRevisionsResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.securityposture.v1.ListPostureRevisionsResponse build() { com.google.cloud.securityposture.v1.ListPostureRevisionsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.securityposture.v1.ListPostureRevisionsResponse buildPartial() { com.google.cloud.securityposture.v1.ListPostureRevisionsResponse result = new com.google.cloud.securityposture.v1.ListPostureRevisionsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.securityposture.v1.ListPostureRevisionsResponse result) { if (revisionsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { revisions_ = java.util.Collections.unmodifiableList(revisions_); bitField0_ = (bitField0_ & ~0x00000001); } result.revisions_ = revisions_; } else { result.revisions_ = revisionsBuilder_.build(); } } private void buildPartial0( com.google.cloud.securityposture.v1.ListPostureRevisionsResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override 
public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.securityposture.v1.ListPostureRevisionsResponse) { return mergeFrom((com.google.cloud.securityposture.v1.ListPostureRevisionsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.securityposture.v1.ListPostureRevisionsResponse other) { if (other == com.google.cloud.securityposture.v1.ListPostureRevisionsResponse.getDefaultInstance()) return this; if (revisionsBuilder_ == null) { if (!other.revisions_.isEmpty()) { if (revisions_.isEmpty()) { revisions_ = other.revisions_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureRevisionsIsMutable(); revisions_.addAll(other.revisions_); } onChanged(); } } else { if (!other.revisions_.isEmpty()) { if (revisionsBuilder_.isEmpty()) { revisionsBuilder_.dispose(); revisionsBuilder_ = null; revisions_ = other.revisions_; bitField0_ = (bitField0_ & ~0x00000001); revisionsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getRevisionsFieldBuilder() : null; } else { revisionsBuilder_.addAllMessages(other.revisions_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.securityposture.v1.Posture m = input.readMessage( com.google.cloud.securityposture.v1.Posture.parser(), extensionRegistry); if (revisionsBuilder_ == null) { ensureRevisionsIsMutable(); revisions_.add(m); } else { revisionsBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.securityposture.v1.Posture> revisions_ = java.util.Collections.emptyList(); private void ensureRevisionsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { revisions_ = new java.util.ArrayList<com.google.cloud.securityposture.v1.Posture>(revisions_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.securityposture.v1.Posture, com.google.cloud.securityposture.v1.Posture.Builder, 
com.google.cloud.securityposture.v1.PostureOrBuilder> revisionsBuilder_; /** * * * <pre> * The list of Posture revisions. * </pre> * * <code>repeated .google.cloud.securityposture.v1.Posture revisions = 1;</code> */ public java.util.List<com.google.cloud.securityposture.v1.Posture> getRevisionsList() { if (revisionsBuilder_ == null) { return java.util.Collections.unmodifiableList(revisions_); } else { return revisionsBuilder_.getMessageList(); } } /** * * * <pre> * The list of Posture revisions. * </pre> * * <code>repeated .google.cloud.securityposture.v1.Posture revisions = 1;</code> */ public int getRevisionsCount() { if (revisionsBuilder_ == null) { return revisions_.size(); } else { return revisionsBuilder_.getCount(); } } /** * * * <pre> * The list of Posture revisions. * </pre> * * <code>repeated .google.cloud.securityposture.v1.Posture revisions = 1;</code> */ public com.google.cloud.securityposture.v1.Posture getRevisions(int index) { if (revisionsBuilder_ == null) { return revisions_.get(index); } else { return revisionsBuilder_.getMessage(index); } } /** * * * <pre> * The list of Posture revisions. * </pre> * * <code>repeated .google.cloud.securityposture.v1.Posture revisions = 1;</code> */ public Builder setRevisions(int index, com.google.cloud.securityposture.v1.Posture value) { if (revisionsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureRevisionsIsMutable(); revisions_.set(index, value); onChanged(); } else { revisionsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The list of Posture revisions. 
* </pre> * * <code>repeated .google.cloud.securityposture.v1.Posture revisions = 1;</code> */ public Builder setRevisions( int index, com.google.cloud.securityposture.v1.Posture.Builder builderForValue) { if (revisionsBuilder_ == null) { ensureRevisionsIsMutable(); revisions_.set(index, builderForValue.build()); onChanged(); } else { revisionsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of Posture revisions. * </pre> * * <code>repeated .google.cloud.securityposture.v1.Posture revisions = 1;</code> */ public Builder addRevisions(com.google.cloud.securityposture.v1.Posture value) { if (revisionsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureRevisionsIsMutable(); revisions_.add(value); onChanged(); } else { revisionsBuilder_.addMessage(value); } return this; } /** * * * <pre> * The list of Posture revisions. * </pre> * * <code>repeated .google.cloud.securityposture.v1.Posture revisions = 1;</code> */ public Builder addRevisions(int index, com.google.cloud.securityposture.v1.Posture value) { if (revisionsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureRevisionsIsMutable(); revisions_.add(index, value); onChanged(); } else { revisionsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The list of Posture revisions. * </pre> * * <code>repeated .google.cloud.securityposture.v1.Posture revisions = 1;</code> */ public Builder addRevisions( com.google.cloud.securityposture.v1.Posture.Builder builderForValue) { if (revisionsBuilder_ == null) { ensureRevisionsIsMutable(); revisions_.add(builderForValue.build()); onChanged(); } else { revisionsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The list of Posture revisions. 
* </pre> * * <code>repeated .google.cloud.securityposture.v1.Posture revisions = 1;</code> */ public Builder addRevisions( int index, com.google.cloud.securityposture.v1.Posture.Builder builderForValue) { if (revisionsBuilder_ == null) { ensureRevisionsIsMutable(); revisions_.add(index, builderForValue.build()); onChanged(); } else { revisionsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of Posture revisions. * </pre> * * <code>repeated .google.cloud.securityposture.v1.Posture revisions = 1;</code> */ public Builder addAllRevisions( java.lang.Iterable<? extends com.google.cloud.securityposture.v1.Posture> values) { if (revisionsBuilder_ == null) { ensureRevisionsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, revisions_); onChanged(); } else { revisionsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The list of Posture revisions. * </pre> * * <code>repeated .google.cloud.securityposture.v1.Posture revisions = 1;</code> */ public Builder clearRevisions() { if (revisionsBuilder_ == null) { revisions_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { revisionsBuilder_.clear(); } return this; } /** * * * <pre> * The list of Posture revisions. * </pre> * * <code>repeated .google.cloud.securityposture.v1.Posture revisions = 1;</code> */ public Builder removeRevisions(int index) { if (revisionsBuilder_ == null) { ensureRevisionsIsMutable(); revisions_.remove(index); onChanged(); } else { revisionsBuilder_.remove(index); } return this; } /** * * * <pre> * The list of Posture revisions. * </pre> * * <code>repeated .google.cloud.securityposture.v1.Posture revisions = 1;</code> */ public com.google.cloud.securityposture.v1.Posture.Builder getRevisionsBuilder(int index) { return getRevisionsFieldBuilder().getBuilder(index); } /** * * * <pre> * The list of Posture revisions. 
* </pre> * * <code>repeated .google.cloud.securityposture.v1.Posture revisions = 1;</code> */ public com.google.cloud.securityposture.v1.PostureOrBuilder getRevisionsOrBuilder(int index) { if (revisionsBuilder_ == null) { return revisions_.get(index); } else { return revisionsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The list of Posture revisions. * </pre> * * <code>repeated .google.cloud.securityposture.v1.Posture revisions = 1;</code> */ public java.util.List<? extends com.google.cloud.securityposture.v1.PostureOrBuilder> getRevisionsOrBuilderList() { if (revisionsBuilder_ != null) { return revisionsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(revisions_); } } /** * * * <pre> * The list of Posture revisions. * </pre> * * <code>repeated .google.cloud.securityposture.v1.Posture revisions = 1;</code> */ public com.google.cloud.securityposture.v1.Posture.Builder addRevisionsBuilder() { return getRevisionsFieldBuilder() .addBuilder(com.google.cloud.securityposture.v1.Posture.getDefaultInstance()); } /** * * * <pre> * The list of Posture revisions. * </pre> * * <code>repeated .google.cloud.securityposture.v1.Posture revisions = 1;</code> */ public com.google.cloud.securityposture.v1.Posture.Builder addRevisionsBuilder(int index) { return getRevisionsFieldBuilder() .addBuilder(index, com.google.cloud.securityposture.v1.Posture.getDefaultInstance()); } /** * * * <pre> * The list of Posture revisions. 
* </pre> * * <code>repeated .google.cloud.securityposture.v1.Posture revisions = 1;</code> */ public java.util.List<com.google.cloud.securityposture.v1.Posture.Builder> getRevisionsBuilderList() { return getRevisionsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.securityposture.v1.Posture, com.google.cloud.securityposture.v1.Posture.Builder, com.google.cloud.securityposture.v1.PostureOrBuilder> getRevisionsFieldBuilder() { if (revisionsBuilder_ == null) { revisionsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.securityposture.v1.Posture, com.google.cloud.securityposture.v1.Posture.Builder, com.google.cloud.securityposture.v1.PostureOrBuilder>( revisions_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); revisions_ = null; } return revisionsBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A token identifying a page of results the server should return. 
* </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.securityposture.v1.ListPostureRevisionsResponse) } // @@protoc_insertion_point(class_scope:google.cloud.securityposture.v1.ListPostureRevisionsResponse) private static final com.google.cloud.securityposture.v1.ListPostureRevisionsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.securityposture.v1.ListPostureRevisionsResponse(); } public static com.google.cloud.securityposture.v1.ListPostureRevisionsResponse getDefaultInstance() { 
return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListPostureRevisionsResponse> PARSER = new com.google.protobuf.AbstractParser<ListPostureRevisionsResponse>() { @java.lang.Override public ListPostureRevisionsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListPostureRevisionsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListPostureRevisionsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.securityposture.v1.ListPostureRevisionsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
36,614
java-netapp/proto-google-cloud-netapp-v1/src/main/java/com/google/cloud/netapp/v1/UpdateBackupVaultRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/netapp/v1/backup_vault.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.netapp.v1; /** * * * <pre> * UpdateBackupVaultRequest updates description and/or labels for a backupVault. * </pre> * * Protobuf type {@code google.cloud.netapp.v1.UpdateBackupVaultRequest} */ public final class UpdateBackupVaultRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.netapp.v1.UpdateBackupVaultRequest)
UpdateBackupVaultRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateBackupVaultRequest.newBuilder() to construct.
// NOTE(review): machine-generated protobuf message class (see "DO NOT EDIT" header above).
// Behavioral changes belong in google/cloud/netapp/v1/backup_vault.proto followed by regeneration,
// not in hand edits here. Explicit field presence is tracked in bitField0_:
//   bit 0x00000001 -> update_mask (field 1), bit 0x00000002 -> backup_vault (field 2).
private UpdateBackupVaultRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateBackupVaultRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateBackupVaultRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.netapp.v1.BackupVaultProto .internal_static_google_cloud_netapp_v1_UpdateBackupVaultRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.netapp.v1.BackupVaultProto .internal_static_google_cloud_netapp_v1_UpdateBackupVaultRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.netapp.v1.UpdateBackupVaultRequest.class, com.google.cloud.netapp.v1.UpdateBackupVaultRequest.Builder.class); } /* Presence bits: 0x1 = update_mask, 0x2 = backup_vault (see hazzers below). */ private int bitField0_; public static final int UPDATE_MASK_FIELD_NUMBER = 1; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Required. Field mask is used to specify the fields to be overwritten in the * Backup resource to be updated. * The fields specified in the update_mask are relative to the resource, not * the full request. A field will be overwritten if it is in the mask. If the * user does not provide a mask then all fields will be overwritten. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. Field mask is used to specify the fields to be overwritten in the * Backup resource to be updated. * The fields specified in the update_mask are relative to the resource, not * the full request. A field will be overwritten if it is in the mask.
If the * user does not provide a mask then all fields will be overwritten. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Required. Field mask is used to specify the fields to be overwritten in the * Backup resource to be updated. * The fields specified in the update_mask are relative to the resource, not * the full request. A field will be overwritten if it is in the mask. If the * user does not provide a mask then all fields will be overwritten. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } public static final int BACKUP_VAULT_FIELD_NUMBER = 2; private com.google.cloud.netapp.v1.BackupVault backupVault_; /** * * * <pre> * Required. The backupVault being updated * </pre> * * <code> * .google.cloud.netapp.v1.BackupVault backup_vault = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the backupVault field is set. */ @java.lang.Override public boolean hasBackupVault() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The backupVault being updated * </pre> * * <code> * .google.cloud.netapp.v1.BackupVault backup_vault = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The backupVault. */ @java.lang.Override public com.google.cloud.netapp.v1.BackupVault getBackupVault() { return backupVault_ == null ? com.google.cloud.netapp.v1.BackupVault.getDefaultInstance() : backupVault_; } /** * * * <pre> * Required.
The backupVault being updated * </pre> * * <code> * .google.cloud.netapp.v1.BackupVault backup_vault = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.netapp.v1.BackupVaultOrBuilder getBackupVaultOrBuilder() { return backupVault_ == null ? com.google.cloud.netapp.v1.BackupVault.getDefaultInstance() : backupVault_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } /* Wire serialization: emit field 1 (update_mask) then field 2 (backup_vault) only when the presence bit is set. */ @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getUpdateMask()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getBackupVault()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getUpdateMask()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getBackupVault()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.netapp.v1.UpdateBackupVaultRequest)) { return super.equals(obj); } com.google.cloud.netapp.v1.UpdateBackupVaultRequest other = (com.google.cloud.netapp.v1.UpdateBackupVaultRequest) obj; if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (hasBackupVault() != other.hasBackupVault()) return false; if (hasBackupVault()) { if
(!getBackupVault().equals(other.getBackupVault())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } if (hasBackupVault()) { hash = (37 * hash) + BACKUP_VAULT_FIELD_NUMBER; hash = (53 * hash) + getBackupVault().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.netapp.v1.UpdateBackupVaultRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.netapp.v1.UpdateBackupVaultRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.netapp.v1.UpdateBackupVaultRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.netapp.v1.UpdateBackupVaultRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.netapp.v1.UpdateBackupVaultRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.netapp.v1.UpdateBackupVaultRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.netapp.v1.UpdateBackupVaultRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.netapp.v1.UpdateBackupVaultRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.netapp.v1.UpdateBackupVaultRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.netapp.v1.UpdateBackupVaultRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.netapp.v1.UpdateBackupVaultRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.netapp.v1.UpdateBackupVaultRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.netapp.v1.UpdateBackupVaultRequest prototype) { return
DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * UpdateBackupVaultRequest updates description and/or labels for a backupVault. * </pre> * * Protobuf type {@code google.cloud.netapp.v1.UpdateBackupVaultRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.netapp.v1.UpdateBackupVaultRequest)
com.google.cloud.netapp.v1.UpdateBackupVaultRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.netapp.v1.BackupVaultProto .internal_static_google_cloud_netapp_v1_UpdateBackupVaultRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.netapp.v1.BackupVaultProto .internal_static_google_cloud_netapp_v1_UpdateBackupVaultRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.netapp.v1.UpdateBackupVaultRequest.class, com.google.cloud.netapp.v1.UpdateBackupVaultRequest.Builder.class); } // Construct using com.google.cloud.netapp.v1.UpdateBackupVaultRequest.newBuilder()
private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getUpdateMaskFieldBuilder(); getBackupVaultFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; updateMask_ = null; if
(updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } backupVault_ = null; if (backupVaultBuilder_ != null) { backupVaultBuilder_.dispose(); backupVaultBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.netapp.v1.BackupVaultProto .internal_static_google_cloud_netapp_v1_UpdateBackupVaultRequest_descriptor; } @java.lang.Override public com.google.cloud.netapp.v1.UpdateBackupVaultRequest getDefaultInstanceForType() { return com.google.cloud.netapp.v1.UpdateBackupVaultRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.netapp.v1.UpdateBackupVaultRequest build() { com.google.cloud.netapp.v1.UpdateBackupVaultRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.netapp.v1.UpdateBackupVaultRequest buildPartial() { com.google.cloud.netapp.v1.UpdateBackupVaultRequest result = new com.google.cloud.netapp.v1.UpdateBackupVaultRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.netapp.v1.UpdateBackupVaultRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.backupVault_ = backupVaultBuilder_ == null ?
backupVault_ : backupVaultBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.netapp.v1.UpdateBackupVaultRequest) { return mergeFrom((com.google.cloud.netapp.v1.UpdateBackupVaultRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.netapp.v1.UpdateBackupVaultRequest other) { if (other == com.google.cloud.netapp.v1.UpdateBackupVaultRequest.getDefaultInstance()) return this; if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } if (other.hasBackupVault()) { mergeBackupVault(other.getBackupVault()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new
java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10
case 18: { input.readMessage(getBackupVaultFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18
default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag
} break; } // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally
return this; } /* Builder state: presence bits plus lazily-initialized nested field builders. */ private int bitField0_; private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Required. Field mask is used to specify the fields to be overwritten in the * Backup resource to be updated. * The fields specified in the update_mask are relative to the resource, not * the full request. A field will be overwritten if it is in the mask. If the * user does not provide a mask then all fields will be overwritten. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. Field mask is used to specify the fields to be overwritten in the * Backup resource to be updated. * The fields specified in the update_mask are relative to the resource, not * the full request. A field will be overwritten if it is in the mask. If the * user does not provide a mask then all fields will be overwritten.
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Required. Field mask is used to specify the fields to be overwritten in the * Backup resource to be updated. * The fields specified in the update_mask are relative to the resource, not * the full request. A field will be overwritten if it is in the mask. If the * user does not provide a mask then all fields will be overwritten. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. Field mask is used to specify the fields to be overwritten in the * Backup resource to be updated. * The fields specified in the update_mask are relative to the resource, not * the full request. A field will be overwritten if it is in the mask. If the * user does not provide a mask then all fields will be overwritten. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required.
Field mask is used to specify the fields to be overwritten in the * Backup resource to be updated. * The fields specified in the update_mask are relative to the resource, not * the full request. A field will be overwritten if it is in the mask. If the * user does not provide a mask then all fields will be overwritten. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. Field mask is used to specify the fields to be overwritten in the * Backup resource to be updated. * The fields specified in the update_mask are relative to the resource, not * the full request. A field will be overwritten if it is in the mask. If the * user does not provide a mask then all fields will be overwritten. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000001); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. Field mask is used to specify the fields to be overwritten in the * Backup resource to be updated. * The fields specified in the update_mask are relative to the resource, not * the full request. A field will be overwritten if it is in the mask. If the * user does not provide a mask then all fields will be overwritten.
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000001; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Required. Field mask is used to specify the fields to be overwritten in the * Backup resource to be updated. * The fields specified in the update_mask are relative to the resource, not * the full request. A field will be overwritten if it is in the mask. If the * user does not provide a mask then all fields will be overwritten. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Required. Field mask is used to specify the fields to be overwritten in the * Backup resource to be updated. * The fields specified in the update_mask are relative to the resource, not * the full request. A field will be overwritten if it is in the mask. If the * user does not provide a mask then all fields will be overwritten.
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } private com.google.cloud.netapp.v1.BackupVault backupVault_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.netapp.v1.BackupVault, com.google.cloud.netapp.v1.BackupVault.Builder, com.google.cloud.netapp.v1.BackupVaultOrBuilder> backupVaultBuilder_; /** * * * <pre> * Required. The backupVault being updated * </pre> * * <code> * .google.cloud.netapp.v1.BackupVault backup_vault = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the backupVault field is set. */ public boolean hasBackupVault() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The backupVault being updated * </pre> * * <code> * .google.cloud.netapp.v1.BackupVault backup_vault = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The backupVault. */ public com.google.cloud.netapp.v1.BackupVault getBackupVault() { if (backupVaultBuilder_ == null) { return backupVault_ == null ? com.google.cloud.netapp.v1.BackupVault.getDefaultInstance() : backupVault_; } else { return backupVaultBuilder_.getMessage(); } } /** * * * <pre> * Required.
The backupVault being updated * </pre> * * <code> * .google.cloud.netapp.v1.BackupVault backup_vault = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setBackupVault(com.google.cloud.netapp.v1.BackupVault value) { if (backupVaultBuilder_ == null) { if (value == null) { throw new NullPointerException(); } backupVault_ = value; } else { backupVaultBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The backupVault being updated * </pre> * * <code> * .google.cloud.netapp.v1.BackupVault backup_vault = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setBackupVault(com.google.cloud.netapp.v1.BackupVault.Builder builderForValue) { if (backupVaultBuilder_ == null) { backupVault_ = builderForValue.build(); } else { backupVaultBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The backupVault being updated * </pre> * * <code> * .google.cloud.netapp.v1.BackupVault backup_vault = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeBackupVault(com.google.cloud.netapp.v1.BackupVault value) { if (backupVaultBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && backupVault_ != null && backupVault_ != com.google.cloud.netapp.v1.BackupVault.getDefaultInstance()) { getBackupVaultBuilder().mergeFrom(value); } else { backupVault_ = value; } } else { backupVaultBuilder_.mergeFrom(value); } if (backupVault_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required.
The backupVault being updated * </pre> * * <code> * .google.cloud.netapp.v1.BackupVault backup_vault = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearBackupVault() { bitField0_ = (bitField0_ & ~0x00000002); backupVault_ = null; if (backupVaultBuilder_ != null) { backupVaultBuilder_.dispose(); backupVaultBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The backupVault being updated * </pre> * * <code> * .google.cloud.netapp.v1.BackupVault backup_vault = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.netapp.v1.BackupVault.Builder getBackupVaultBuilder() { bitField0_ |= 0x00000002; onChanged(); return getBackupVaultFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The backupVault being updated * </pre> * * <code> * .google.cloud.netapp.v1.BackupVault backup_vault = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.netapp.v1.BackupVaultOrBuilder getBackupVaultOrBuilder() { if (backupVaultBuilder_ != null) { return backupVaultBuilder_.getMessageOrBuilder(); } else { return backupVault_ == null ? com.google.cloud.netapp.v1.BackupVault.getDefaultInstance() : backupVault_; } } /** * * * <pre> * Required.
The backupVault being updated * </pre> * * <code> * .google.cloud.netapp.v1.BackupVault backup_vault = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.netapp.v1.BackupVault, com.google.cloud.netapp.v1.BackupVault.Builder, com.google.cloud.netapp.v1.BackupVaultOrBuilder> getBackupVaultFieldBuilder() { if (backupVaultBuilder_ == null) { backupVaultBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.netapp.v1.BackupVault, com.google.cloud.netapp.v1.BackupVault.Builder, com.google.cloud.netapp.v1.BackupVaultOrBuilder>( getBackupVault(), getParentForChildren(), isClean()); backupVault_ = null; } return backupVaultBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.netapp.v1.UpdateBackupVaultRequest)
} // @@protoc_insertion_point(class_scope:google.cloud.netapp.v1.UpdateBackupVaultRequest)
/* Singleton default instance and the stream PARSER backing parseFrom()/parser(). */ private static final com.google.cloud.netapp.v1.UpdateBackupVaultRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.netapp.v1.UpdateBackupVaultRequest(); } public static com.google.cloud.netapp.v1.UpdateBackupVaultRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateBackupVaultRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateBackupVaultRequest>() { @java.lang.Override public UpdateBackupVaultRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input,
extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateBackupVaultRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateBackupVaultRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.netapp.v1.UpdateBackupVaultRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/rocketmq
36,741
client/src/main/java/org/apache/rocketmq/client/consumer/DefaultMQPushConsumer.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.rocketmq.client.consumer;

import org.apache.rocketmq.client.ClientConfig;
import org.apache.rocketmq.client.QueryResult;
import org.apache.rocketmq.client.consumer.listener.MessageListener;
import org.apache.rocketmq.client.consumer.listener.MessageListenerConcurrently;
import org.apache.rocketmq.client.consumer.listener.MessageListenerOrderly;
import org.apache.rocketmq.client.consumer.rebalance.AllocateMessageQueueAveragely;
import org.apache.rocketmq.client.consumer.store.OffsetStore;
import org.apache.rocketmq.client.exception.MQBrokerException;
import org.apache.rocketmq.client.exception.MQClientException;
import org.apache.rocketmq.client.hook.ConsumeMessageHook;
import org.apache.rocketmq.client.impl.consumer.DefaultMQPushConsumerImpl;
import org.apache.rocketmq.client.trace.AsyncTraceDispatcher;
import org.apache.rocketmq.client.trace.TraceDispatcher;
import org.apache.rocketmq.client.trace.hook.ConsumeMessageTraceHookImpl;
import org.apache.rocketmq.common.MixAll;
import org.apache.rocketmq.common.UtilAll;
import org.apache.rocketmq.common.consumer.ConsumeFromWhere;
import org.apache.rocketmq.common.message.MessageDecoder;
import org.apache.rocketmq.common.message.MessageExt;
import org.apache.rocketmq.common.message.MessageQueue;
import org.apache.rocketmq.logging.org.slf4j.Logger;
import org.apache.rocketmq.logging.org.slf4j.LoggerFactory;
import org.apache.rocketmq.remoting.RPCHook;
import org.apache.rocketmq.remoting.exception.RemotingException;
import org.apache.rocketmq.remoting.protocol.NamespaceUtil;
import org.apache.rocketmq.remoting.protocol.heartbeat.MessageModel;

import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;

/**
 * In most scenarios, this is the most recommended class to consume messages.
 * </p>
 * Technically speaking, this push client is virtually a wrapper of the underlying pull service. Specifically, on
 * arrival of messages pulled from brokers, it roughly invokes the registered callback handler to feed the messages.
 * </p>
 * See quickstart/Consumer in the example module for a typical usage.
 * </p>
 *
 * <p>
 * <strong>Thread Safety:</strong> After initialization, the instance can be regarded as thread-safe.
 * </p>
 */
public class DefaultMQPushConsumer extends ClientConfig implements MQPushConsumer {

    private final Logger log = LoggerFactory.getLogger(DefaultMQPushConsumer.class);

    /**
     * Internal implementation. Most of the functions herein are delegated to it.
     */
    protected final transient DefaultMQPushConsumerImpl defaultMQPushConsumerImpl;

    /**
     * Consumers of the same role are required to have exactly the same subscriptions and consumerGroup to correctly
     * achieve load balance. It's required and needs to be globally unique.
     * </p>
     * See <a href="https://rocketmq.apache.org/docs/introduction/02concepts">here</a> for further discussion.
     */
    private String consumerGroup;

    /**
     * Message model defines the way how messages are delivered to each consumer client.
     * </p>
     * RocketMQ supports two message models: clustering and broadcasting. If clustering is set, consumer clients with
     * the same {@link #consumerGroup} would only consume shards of the messages subscribed, which achieves load
     * balance; Conversely, if the broadcasting is set, each consumer client will consume all subscribed messages
     * separately.
     * </p>
     * This field defaults to clustering.
     */
    private MessageModel messageModel = MessageModel.CLUSTERING;

    /**
     * Consuming point on consumer booting.
     * </p>
     * There are three consuming points:
     * <ul>
     * <li>
     * <code>CONSUME_FROM_LAST_OFFSET</code>: consumer clients pick up where they stopped previously.
     * If it were a newly booting up consumer client, according to the aging of the consumer group, there are two
     * cases:
     * <ol>
     * <li>
     * if the consumer group is created so recently that the earliest message being subscribed has not yet
     * expired, which means the consumer group represents a lately launched business, consuming will
     * start from the very beginning;
     * </li>
     * <li>
     * if the earliest message being subscribed has expired, consuming will start from the latest
     * messages, meaning messages born prior to the booting timestamp would be ignored.
     * </li>
     * </ol>
     * </li>
     * <li>
     * <code>CONSUME_FROM_FIRST_OFFSET</code>: Consumer client will start from earliest messages available.
     * </li>
     * <li>
     * <code>CONSUME_FROM_TIMESTAMP</code>: Consumer client will start from specified timestamp, which means
     * messages born prior to {@link #consumeTimestamp} will be ignored
     * </li>
     * </ul>
     */
    private ConsumeFromWhere consumeFromWhere = ConsumeFromWhere.CONSUME_FROM_LAST_OFFSET;

    /**
     * Backtracking consumption time with second precision. Time format is 20131223171201,
     * meaning 17:12:01 on December 23, 2013.
     * Defaults to half an hour before the consumer boots.
     */
    private String consumeTimestamp = UtilAll.timeMillisToHumanString3(System.currentTimeMillis() - (1000 * 60 * 30));

    /**
     * Queue allocation algorithm specifying how message queues are allocated to each consumer client.
     */
    private AllocateMessageQueueStrategy allocateMessageQueueStrategy;

    /**
     * Subscription relationship
     */
    private Map<String /* topic */, String /* sub expression */> subscription = new HashMap<>();

    /**
     * Message listener
     */
    private MessageListener messageListener;

    /**
     * Listener to call if message queue assignment is changed.
     */
    private MessageQueueListener messageQueueListener;

    /**
     * Offset Storage
     */
    private OffsetStore offsetStore;

    /**
     * Minimum consumer thread number
     */
    private int consumeThreadMin = 20;

    /**
     * Max consumer thread number
     */
    private int consumeThreadMax = 20;

    /**
     * Threshold for dynamic adjustment of the number of thread pool
     */
    private long adjustThreadPoolNumsThreshold = 100000;

    /**
     * Concurrently max span offset. It has no effect on sequential consumption.
     */
    private int consumeConcurrentlyMaxSpan = 2000;

    /**
     * Flow control threshold on queue level, each message queue will cache at most 1000 messages by default.
     * Considering the {@code pullBatchSize}, the instantaneous value may exceed the limit.
     */
    private int pullThresholdForQueue = 1000;

    /**
     * Flow control threshold on queue level, means max num of messages waiting to ack.
     * In contrast with pull threshold, once a message is popped, it's considered the beginning of consumption.
     */
    private int popThresholdForQueue = 96;

    /**
     * Limit the cached message size on queue level, each message queue will cache at most 100 MiB messages by default.
     * Considering the {@code pullBatchSize}, the instantaneous value may exceed the limit.
     *
     * <p>
     * The size(MB) of a message is only measured by message body, so it's not accurate
     */
    private int pullThresholdSizeForQueue = 100;

    /**
     * Flow control threshold on topic level, default value is -1(Unlimited)
     * <p>
     * The value of {@code pullThresholdForQueue} will be overwritten and calculated based on
     * {@code pullThresholdForTopic} if it isn't unlimited
     * <p>
     * For example, if the value of pullThresholdForTopic is 1000 and 10 message queues are assigned to this consumer,
     * then pullThresholdForQueue will be set to 100
     */
    private int pullThresholdForTopic = -1;

    /**
     * Limit the cached message size on topic level, default value is -1 MiB(Unlimited)
     * <p>
     * The value of {@code pullThresholdSizeForQueue} will be overwritten and calculated based on
     * {@code pullThresholdSizeForTopic} if it isn't unlimited
     * <p>
     * For example, if the value of pullThresholdSizeForTopic is 1000 MiB and 10 message queues are
     * assigned to this consumer, then pullThresholdSizeForQueue will be set to 100 MiB
     */
    private int pullThresholdSizeForTopic = -1;

    /**
     * Message pull interval
     */
    private long pullInterval = 0;

    /**
     * Batch consumption size
     */
    private int consumeMessageBatchMaxSize = 1;

    /**
     * Batch pull size
     */
    private int pullBatchSize = 32;

    private int pullBatchSizeInBytes = 256 * 1024;

    /**
     * Whether to update subscription relationship on every pull
     */
    private boolean postSubscriptionWhenPull = false;

    /**
     * Whether the unit of subscription group
     */
    private boolean unitMode = false;

    /**
     * Max re-consume times.
     * In concurrently mode, -1 means 16;
     * In orderly mode, -1 means Integer.MAX_VALUE.
     * If messages are re-consumed more than {@link #maxReconsumeTimes} before success.
     */
    private int maxReconsumeTimes = -1;

    /**
     * Suspending pulling time for cases requiring slow pulling like flow-control scenario.
     */
    private long suspendCurrentQueueTimeMillis = 1000;

    /**
     * Maximum amount of time in minutes a message may block the consuming thread.
     */
    private long consumeTimeout = 15;

    /**
     * Maximum amount of invisible time in millisecond of a message, range is [5000, 300000]
     */
    private long popInvisibleTime = 60000;

    /**
     * Batch pop size. range is [1, 32]
     */
    private int popBatchNums = 32;

    /**
     * Maximum time to await message consuming when shutting down the consumer, 0 indicates no await.
     */
    private long awaitTerminationMillisWhenShutdown = 0;

    /**
     * Interface of asynchronous transfer data
     */
    private TraceDispatcher traceDispatcher = null;

    // force to use client rebalance
    private boolean clientRebalance = true;

    private RPCHook rpcHook = null;

    /**
     * Default constructor.
     */
    public DefaultMQPushConsumer() {
        this(MixAll.DEFAULT_CONSUMER_GROUP, null, new AllocateMessageQueueAveragely());
    }

    /**
     * Constructor specifying consumer group.
     *
     * @param consumerGroup Consumer group.
     */
    public DefaultMQPushConsumer(final String consumerGroup) {
        this(consumerGroup, null, new AllocateMessageQueueAveragely());
    }

    /**
     * Constructor specifying RPC hook.
     *
     * @param rpcHook RPC hook to execute before each remoting command.
     */
    public DefaultMQPushConsumer(RPCHook rpcHook) {
        this(MixAll.DEFAULT_CONSUMER_GROUP, rpcHook, new AllocateMessageQueueAveragely());
    }

    /**
     * Constructor specifying consumer group and RPC hook.
     *
     * @param consumerGroup Consumer group.
     * @param rpcHook RPC hook to execute before each remoting command.
     */
    public DefaultMQPushConsumer(final String consumerGroup, RPCHook rpcHook) {
        this(consumerGroup, rpcHook, new AllocateMessageQueueAveragely());
    }

    /**
     * Constructor specifying consumer group, enabled msg trace flag and customized trace topic name.
     *
     * @param consumerGroup Consumer group.
     * @param enableMsgTrace Switch flag instance for message trace.
     * @param customizedTraceTopic The name value of message trace topic. If not configured, the default trace topic
     * name is used.
     */
    public DefaultMQPushConsumer(final String consumerGroup, boolean enableMsgTrace, final String customizedTraceTopic) {
        this(consumerGroup, null, new AllocateMessageQueueAveragely(), enableMsgTrace, customizedTraceTopic);
    }

    /**
     * Constructor specifying consumer group, RPC hook and message queue allocating algorithm.
     *
     * @param consumerGroup Consumer group.
     * @param rpcHook RPC hook to execute before each remoting command.
     * @param allocateMessageQueueStrategy Message queue allocating algorithm.
     */
    public DefaultMQPushConsumer(final String consumerGroup, RPCHook rpcHook,
        AllocateMessageQueueStrategy allocateMessageQueueStrategy) {
        this(consumerGroup, rpcHook, allocateMessageQueueStrategy, false, null);
    }

    /**
     * Constructor specifying consumer group, RPC hook, message queue allocating algorithm, enabled msg trace flag and
     * customized trace topic name.
     *
     * @param consumerGroup Consumer group.
     * @param rpcHook RPC hook to execute before each remoting command.
     * @param allocateMessageQueueStrategy message queue allocating algorithm.
     * @param enableMsgTrace Switch flag instance for message trace.
     * @param customizedTraceTopic The name value of message trace topic. If not configured, the default trace topic
     * name is used.
     */
    public DefaultMQPushConsumer(final String consumerGroup, RPCHook rpcHook,
        AllocateMessageQueueStrategy allocateMessageQueueStrategy, boolean enableMsgTrace,
        final String customizedTraceTopic) {
        this.consumerGroup = consumerGroup;
        this.rpcHook = rpcHook;
        this.allocateMessageQueueStrategy = allocateMessageQueueStrategy;
        defaultMQPushConsumerImpl = new DefaultMQPushConsumerImpl(this, rpcHook);
        this.enableTrace = enableMsgTrace;
        this.traceTopic = customizedTraceTopic;
    }

    /**
     * Constructor specifying namespace and consumer group.
     *
     * @param namespace Namespace for this MQ Producer instance.
     * @param consumerGroup Consumer group.
     */
    @Deprecated
    public DefaultMQPushConsumer(final String namespace, final String consumerGroup) {
        this(namespace, consumerGroup, null, new AllocateMessageQueueAveragely());
    }

    /**
     * Constructor specifying namespace, consumer group and RPC hook.
     *
     * @param namespace Namespace for this MQ Producer instance.
     * @param consumerGroup Consumer group.
     * @param rpcHook RPC hook to execute before each remoting command.
     */
    @Deprecated
    public DefaultMQPushConsumer(final String namespace, final String consumerGroup, RPCHook rpcHook) {
        this(namespace, consumerGroup, rpcHook, new AllocateMessageQueueAveragely());
    }

    /**
     * Constructor specifying namespace, consumer group, RPC hook and message queue allocating algorithm.
     *
     * @param namespace Namespace for this MQ Producer instance.
     * @param consumerGroup Consumer group.
     * @param rpcHook RPC hook to execute before each remoting command.
     * @param allocateMessageQueueStrategy Message queue allocating algorithm.
     */
    @Deprecated
    public DefaultMQPushConsumer(final String namespace, final String consumerGroup, RPCHook rpcHook,
        AllocateMessageQueueStrategy allocateMessageQueueStrategy) {
        this.consumerGroup = consumerGroup;
        this.namespace = namespace;
        this.rpcHook = rpcHook;
        this.allocateMessageQueueStrategy = allocateMessageQueueStrategy;
        defaultMQPushConsumerImpl = new DefaultMQPushConsumerImpl(this, rpcHook);
    }

    /**
     * Constructor specifying namespace, consumer group, RPC hook, message queue allocating algorithm, enabled msg
     * trace flag and customized trace topic name.
     *
     * @param namespace Namespace for this MQ Producer instance.
     * @param consumerGroup Consumer group.
     * @param rpcHook RPC hook to execute before each remoting command.
     * @param allocateMessageQueueStrategy message queue allocating algorithm.
     * @param enableMsgTrace Switch flag instance for message trace.
     * @param customizedTraceTopic The name value of message trace topic. If not configured, the default trace topic
     * name is used.
     */
    @Deprecated
    public DefaultMQPushConsumer(final String namespace, final String consumerGroup, RPCHook rpcHook,
        AllocateMessageQueueStrategy allocateMessageQueueStrategy, boolean enableMsgTrace,
        final String customizedTraceTopic) {
        this.consumerGroup = consumerGroup;
        this.namespace = namespace;
        this.rpcHook = rpcHook;
        this.allocateMessageQueueStrategy = allocateMessageQueueStrategy;
        defaultMQPushConsumerImpl = new DefaultMQPushConsumerImpl(this, rpcHook);
        this.enableTrace = enableMsgTrace;
        this.traceTopic = customizedTraceTopic;
    }

    /**
     * This method will be removed in a certain version after April 5, 2020, so please do not use this method.
     */
    @Deprecated
    @Override
    public void createTopic(String key, String newTopic, int queueNum,
        Map<String, String> attributes) throws MQClientException {
        createTopic(key, withNamespace(newTopic), queueNum, 0, null);
    }

    @Override
    public void setUseTLS(boolean useTLS) {
        super.setUseTLS(useTLS);
    }

    /**
     * This method will be removed in a certain version after April 5, 2020, so please do not use this method.
     */
    @Deprecated
    @Override
    public void createTopic(String key, String newTopic, int queueNum, int topicSysFlag,
        Map<String, String> attributes) throws MQClientException {
        this.defaultMQPushConsumerImpl.createTopic(key, withNamespace(newTopic), queueNum, topicSysFlag);
    }

    /**
     * This method will be removed in a certain version after April 5, 2020, so please do not use this method.
     */
    @Deprecated
    @Override
    public long searchOffset(MessageQueue mq, long timestamp) throws MQClientException {
        return this.defaultMQPushConsumerImpl.searchOffset(queueWithNamespace(mq), timestamp);
    }

    /**
     * This method will be removed in a certain version after April 5, 2020, so please do not use this method.
     */
    @Deprecated
    @Override
    public long maxOffset(MessageQueue mq) throws MQClientException {
        return this.defaultMQPushConsumerImpl.maxOffset(queueWithNamespace(mq));
    }

    /**
     * This method will be removed in a certain version after April 5, 2020, so please do not use this method.
     */
    @Deprecated
    @Override
    public long minOffset(MessageQueue mq) throws MQClientException {
        return this.defaultMQPushConsumerImpl.minOffset(queueWithNamespace(mq));
    }

    /**
     * This method will be removed in a certain version after April 5, 2020, so please do not use this method.
     */
    @Deprecated
    @Override
    public long earliestMsgStoreTime(MessageQueue mq) throws MQClientException {
        return this.defaultMQPushConsumerImpl.earliestMsgStoreTime(queueWithNamespace(mq));
    }

    /**
     * This method will be removed in a certain version after April 5, 2020, so please do not use this method.
     */
    @Deprecated
    @Override
    public QueryResult queryMessage(String topic, String key, int maxNum, long begin, long end)
        throws MQClientException, InterruptedException {
        return this.defaultMQPushConsumerImpl.queryMessage(withNamespace(topic), key, maxNum, begin, end);
    }

    /**
     * This method will be removed in a certain version after April 5, 2020, so please do not use this method.
     */
    @Deprecated
    @Override
    public MessageExt viewMessage(String topic,
        String msgId) throws RemotingException, MQBrokerException, InterruptedException, MQClientException {
        try {
            // If msgId decodes as an offset message id, view it directly.
            MessageDecoder.decodeMessageId(msgId);
            return this.defaultMQPushConsumerImpl.viewMessage(withNamespace(topic), msgId);
        } catch (Exception ignored) {
            // Best-effort: msgId is not a valid offset message id, fall back to querying by unique key below.
        }
        return this.defaultMQPushConsumerImpl.queryMessageByUniqKey(withNamespace(topic), msgId);
    }

    public AllocateMessageQueueStrategy getAllocateMessageQueueStrategy() {
        return allocateMessageQueueStrategy;
    }

    public void setAllocateMessageQueueStrategy(AllocateMessageQueueStrategy allocateMessageQueueStrategy) {
        this.allocateMessageQueueStrategy = allocateMessageQueueStrategy;
    }

    public int getConsumeConcurrentlyMaxSpan() {
        return consumeConcurrentlyMaxSpan;
    }

    public void setConsumeConcurrentlyMaxSpan(int consumeConcurrentlyMaxSpan) {
        this.consumeConcurrentlyMaxSpan = consumeConcurrentlyMaxSpan;
    }

    public ConsumeFromWhere getConsumeFromWhere() {
        return consumeFromWhere;
    }

    public void setConsumeFromWhere(ConsumeFromWhere consumeFromWhere) {
        this.consumeFromWhere = consumeFromWhere;
    }

    public int getConsumeMessageBatchMaxSize() {
        return consumeMessageBatchMaxSize;
    }

    public void setConsumeMessageBatchMaxSize(int consumeMessageBatchMaxSize) {
        this.consumeMessageBatchMaxSize = consumeMessageBatchMaxSize;
    }

    public String getConsumerGroup() {
        return consumerGroup;
    }

    public void setConsumerGroup(String consumerGroup) {
        this.consumerGroup = consumerGroup;
    }

    public int getConsumeThreadMax() {
        return consumeThreadMax;
    }

    public void setConsumeThreadMax(int consumeThreadMax) {
        this.consumeThreadMax = consumeThreadMax;
    }

    public int getConsumeThreadMin() {
        return consumeThreadMin;
    }

    public void setConsumeThreadMin(int consumeThreadMin) {
        this.consumeThreadMin = consumeThreadMin;
    }

    /**
     * This method will be removed in a certain version after April 5, 2020, so please do not use this method.
     */
    @Deprecated
    public DefaultMQPushConsumerImpl getDefaultMQPushConsumerImpl() {
        return defaultMQPushConsumerImpl;
    }

    public MessageListener getMessageListener() {
        return messageListener;
    }

    public void setMessageListener(MessageListener messageListener) {
        this.messageListener = messageListener;
    }

    public MessageModel getMessageModel() {
        return messageModel;
    }

    public void setMessageModel(MessageModel messageModel) {
        this.messageModel = messageModel;
    }

    public int getPullBatchSize() {
        return pullBatchSize;
    }

    public void setPullBatchSize(int pullBatchSize) {
        this.pullBatchSize = pullBatchSize;
    }

    public long getPullInterval() {
        return pullInterval;
    }

    public void setPullInterval(long pullInterval) {
        this.pullInterval = pullInterval;
    }

    public int getPullThresholdForQueue() {
        return pullThresholdForQueue;
    }

    public void setPullThresholdForQueue(int pullThresholdForQueue) {
        this.pullThresholdForQueue = pullThresholdForQueue;
    }

    public int getPopThresholdForQueue() {
        return popThresholdForQueue;
    }

    public void setPopThresholdForQueue(int popThresholdForQueue) {
        this.popThresholdForQueue = popThresholdForQueue;
    }

    public int getPullThresholdForTopic() {
        return pullThresholdForTopic;
    }

    public void setPullThresholdForTopic(final int pullThresholdForTopic) {
        this.pullThresholdForTopic = pullThresholdForTopic;
    }

    public int getPullThresholdSizeForQueue() {
        return pullThresholdSizeForQueue;
    }

    public void setPullThresholdSizeForQueue(final int pullThresholdSizeForQueue) {
        this.pullThresholdSizeForQueue = pullThresholdSizeForQueue;
    }

    public int getPullThresholdSizeForTopic() {
        return pullThresholdSizeForTopic;
    }

    public void setPullThresholdSizeForTopic(final int pullThresholdSizeForTopic) {
        this.pullThresholdSizeForTopic = pullThresholdSizeForTopic;
    }

    public Map<String, String> getSubscription() {
        return subscription;
    }

    /**
     * This method will be removed in a certain version after April 5, 2020, so please do not use this method.
     */
    @Deprecated
    public void setSubscription(Map<String, String> subscription) {
        // Rewrite every topic key with the instance namespace before storing.
        Map<String, String> subscriptionWithNamespace = new HashMap<>(subscription.size(), 1);
        for (Entry<String, String> topicEntry : subscription.entrySet()) {
            subscriptionWithNamespace.put(withNamespace(topicEntry.getKey()), topicEntry.getValue());
        }
        this.subscription = subscriptionWithNamespace;
    }

    /**
     * Send message back to broker which will be re-delivered in future.
     * <p>
     * This method will be removed or its visibility will be changed in a certain version after April 5, 2020, so
     * please do not use this method.
     *
     * @param msg Message to send back.
     * @param delayLevel delay level.
     * @throws RemotingException if there is any network-tier error.
     * @throws MQBrokerException if there is any broker error.
     * @throws InterruptedException if the thread is interrupted.
     * @throws MQClientException if there is any client error.
     */
    @Deprecated
    @Override
    public void sendMessageBack(MessageExt msg, int delayLevel)
        throws RemotingException, MQBrokerException, InterruptedException, MQClientException {
        msg.setTopic(withNamespace(msg.getTopic()));
        this.defaultMQPushConsumerImpl.sendMessageBack(msg, delayLevel, msg.getBrokerName());
    }

    /**
     * Send message back to the broker whose name is <code>brokerName</code> and the message will be re-delivered in
     * future.
     * <p>
     * This method will be removed or its visibility will be changed in a certain version after April 5, 2020, so
     * please do not use this method.
     *
     * @param msg Message to send back.
     * @param delayLevel delay level.
     * @param brokerName broker name.
     * @throws RemotingException if there is any network-tier error.
     * @throws MQBrokerException if there is any broker error.
     * @throws InterruptedException if the thread is interrupted.
     * @throws MQClientException if there is any client error.
     */
    @Deprecated
    @Override
    public void sendMessageBack(MessageExt msg, int delayLevel, String brokerName)
        throws RemotingException, MQBrokerException, InterruptedException, MQClientException {
        msg.setTopic(withNamespace(msg.getTopic()));
        this.defaultMQPushConsumerImpl.sendMessageBack(msg, delayLevel, brokerName);
    }

    @Override
    public Set<MessageQueue> fetchSubscribeMessageQueues(String topic) throws MQClientException {
        return this.defaultMQPushConsumerImpl.fetchSubscribeMessageQueues(withNamespace(topic));
    }

    /**
     * This method gets internal infrastructure ready to serve. Instances must call this method after configuration.
     *
     * @throws MQClientException if there is any client error.
     */
    @Override
    public void start() throws MQClientException {
        setConsumerGroup(NamespaceUtil.wrapNamespace(this.getNamespace(), this.consumerGroup));
        this.defaultMQPushConsumerImpl.start();
        if (enableTrace) {
            try {
                AsyncTraceDispatcher dispatcher = new AsyncTraceDispatcher(consumerGroup,
                    TraceDispatcher.Type.CONSUME, getTraceMsgBatchNum(), traceTopic, rpcHook);
                dispatcher.setHostConsumer(this.defaultMQPushConsumerImpl);
                dispatcher.setNamespaceV2(namespaceV2);
                traceDispatcher = dispatcher;
                this.defaultMQPushConsumerImpl.registerConsumeMessageHook(
                    new ConsumeMessageTraceHookImpl(traceDispatcher));
            } catch (Throwable e) {
                // FIX: include the cause in the log; previously the stack trace was silently dropped.
                log.error("system mqtrace hook init failed ,maybe can't send msg trace data", e);
            }
        }
        if (null != traceDispatcher) {
            if (traceDispatcher instanceof AsyncTraceDispatcher) {
                // The trace producer must honor the same TLS setting as this consumer.
                ((AsyncTraceDispatcher) traceDispatcher).getTraceProducer().setUseTLS(isUseTLS());
            }
            try {
                traceDispatcher.start(this.getNamesrvAddr(), this.getAccessChannel());
            } catch (MQClientException e) {
                log.warn("trace dispatcher start failed ", e);
            }
        }
    }

    /**
     * Shut down this client and release underlying resources.
     */
    @Override
    public void shutdown() {
        this.defaultMQPushConsumerImpl.shutdown(awaitTerminationMillisWhenShutdown);
        if (null != traceDispatcher) {
            traceDispatcher.shutdown();
        }
    }

    @Override
    @Deprecated
    public void registerMessageListener(MessageListener messageListener) {
        this.messageListener = messageListener;
        this.defaultMQPushConsumerImpl.registerMessageListener(messageListener);
    }

    /**
     * Register a callback to execute on message arrival for concurrent consuming.
     *
     * @param messageListener message handling callback.
     */
    @Override
    public void registerMessageListener(MessageListenerConcurrently messageListener) {
        this.messageListener = messageListener;
        this.defaultMQPushConsumerImpl.registerMessageListener(messageListener);
    }

    /**
     * Register a callback to execute on message arrival for orderly consuming.
     *
     * @param messageListener message handling callback.
     */
    @Override
    public void registerMessageListener(MessageListenerOrderly messageListener) {
        this.messageListener = messageListener;
        this.defaultMQPushConsumerImpl.registerMessageListener(messageListener);
    }

    /**
     * Subscribe a topic to consuming subscription.
     *
     * @param topic topic to subscribe.
     * @param subExpression subscription expression.it only support or operation such as "tag1 || tag2 || tag3" <br>
     * if null or * expression,meaning subscribe all
     * @throws MQClientException if there is any client error.
     */
    @Override
    public void subscribe(String topic, String subExpression) throws MQClientException {
        this.defaultMQPushConsumerImpl.subscribe(withNamespace(topic), subExpression);
    }

    /**
     * Subscribe a topic to consuming subscription.
     *
     * @param topic topic to consume.
     * @param fullClassName full class name, must extend org.apache.rocketmq.common.filter.MessageFilter
     * @param filterClassSource class source code, used UTF-8 file encoding, must be responsible for your code safety
     */
    @Override
    public void subscribe(String topic, String fullClassName, String filterClassSource) throws MQClientException {
        this.defaultMQPushConsumerImpl.subscribe(withNamespace(topic), fullClassName, filterClassSource);
    }

    /**
     * Subscribe a topic by message selector.
     *
     * @param topic topic to consume.
     * @param messageSelector {@link org.apache.rocketmq.client.consumer.MessageSelector}
     * @see org.apache.rocketmq.client.consumer.MessageSelector#bySql
     * @see org.apache.rocketmq.client.consumer.MessageSelector#byTag
     */
    @Override
    public void subscribe(final String topic, final MessageSelector messageSelector) throws MQClientException {
        this.defaultMQPushConsumerImpl.subscribe(withNamespace(topic), messageSelector);
    }

    /**
     * Un-subscribe the specified topic from subscription.
     *
     * @param topic message topic
     */
    @Override
    public void unsubscribe(String topic) {
        this.defaultMQPushConsumerImpl.unsubscribe(topic);
    }

    /**
     * Update the message consuming thread core pool size.
     *
     * @param corePoolSize new core pool size.
     */
    @Override
    public void updateCorePoolSize(int corePoolSize) {
        this.defaultMQPushConsumerImpl.updateCorePoolSize(corePoolSize);
    }

    /**
     * Suspend pulling new messages.
     */
    @Override
    public void suspend() {
        this.defaultMQPushConsumerImpl.suspend();
    }

    /**
     * Resume pulling.
     */
    @Override
    public void resume() {
        this.defaultMQPushConsumerImpl.resume();
    }

    public boolean isPause() {
        return this.defaultMQPushConsumerImpl.isPause();
    }

    public boolean isConsumeOrderly() {
        return this.defaultMQPushConsumerImpl.isConsumeOrderly();
    }

    public void registerConsumeMessageHook(final ConsumeMessageHook hook) {
        this.defaultMQPushConsumerImpl.registerConsumeMessageHook(hook);
    }

    /**
     * This method will be removed in a certain version after April 5, 2020, so please do not use this method.
     */
    @Deprecated
    public OffsetStore getOffsetStore() {
        return offsetStore;
    }

    /**
     * This method will be removed in a certain version after April 5, 2020, so please do not use this method.
     */
    @Deprecated
    public void setOffsetStore(OffsetStore offsetStore) {
        this.offsetStore = offsetStore;
    }

    public String getConsumeTimestamp() {
        return consumeTimestamp;
    }

    public void setConsumeTimestamp(String consumeTimestamp) {
        this.consumeTimestamp = consumeTimestamp;
    }

    public boolean isPostSubscriptionWhenPull() {
        return postSubscriptionWhenPull;
    }

    public void setPostSubscriptionWhenPull(boolean postSubscriptionWhenPull) {
        this.postSubscriptionWhenPull = postSubscriptionWhenPull;
    }

    @Override
    public boolean isUnitMode() {
        return unitMode;
    }

    @Override
    public void setUnitMode(boolean isUnitMode) {
        this.unitMode = isUnitMode;
    }

    public long getAdjustThreadPoolNumsThreshold() {
        return adjustThreadPoolNumsThreshold;
    }

    public void setAdjustThreadPoolNumsThreshold(long adjustThreadPoolNumsThreshold) {
        this.adjustThreadPoolNumsThreshold = adjustThreadPoolNumsThreshold;
    }

    public int getMaxReconsumeTimes() {
        return maxReconsumeTimes;
    }

    public void setMaxReconsumeTimes(final int maxReconsumeTimes) {
        this.maxReconsumeTimes = maxReconsumeTimes;
    }

    public long getSuspendCurrentQueueTimeMillis() {
        return suspendCurrentQueueTimeMillis;
    }

    public void setSuspendCurrentQueueTimeMillis(final long suspendCurrentQueueTimeMillis) {
        this.suspendCurrentQueueTimeMillis = suspendCurrentQueueTimeMillis;
    }

    public long getConsumeTimeout() {
        return consumeTimeout;
    }

    public void setConsumeTimeout(final long consumeTimeout) {
        this.consumeTimeout = consumeTimeout;
    }

    public long getPopInvisibleTime() {
        return popInvisibleTime;
    }

    public void setPopInvisibleTime(long popInvisibleTime) {
        this.popInvisibleTime = popInvisibleTime;
    }

    public long getAwaitTerminationMillisWhenShutdown() {
        return awaitTerminationMillisWhenShutdown;
    }

    public void setAwaitTerminationMillisWhenShutdown(long awaitTerminationMillisWhenShutdown) {
        this.awaitTerminationMillisWhenShutdown = awaitTerminationMillisWhenShutdown;
    }

    public int getPullBatchSizeInBytes() {
        return pullBatchSizeInBytes;
    }

    public void setPullBatchSizeInBytes(int pullBatchSizeInBytes) {
        this.pullBatchSizeInBytes = pullBatchSizeInBytes;
    }

    public TraceDispatcher getTraceDispatcher() {
        return traceDispatcher;
    }

    public int getPopBatchNums() {
        return popBatchNums;
    }

    public void setPopBatchNums(int popBatchNums) {
        this.popBatchNums = popBatchNums;
    }

    public boolean isClientRebalance() {
        return clientRebalance;
    }

    public void setClientRebalance(boolean clientRebalance) {
        this.clientRebalance = clientRebalance;
    }

    public MessageQueueListener getMessageQueueListener() {
        return messageQueueListener;
    }

    public void setMessageQueueListener(MessageQueueListener messageQueueListener) {
        this.messageQueueListener = messageQueueListener;
    }
}
googleapis/google-cloud-java
36,520
java-dataform/proto-google-cloud-dataform-v1/src/main/java/com/google/cloud/dataform/v1/CreateRepositoryRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/dataform/v1/dataform.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.dataform.v1; /** * * * <pre> * `CreateRepository` request message. * </pre> * * Protobuf type {@code google.cloud.dataform.v1.CreateRepositoryRequest} */ public final class CreateRepositoryRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.dataform.v1.CreateRepositoryRequest) CreateRepositoryRequestOrBuilder { private static final long serialVersionUID = 0L; // Use CreateRepositoryRequest.newBuilder() to construct. 
private CreateRepositoryRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CreateRepositoryRequest() { parent_ = ""; repositoryId_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new CreateRepositoryRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dataform.v1.DataformProto .internal_static_google_cloud_dataform_v1_CreateRepositoryRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dataform.v1.DataformProto .internal_static_google_cloud_dataform_v1_CreateRepositoryRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dataform.v1.CreateRepositoryRequest.class, com.google.cloud.dataform.v1.CreateRepositoryRequest.Builder.class); } private int bitField0_; public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. The location in which to create the repository. Must be in the * format `projects/&#42;&#47;locations/&#42;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. The location in which to create the repository. Must be in the * format `projects/&#42;&#47;locations/&#42;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... 
} * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int REPOSITORY_FIELD_NUMBER = 2; private com.google.cloud.dataform.v1.Repository repository_; /** * * * <pre> * Required. The repository to create. * </pre> * * <code> * .google.cloud.dataform.v1.Repository repository = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the repository field is set. */ @java.lang.Override public boolean hasRepository() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The repository to create. * </pre> * * <code> * .google.cloud.dataform.v1.Repository repository = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The repository. */ @java.lang.Override public com.google.cloud.dataform.v1.Repository getRepository() { return repository_ == null ? com.google.cloud.dataform.v1.Repository.getDefaultInstance() : repository_; } /** * * * <pre> * Required. The repository to create. * </pre> * * <code> * .google.cloud.dataform.v1.Repository repository = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.dataform.v1.RepositoryOrBuilder getRepositoryOrBuilder() { return repository_ == null ? com.google.cloud.dataform.v1.Repository.getDefaultInstance() : repository_; } public static final int REPOSITORY_ID_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object repositoryId_ = ""; /** * * * <pre> * Required. The ID to use for the repository, which will become the final * component of the repository's resource name. 
* </pre> * * <code>string repository_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The repositoryId. */ @java.lang.Override public java.lang.String getRepositoryId() { java.lang.Object ref = repositoryId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); repositoryId_ = s; return s; } } /** * * * <pre> * Required. The ID to use for the repository, which will become the final * component of the repository's resource name. * </pre> * * <code>string repository_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for repositoryId. */ @java.lang.Override public com.google.protobuf.ByteString getRepositoryIdBytes() { java.lang.Object ref = repositoryId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); repositoryId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getRepository()); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(repositoryId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, repositoryId_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if 
(!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getRepository()); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(repositoryId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, repositoryId_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.dataform.v1.CreateRepositoryRequest)) { return super.equals(obj); } com.google.cloud.dataform.v1.CreateRepositoryRequest other = (com.google.cloud.dataform.v1.CreateRepositoryRequest) obj; if (!getParent().equals(other.getParent())) return false; if (hasRepository() != other.hasRepository()) return false; if (hasRepository()) { if (!getRepository().equals(other.getRepository())) return false; } if (!getRepositoryId().equals(other.getRepositoryId())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); if (hasRepository()) { hash = (37 * hash) + REPOSITORY_FIELD_NUMBER; hash = (53 * hash) + getRepository().hashCode(); } hash = (37 * hash) + REPOSITORY_ID_FIELD_NUMBER; hash = (53 * hash) + getRepositoryId().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.dataform.v1.CreateRepositoryRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
com.google.cloud.dataform.v1.CreateRepositoryRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dataform.v1.CreateRepositoryRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dataform.v1.CreateRepositoryRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dataform.v1.CreateRepositoryRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dataform.v1.CreateRepositoryRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dataform.v1.CreateRepositoryRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dataform.v1.CreateRepositoryRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dataform.v1.CreateRepositoryRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static 
com.google.cloud.dataform.v1.CreateRepositoryRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dataform.v1.CreateRepositoryRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dataform.v1.CreateRepositoryRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.dataform.v1.CreateRepositoryRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * `CreateRepository` request message. 
* </pre> * * Protobuf type {@code google.cloud.dataform.v1.CreateRepositoryRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.dataform.v1.CreateRepositoryRequest) com.google.cloud.dataform.v1.CreateRepositoryRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dataform.v1.DataformProto .internal_static_google_cloud_dataform_v1_CreateRepositoryRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dataform.v1.DataformProto .internal_static_google_cloud_dataform_v1_CreateRepositoryRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dataform.v1.CreateRepositoryRequest.class, com.google.cloud.dataform.v1.CreateRepositoryRequest.Builder.class); } // Construct using com.google.cloud.dataform.v1.CreateRepositoryRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getRepositoryFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; repository_ = null; if (repositoryBuilder_ != null) { repositoryBuilder_.dispose(); repositoryBuilder_ = null; } repositoryId_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.dataform.v1.DataformProto .internal_static_google_cloud_dataform_v1_CreateRepositoryRequest_descriptor; } @java.lang.Override public com.google.cloud.dataform.v1.CreateRepositoryRequest getDefaultInstanceForType() { return 
com.google.cloud.dataform.v1.CreateRepositoryRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.dataform.v1.CreateRepositoryRequest build() { com.google.cloud.dataform.v1.CreateRepositoryRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.dataform.v1.CreateRepositoryRequest buildPartial() { com.google.cloud.dataform.v1.CreateRepositoryRequest result = new com.google.cloud.dataform.v1.CreateRepositoryRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.dataform.v1.CreateRepositoryRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.repository_ = repositoryBuilder_ == null ? repository_ : repositoryBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000004) != 0)) { result.repositoryId_ = repositoryId_; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { 
return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.dataform.v1.CreateRepositoryRequest) { return mergeFrom((com.google.cloud.dataform.v1.CreateRepositoryRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.dataform.v1.CreateRepositoryRequest other) { if (other == com.google.cloud.dataform.v1.CreateRepositoryRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasRepository()) { mergeRepository(other.getRepository()); } if (!other.getRepositoryId().isEmpty()) { repositoryId_ = other.repositoryId_; bitField0_ |= 0x00000004; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getRepositoryFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 case 26: { repositoryId_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } 
private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The location in which to create the repository. Must be in the * format `projects/&#42;&#47;locations/&#42;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The location in which to create the repository. Must be in the * format `projects/&#42;&#47;locations/&#42;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The location in which to create the repository. Must be in the * format `projects/&#42;&#47;locations/&#42;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The location in which to create the repository. Must be in the * format `projects/&#42;&#47;locations/&#42;`. 
* </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The location in which to create the repository. Must be in the * format `projects/&#42;&#47;locations/&#42;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private com.google.cloud.dataform.v1.Repository repository_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dataform.v1.Repository, com.google.cloud.dataform.v1.Repository.Builder, com.google.cloud.dataform.v1.RepositoryOrBuilder> repositoryBuilder_; /** * * * <pre> * Required. The repository to create. * </pre> * * <code> * .google.cloud.dataform.v1.Repository repository = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the repository field is set. */ public boolean hasRepository() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The repository to create. * </pre> * * <code> * .google.cloud.dataform.v1.Repository repository = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The repository. */ public com.google.cloud.dataform.v1.Repository getRepository() { if (repositoryBuilder_ == null) { return repository_ == null ? com.google.cloud.dataform.v1.Repository.getDefaultInstance() : repository_; } else { return repositoryBuilder_.getMessage(); } } /** * * * <pre> * Required. 
The repository to create. * </pre> * * <code> * .google.cloud.dataform.v1.Repository repository = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setRepository(com.google.cloud.dataform.v1.Repository value) { if (repositoryBuilder_ == null) { if (value == null) { throw new NullPointerException(); } repository_ = value; } else { repositoryBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The repository to create. * </pre> * * <code> * .google.cloud.dataform.v1.Repository repository = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setRepository(com.google.cloud.dataform.v1.Repository.Builder builderForValue) { if (repositoryBuilder_ == null) { repository_ = builderForValue.build(); } else { repositoryBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The repository to create. * </pre> * * <code> * .google.cloud.dataform.v1.Repository repository = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeRepository(com.google.cloud.dataform.v1.Repository value) { if (repositoryBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && repository_ != null && repository_ != com.google.cloud.dataform.v1.Repository.getDefaultInstance()) { getRepositoryBuilder().mergeFrom(value); } else { repository_ = value; } } else { repositoryBuilder_.mergeFrom(value); } if (repository_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. The repository to create. * </pre> * * <code> * .google.cloud.dataform.v1.Repository repository = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearRepository() { bitField0_ = (bitField0_ & ~0x00000002); repository_ = null; if (repositoryBuilder_ != null) { repositoryBuilder_.dispose(); repositoryBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. 
The repository to create. * </pre> * * <code> * .google.cloud.dataform.v1.Repository repository = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.dataform.v1.Repository.Builder getRepositoryBuilder() { bitField0_ |= 0x00000002; onChanged(); return getRepositoryFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The repository to create. * </pre> * * <code> * .google.cloud.dataform.v1.Repository repository = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.dataform.v1.RepositoryOrBuilder getRepositoryOrBuilder() { if (repositoryBuilder_ != null) { return repositoryBuilder_.getMessageOrBuilder(); } else { return repository_ == null ? com.google.cloud.dataform.v1.Repository.getDefaultInstance() : repository_; } } /** * * * <pre> * Required. The repository to create. * </pre> * * <code> * .google.cloud.dataform.v1.Repository repository = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dataform.v1.Repository, com.google.cloud.dataform.v1.Repository.Builder, com.google.cloud.dataform.v1.RepositoryOrBuilder> getRepositoryFieldBuilder() { if (repositoryBuilder_ == null) { repositoryBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dataform.v1.Repository, com.google.cloud.dataform.v1.Repository.Builder, com.google.cloud.dataform.v1.RepositoryOrBuilder>( getRepository(), getParentForChildren(), isClean()); repository_ = null; } return repositoryBuilder_; } private java.lang.Object repositoryId_ = ""; /** * * * <pre> * Required. The ID to use for the repository, which will become the final * component of the repository's resource name. * </pre> * * <code>string repository_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The repositoryId. 
*/ public java.lang.String getRepositoryId() { java.lang.Object ref = repositoryId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); repositoryId_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The ID to use for the repository, which will become the final * component of the repository's resource name. * </pre> * * <code>string repository_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for repositoryId. */ public com.google.protobuf.ByteString getRepositoryIdBytes() { java.lang.Object ref = repositoryId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); repositoryId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The ID to use for the repository, which will become the final * component of the repository's resource name. * </pre> * * <code>string repository_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The repositoryId to set. * @return This builder for chaining. */ public Builder setRepositoryId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } repositoryId_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Required. The ID to use for the repository, which will become the final * component of the repository's resource name. * </pre> * * <code>string repository_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. */ public Builder clearRepositoryId() { repositoryId_ = getDefaultInstance().getRepositoryId(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * Required. The ID to use for the repository, which will become the final * component of the repository's resource name. 
* </pre> * * <code>string repository_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The bytes for repositoryId to set. * @return This builder for chaining. */ public Builder setRepositoryIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); repositoryId_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.dataform.v1.CreateRepositoryRequest) } // @@protoc_insertion_point(class_scope:google.cloud.dataform.v1.CreateRepositoryRequest) private static final com.google.cloud.dataform.v1.CreateRepositoryRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.dataform.v1.CreateRepositoryRequest(); } public static com.google.cloud.dataform.v1.CreateRepositoryRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CreateRepositoryRequest> PARSER = new com.google.protobuf.AbstractParser<CreateRepositoryRequest>() { @java.lang.Override public CreateRepositoryRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch 
(java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<CreateRepositoryRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CreateRepositoryRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.dataform.v1.CreateRepositoryRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
oracle/graal
36,888
compiler/src/jdk.graal.compiler/src/jdk/graal/compiler/core/amd64/AMD64NodeMatchRules.java
/* * Copyright (c) 2009, 2025, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. 
*/ package jdk.graal.compiler.core.amd64; import static jdk.graal.compiler.asm.amd64.AMD64Assembler.AMD64BinaryArithmetic.ADD; import static jdk.graal.compiler.asm.amd64.AMD64Assembler.AMD64BinaryArithmetic.AND; import static jdk.graal.compiler.asm.amd64.AMD64Assembler.AMD64BinaryArithmetic.OR; import static jdk.graal.compiler.asm.amd64.AMD64Assembler.AMD64BinaryArithmetic.SUB; import static jdk.graal.compiler.asm.amd64.AMD64Assembler.AMD64BinaryArithmetic.XOR; import static jdk.graal.compiler.asm.amd64.AMD64Assembler.AMD64RMOp.MOVSX; import static jdk.graal.compiler.asm.amd64.AMD64Assembler.AMD64RMOp.MOVSXB; import static jdk.graal.compiler.asm.amd64.AMD64Assembler.AMD64RMOp.MOVSXD; import static jdk.graal.compiler.asm.amd64.AMD64Assembler.AMD64Shift.ROL; import static jdk.graal.compiler.asm.amd64.AMD64Assembler.VexRVMOp.VADDSD; import static jdk.graal.compiler.asm.amd64.AMD64Assembler.VexRVMOp.VADDSS; import static jdk.graal.compiler.asm.amd64.AMD64Assembler.VexRVMOp.VMULSD; import static jdk.graal.compiler.asm.amd64.AMD64Assembler.VexRVMOp.VMULSS; import static jdk.graal.compiler.asm.amd64.AMD64Assembler.VexRVMOp.VSUBSD; import static jdk.graal.compiler.asm.amd64.AMD64Assembler.VexRVMOp.VSUBSS; import static jdk.graal.compiler.asm.amd64.AMD64BaseAssembler.OperandSize.DWORD; import static jdk.graal.compiler.asm.amd64.AMD64BaseAssembler.OperandSize.QWORD; import static jdk.graal.compiler.asm.amd64.AMD64BaseAssembler.OperandSize.SD; import static jdk.graal.compiler.asm.amd64.AMD64BaseAssembler.OperandSize.SS; import jdk.graal.compiler.asm.amd64.AMD64Assembler; import jdk.graal.compiler.asm.amd64.AMD64Assembler.AMD64RMOp; import jdk.graal.compiler.asm.amd64.AMD64Assembler.SSEOp; import jdk.graal.compiler.asm.amd64.AMD64BaseAssembler.OperandSize; import jdk.graal.compiler.core.common.LIRKind; import jdk.graal.compiler.core.common.NumUtil; import jdk.graal.compiler.core.common.calc.CanonicalCondition; import jdk.graal.compiler.core.common.calc.Condition; import 
jdk.graal.compiler.core.common.calc.FloatConvertCategory; import jdk.graal.compiler.core.common.memory.MemoryExtendKind; import jdk.graal.compiler.core.common.memory.MemoryOrderMode; import jdk.graal.compiler.core.common.type.PrimitiveStamp; import jdk.graal.compiler.core.gen.NodeLIRBuilder; import jdk.graal.compiler.core.gen.NodeMatchRules; import jdk.graal.compiler.core.match.ComplexMatchResult; import jdk.graal.compiler.core.match.MatchRule; import jdk.graal.compiler.debug.Assertions; import jdk.graal.compiler.debug.GraalError; import jdk.graal.compiler.lir.CastValue; import jdk.graal.compiler.lir.LIRFrameState; import jdk.graal.compiler.lir.LIRValueUtil; import jdk.graal.compiler.lir.LabelRef; import jdk.graal.compiler.lir.amd64.AMD64AddressValue; import jdk.graal.compiler.lir.amd64.AMD64BinaryConsumer; import jdk.graal.compiler.lir.amd64.AMD64ControlFlow; import jdk.graal.compiler.lir.amd64.AMD64ControlFlow.TestBranchOp; import jdk.graal.compiler.lir.amd64.AMD64ControlFlow.TestConstBranchOp; import jdk.graal.compiler.lir.amd64.AMD64UnaryConsumer; import jdk.graal.compiler.lir.gen.LIRGeneratorTool; import jdk.graal.compiler.nodes.ConstantNode; import jdk.graal.compiler.nodes.DeoptimizingNode; import jdk.graal.compiler.nodes.IfNode; import jdk.graal.compiler.nodes.NodeView; import jdk.graal.compiler.nodes.ValueNode; import jdk.graal.compiler.nodes.calc.CompareNode; import jdk.graal.compiler.nodes.calc.FloatConvertNode; import jdk.graal.compiler.nodes.calc.LeftShiftNode; import jdk.graal.compiler.nodes.calc.NarrowNode; import jdk.graal.compiler.nodes.calc.ReinterpretNode; import jdk.graal.compiler.nodes.calc.SignExtendNode; import jdk.graal.compiler.nodes.calc.UnsignedRightShiftNode; import jdk.graal.compiler.nodes.calc.ZeroExtendNode; import jdk.graal.compiler.nodes.java.LogicCompareAndSwapNode; import jdk.graal.compiler.nodes.java.ValueCompareAndSwapNode; import jdk.graal.compiler.nodes.memory.AddressableMemoryAccess; import 
jdk.graal.compiler.nodes.memory.LIRLowerableAccess;
import jdk.graal.compiler.nodes.memory.MemoryAccess;
import jdk.graal.compiler.nodes.memory.ReadNode;
import jdk.graal.compiler.nodes.memory.WriteNode;
import jdk.graal.compiler.nodes.util.GraphUtil;
import jdk.vm.ci.amd64.AMD64;
import jdk.vm.ci.amd64.AMD64.CPUFeature;
import jdk.vm.ci.amd64.AMD64Kind;
import jdk.vm.ci.meta.AllocatableValue;
import jdk.vm.ci.meta.JavaConstant;
import jdk.vm.ci.meta.JavaKind;
import jdk.vm.ci.meta.PlatformKind;
import jdk.vm.ci.meta.Value;
import jdk.vm.ci.meta.ValueKind;

/**
 * AMD64-specific {@code @MatchRule} definitions that fuse small node patterns (compare+branch,
 * arithmetic+memory-read, read-modify-write, extend/convert-of-read, rotations, BMI1 bit tricks)
 * into single combined LIR operations instead of emitting each node separately.
 */
public class AMD64NodeMatchRules extends NodeMatchRules {

    /*
     * Note that many of the read + action simplifications here may convert an ordered read into a
     * plain read. However, since on AMD64 no fences/barriers are added for ordered reads and also
     * this pattern matching comes after all memory movement, it does not affect program
     * correctness.
     */

    public AMD64NodeMatchRules(LIRGeneratorTool gen) {
        super(gen);
    }

    /**
     * Returns the deoptimization frame state for {@code access} if it can deoptimize,
     * otherwise {@code null} (the fused LIR op then needs no state).
     */
    protected LIRFrameState getState(MemoryAccess access) {
        if (access instanceof DeoptimizingNode) {
            return state((DeoptimizingNode) access);
        }
        return null;
    }

    /** Platform kind (register class + width) of the value transferred by {@code access}. */
    protected AMD64Kind getMemoryKind(LIRLowerableAccess access) {
        return (AMD64Kind) getLirKind(access).getPlatformKind();
    }

    /** LIR kind derived from the access stamp of {@code access}. */
    protected LIRKind getLirKind(LIRLowerableAccess access) {
        return gen.getLIRKind(access.getAccessStamp(NodeView.DEFAULT));
    }

    /** Maps the access's platform kind to the AMD64 operand size used to encode the fused op. */
    protected OperandSize getMemorySize(LIRLowerableAccess access) {
        switch (getMemoryKind(access)) {
            case BYTE:
                return OperandSize.BYTE;
            case WORD:
                return OperandSize.WORD;
            case DWORD:
                return OperandSize.DWORD;
            case QWORD:
                return OperandSize.QWORD;
            case SINGLE:
                return OperandSize.SS;
            case DOUBLE:
                return OperandSize.SD;
            default:
                throw GraalError.shouldNotReachHere("unsupported memory access type " + getMemoryKind(access)); // ExcludeFromJacocoGeneratedReport
        }
    }

    /**
     * Tries to fuse a compare-against-memory with the branch of {@code ifNode} into one
     * compare-branch LIR op. Returns {@code null} when the pattern cannot be used:
     * a long constant that does not fit in imm32, or any constant compare on XMM (float) kinds.
     */
    protected ComplexMatchResult emitCompareBranchMemory(IfNode ifNode, CompareNode compare, ValueNode value, LIRLowerableAccess access) {
        Condition cond = compare.condition().asCondition();
        AMD64Kind kind = getMemoryKind(access);
        boolean matchedAsConstant = false; // For assertion checking

        if (value.isConstant()) {
            JavaConstant constant = value.asJavaConstant();
            if (constant != null) {
                if (kind == AMD64Kind.QWORD && !constant.getJavaKind().isObject() && !NumUtil.isInt(constant.asLong())) {
                    // Only imm32 as long
                    return null;
                }
                // A QWORD that can be encoded as int can be embedded as a constant
                matchedAsConstant = kind == AMD64Kind.QWORD && !constant.getJavaKind().isObject() && NumUtil.isInt(constant.asLong());
            }
            if (kind == AMD64Kind.DWORD) {
                // Any DWORD value should be embeddable as a constant
                matchedAsConstant = true;
            }
            if (kind.isXMM()) {
                ifNode.getDebug().log("Skipping constant compares for float kinds");
                return null;
            }
        }
        boolean matchedAsConstantFinal = matchedAsConstant;

        /*
         * emitCompareBranchMemory expects the memory on the right, so mirror the condition if
         * that's not true. It might be mirrored again when the actual compare is emitted but
         * that's ok.
         */
        Condition finalCondition = GraphUtil.unproxify(compare.getX()) == access ? cond.mirror() : cond;
        return new ComplexMatchResult() {
            @Override
            public Value evaluate(NodeLIRBuilder builder) {
                LabelRef trueLabel = getLIRBlock(ifNode.trueSuccessor());
                LabelRef falseLabel = getLIRBlock(ifNode.falseSuccessor());
                boolean unorderedIsTrue = compare.unorderedIsTrue();
                double trueLabelProbability = ifNode.probability(ifNode.trueSuccessor());
                Value other = operand(value);
                /*
                 * Check that patterns which were matched as a constant actually end up seeing a
                 * constant in the LIR.
                 */
                assert !matchedAsConstantFinal || !LIRValueUtil.isVariable(other) : "expected constant value " + value;
                AMD64AddressValue address = (AMD64AddressValue) operand(access.getAddress());
                getLIRGeneratorTool().emitCompareBranchMemory(kind, other, address, getState(access), finalCondition, unorderedIsTrue, trueLabel, falseLabel, trueLabelProbability);
                return null;
            }
        };
    }

    /**
     * Fuses an integer-test (TEST instruction semantics) of a memory operand with the branch of
     * {@code x}. Chooses the constant form (TestConstBranchOp) when {@code value} is an
     * imm32-encodable constant, the register form otherwise. Returns {@code null} for vector
     * kinds or long constants that do not fit in imm32.
     */
    private ComplexMatchResult emitIntegerTestBranchMemory(IfNode x, ValueNode value, LIRLowerableAccess access) {
        LabelRef trueLabel = getLIRBlock(x.trueSuccessor());
        LabelRef falseLabel = getLIRBlock(x.falseSuccessor());
        double trueLabelProbability = x.probability(x.trueSuccessor());
        AMD64Kind kind = getMemoryKind(access);
        OperandSize size = kind == AMD64Kind.QWORD ? QWORD : DWORD;
        if (kind.getVectorLength() > 1) {
            return null;
        }

        if (value.isJavaConstant()) {
            JavaConstant constant = value.asJavaConstant();
            if (kind == AMD64Kind.QWORD && !NumUtil.isInt(constant.asLong())) {
                // Only imm32 as long
                return null;
            }
            return builder -> {
                AMD64AddressValue address = (AMD64AddressValue) operand(access.getAddress());
                gen.append(new TestConstBranchOp(size, address, (int) constant.asLong(), getState(access), Condition.EQ, trueLabel, falseLabel, trueLabelProbability));
                return null;
            };
        } else {
            return builder -> {
                AMD64AddressValue address = (AMD64AddressValue) operand(access.getAddress());
                gen.append(new TestBranchOp(size, gen.asAllocatable(operand(value)), address, getState(access), Condition.EQ, trueLabel, falseLabel, trueLabelProbability));
                return null;
            };
        }
    }

    /**
     * Emits a conversion op ({@code op}) whose source is read directly from memory.
     * {@code addressKind}, when non-null, overrides the kind of the address value
     * (used when a narrow is folded into the read).
     */
    protected ComplexMatchResult emitConvertMemoryOp(PlatformKind kind, AMD64RMOp op, OperandSize size, AddressableMemoryAccess access, ValueKind<?> addressKind) {
        return builder -> {
            AMD64AddressValue address = (AMD64AddressValue) operand(access.getAddress());
            LIRFrameState state = getState(access);
            if (addressKind != null) {
                address = address.withKind(addressKind);
            }
            return getArithmeticLIRGenerator().emitConvertMemoryOp(kind, op, size, address, state);
        };
    }

    /** Convenience overload of the five-argument form with no address-kind override. */
    protected ComplexMatchResult emitConvertMemoryOp(PlatformKind kind, AMD64RMOp op, OperandSize size, AddressableMemoryAccess access) {
        return emitConvertMemoryOp(kind, op, size, access, null);
    }

    /**
     * Selects the MOVSX/MOVSXB/MOVSXD variant that sign-extends a memory operand from
     * {@code fromBits} to {@code toBits}. Returns {@code null} when no instruction applies
     * (no-op extension, or 32->32 which needs no sign extension since sub-int values are
     * internally represented as 32-bit values).
     */
    private ComplexMatchResult emitSignExtendMemory(AddressableMemoryAccess access, int fromBits, int toBits, ValueKind<?> addressKind) {
        assert fromBits <= toBits && toBits <= 64 : fromBits + " " + toBits;
        AMD64Kind kind = null;
        AMD64RMOp op;
        OperandSize size;
        if (fromBits == toBits) {
            return null;
        } else if (toBits > 32) {
            kind = AMD64Kind.QWORD;
            size = OperandSize.QWORD;
            // sign extend to 64 bits
            switch (fromBits) {
                case 8:
                    op = MOVSXB;
                    break;
                case 16:
                    op = MOVSX;
                    break;
                case 32:
                    op = MOVSXD;
                    break;
                default:
                    throw GraalError.unimplemented("unsupported sign extension (" + fromBits + " bit -> " + toBits + " bit)"); // ExcludeFromJacocoGeneratedReport
            }
        } else {
            kind = AMD64Kind.DWORD;
            size = OperandSize.DWORD;
            // sign extend to 32 bits (smaller values are internally represented as 32 bit values)
            switch (fromBits) {
                case 8:
                    op = MOVSXB;
                    break;
                case 16:
                    op = MOVSX;
                    break;
                case 32:
                    return null;
                default:
                    throw GraalError.unimplemented("unsupported sign extension (" + fromBits + " bit -> " + toBits + " bit)"); // ExcludeFromJacocoGeneratedReport
            }
        }
        if (kind != null && op != null) {
            return emitConvertMemoryOp(kind, op, size, access, addressKind);
        }
        return null;
    }

    /**
     * Folds a reinterpret into the load: performs a plain load of the memory at the target
     * LIR kind {@code to}, so no separate move/reinterpret op is needed.
     */
    private Value emitReinterpretMemory(LIRKind to, AddressableMemoryAccess access) {
        AMD64AddressValue address = (AMD64AddressValue) operand(access.getAddress());
        LIRFrameState state = getState(access);
        return getArithmeticLIRGenerator().emitLoad(to, address, state, MemoryOrderMode.PLAIN, MemoryExtendKind.DEFAULT);
    }

    /** Whether the target CPU reports the given feature (e.g. BMI1 for ANDN/BLSI/BLSMSK/BLSR). */
    private boolean supports(CPUFeature feature) {
        return ((AMD64) getLIRGeneratorTool().target().arch).getFeatures().contains(feature);
    }

    /** (~a & b) in one op; requires BMI1 (presumably lowered to ANDN -- see emitLogicalAndNot). */
    @MatchRule("(And (Not a) b)")
    public ComplexMatchResult logicalAndNot(ValueNode a, ValueNode b) {
        if (!supports(CPUFeature.BMI1)) {
            return null;
        }
        return builder -> getArithmeticLIRGenerator().emitLogicalAndNot(operand(a), operand(b));
    }

    /** (a & -a) extracts the lowest set bit; requires BMI1. */
    @MatchRule("(And a (Negate a))")
    public ComplexMatchResult lowestSetIsolatedBit(ValueNode a) {
        if (!supports(CPUFeature.BMI1)) {
            return null;
        }
        return builder -> getArithmeticLIRGenerator().emitLowestSetIsolatedBit(operand(a));
    }

    /**
     * (a ^ (a - 1)) produces a mask up to (and including) the lowest set bit; requires BMI1.
     * Only fires when {@code b} is the int/long constant -1, i.e. the Add is really a
     * subtraction by one.
     */
    @MatchRule("(Xor a (Add a b))")
    public ComplexMatchResult getMaskUpToLowestSetBit(ValueNode a, ValueNode b) {
        if (!supports(CPUFeature.BMI1)) {
            return null;
        }
        // Make sure that the pattern matches a subtraction by one.
        if (!b.isJavaConstant()) {
            return null;
        }
        JavaConstant bCst = b.asJavaConstant();
        long bValue;
        if (bCst.getJavaKind() == JavaKind.Int) {
            bValue = bCst.asInt();
        } else if (bCst.getJavaKind() == JavaKind.Long) {
            bValue = bCst.asLong();
        } else {
            return null;
        }
        if (bValue == -1) {
            return builder -> getArithmeticLIRGenerator().emitGetMaskUpToLowestSetBit(operand(a));
        } else {
            return null;
        }
    }

    /**
     * (a & (a - 1)) clears the lowest set bit; requires BMI1. Only fires when {@code b} is the
     * int/long constant -1 (i.e. the Add is a subtraction by one).
     */
    @MatchRule("(And a (Add a b))")
    public ComplexMatchResult resetLowestSetBit(ValueNode a, ValueNode b) {
        if (!supports(CPUFeature.BMI1)) {
            return null;
        }
        // Make sure that the pattern matches a subtraction by one.
        if (!b.isJavaConstant()) {
            return null;
        }
        JavaConstant bCst = b.asJavaConstant();
        long bValue;
        if (bCst.getJavaKind() == JavaKind.Int) {
            bValue = bCst.asInt();
        } else if (bCst.getJavaKind() == JavaKind.Long) {
            bValue = bCst.asLong();
        } else {
            return null;
        }
        if (bValue == -1) {
            return builder -> getArithmeticLIRGenerator().emitResetLowestSetBit(operand(a));
        } else {
            return null;
        }
    }

    /**
     * Single-bit test + branch fused into a BitTestAndBranchOp. Only used when exactly one bit
     * is set and the constant does not fit in an unsigned byte (otherwise the plain TEST
     * encoding is presumably at least as good -- see emitIntegerTestBranchMemory).
     */
    @MatchRule("(If (IntegerTest value Constant=a))")
    public ComplexMatchResult testBitAndBranch(IfNode root, ValueNode value, ConstantNode a) {
        long constant = a.asJavaConstant().asLong();
        if (Long.bitCount(constant) == 1 && !NumUtil.isUByte(constant)) {
            return builder -> {
                LabelRef trueDestination = getLIRBlock(root.trueSuccessor());
                LabelRef falseDestination = getLIRBlock(root.falseSuccessor());
                gen.append(new AMD64ControlFlow.BitTestAndBranchOp(trueDestination, falseDestination, gen.asAllocatable(operand(value)), root.getTrueSuccessorProbability(), Long.numberOfTrailingZeros(constant)));
                return null;
            };
        }
        return null;
    }

    /** Integer test against a memory operand fused with the branch. */
    @MatchRule("(If (IntegerTest Read=access value))")
    public ComplexMatchResult integerTestBranchMemory(IfNode root, LIRLowerableAccess access, ValueNode value) {
        return emitIntegerTestBranchMemory(root, value, access);
    }

    /** Compare against a memory operand fused with the branch, for all compare node kinds. */
    @MatchRule("(If (IntegerEquals=compare value Read=access))")
    @MatchRule("(If (IntegerLessThan=compare value Read=access))")
    @MatchRule("(If (IntegerBelow=compare value Read=access))")
    @MatchRule("(If (FloatEquals=compare value Read=access))")
    @MatchRule("(If (FloatLessThan=compare value Read=access))")
    @MatchRule("(If (PointerEquals=compare value Read=access))")
    @MatchRule("(If (ObjectEquals=compare value Read=access))")
    public ComplexMatchResult ifCompareMemory(IfNode root, CompareNode compare, ValueNode value, LIRLowerableAccess access) {
        return emitCompareBranchMemory(root, compare, value, access);
    }

    /**
     * Branch on the result of a value CAS: when the If compares the CAS result against the
     * expected value and the CAS has no other usage, the comparison can branch directly on the
     * CAS's flags instead of materializing the result.
     */
    @MatchRule("(If (ObjectEquals=compare value ValueCompareAndSwap=cas))")
    @MatchRule("(If (PointerEquals=compare value ValueCompareAndSwap=cas))")
    @MatchRule("(If (FloatEquals=compare value ValueCompareAndSwap=cas))")
    @MatchRule("(If (IntegerEquals=compare value ValueCompareAndSwap=cas))")
    public ComplexMatchResult ifCompareValueCas(IfNode root, CompareNode compare, ValueNode value, ValueCompareAndSwapNode cas) {
        assert compare.condition() == CanonicalCondition.EQ : Assertions.errorMessage(compare, value, cas);
        if (value == cas.getExpectedValue() && cas.hasExactlyOneUsage()) {
            return builder -> {
                LIRKind kind = getLirKind(cas);
                LabelRef trueLabel = getLIRBlock(root.trueSuccessor());
                LabelRef falseLabel = getLIRBlock(root.falseSuccessor());
                double trueLabelProbability = root.probability(root.trueSuccessor());
                Value expectedValue = operand(cas.getExpectedValue());
                Value newValue = operand(cas.getNewValue());
                AMD64AddressValue address = (AMD64AddressValue) operand(cas.getAddress());
                getLIRGeneratorTool().emitCompareAndSwapBranch(false, kind, address, expectedValue, newValue, Condition.EQ, trueLabel, falseLabel, trueLabelProbability, cas.getBarrierType());
                return null;
            };
        }
        return null;
    }

    /**
     * Branch on the boolean result of a logic CAS: when the If compares the 0/1 result against
     * the constant 0 or 1 and the CAS has no other usage, branch directly on the CAS's flags.
     * The constant decides whether EQ means success (1) or failure (0).
     */
    @MatchRule("(If (ObjectEquals=compare value LogicCompareAndSwap=cas))")
    @MatchRule("(If (PointerEquals=compare value LogicCompareAndSwap=cas))")
    @MatchRule("(If (FloatEquals=compare value LogicCompareAndSwap=cas))")
    @MatchRule("(If (IntegerEquals=compare value LogicCompareAndSwap=cas))")
    public ComplexMatchResult ifCompareLogicCas(IfNode root, CompareNode compare, ValueNode value, LogicCompareAndSwapNode cas) {
        JavaConstant constant = value.asJavaConstant();
        assert compare.condition() == CanonicalCondition.EQ : Assertions.errorMessage(root, compare, value, cas);
        if (constant != null && cas.hasExactlyOneUsage()) {
            long constantValue = constant.asLong();
            boolean successIsTrue;
            if (constantValue == 0) {
                successIsTrue = false;
            } else if (constantValue == 1) {
                successIsTrue = true;
            } else {
                return null;
            }
            return builder -> {
                LIRKind kind = getLirKind(cas);
                LabelRef trueLabel = getLIRBlock(root.trueSuccessor());
                LabelRef falseLabel = getLIRBlock(root.falseSuccessor());
                double trueLabelProbability = root.probability(root.trueSuccessor());
                Value expectedValue = operand(cas.getExpectedValue());
                Value newValue = operand(cas.getNewValue());
                AMD64AddressValue address = (AMD64AddressValue) operand(cas.getAddress());
                Condition condition = successIsTrue ? Condition.EQ : Condition.NE;
                getLIRGeneratorTool().emitCompareAndSwapBranch(true, kind, address, expectedValue, newValue, condition, trueLabel, falseLabel, trueLabelProbability, cas.getBarrierType());
                return null;
            };
        }
        return null;
    }

    // NOTE(review): this method has no @MatchRule annotation, unlike its siblings, and simply
    // delegates to emitCompareBranchMemory -- it appears to be unreferenced from this file;
    // confirm whether it is still needed before relying on it.
    public ComplexMatchResult ifLogicCas(IfNode root, CompareNode compare, ValueNode value, LIRLowerableAccess access) {
        return emitCompareBranchMemory(root, compare, value, access);
    }

    /**
     * (x << c1) | (x >>> c2) is a left rotation when the shift amounts add up to the operand
     * width (checked via the shift-amount mask); emitted as a ROL with a constant count.
     */
    @MatchRule("(Or (LeftShift=lshift value Constant) (UnsignedRightShift=rshift value Constant))")
    public ComplexMatchResult rotateLeftConstant(LeftShiftNode lshift, UnsignedRightShiftNode rshift) {
        JavaConstant lshiftConst = lshift.getY().asJavaConstant();
        JavaConstant rshiftConst = rshift.getY().asJavaConstant();
        if ((lshift.getShiftAmountMask() & (lshiftConst.asInt() + rshiftConst.asInt())) == 0) {
            return builder -> {
                Value a = operand(lshift.getX());
                OperandSize size = OperandSize.get(a.getPlatformKind());
                assert size == OperandSize.DWORD || size == OperandSize.QWORD : size;
                return getArithmeticLIRGenerator().emitShiftConst(ROL, size, a, lshiftConst);
            };
        }
        return null;
    }

    /**
     * (x << (delta - s)) | (x >>> s) with delta == 0 or 32 is a variable right rotation
     * (the hardware masks shift counts, so 0/32 both yield the complementary count).
     */
    @MatchRule("(Or (LeftShift value (Sub Constant=delta shiftAmount)) (UnsignedRightShift value shiftAmount))")
    public ComplexMatchResult rotateRightVariable(ValueNode value, ConstantNode delta, ValueNode shiftAmount) {
        if (delta.asJavaConstant().asLong() == 0 || delta.asJavaConstant().asLong() == 32) {
            return builder -> getArithmeticLIRGenerator().emitRor(operand(value), operand(shiftAmount));
        }
        return null;
    }

    /** Mirror image of {@link #rotateRightVariable}: emitted as a variable left rotation. */
    @MatchRule("(Or (LeftShift value shiftAmount) (UnsignedRightShift value (Sub Constant=delta shiftAmount)))")
    public ComplexMatchResult rotateLeftVariable(ValueNode value, ValueNode shiftAmount, ConstantNode delta) {
        if (delta.asJavaConstant().asLong() == 0 || delta.asJavaConstant().asLong() == 32) {
            return builder -> getArithmeticLIRGenerator().emitRol(operand(value), operand(shiftAmount));
        }
        return null;
    }

    /** Emits {@code op} with a register destination and the read's address as the memory source. */
    private ComplexMatchResult binaryRead(AMD64RMOp op, OperandSize size, ValueNode value, LIRLowerableAccess access) {
        return builder -> getArithmeticLIRGenerator().emitBinaryMemory(op, size, getLIRGeneratorTool().asAllocatable(operand(value)), (AMD64AddressValue) operand(access.getAddress()), getState(access));
    }

    /** VEX-encoded variant of {@link #binaryRead}; only scalar float sizes (SS/SD) are valid. */
    private ComplexMatchResult binaryRead(AMD64Assembler.VexRVMOp op, OperandSize size, ValueNode value, LIRLowerableAccess access) {
        assert size == SS || size == SD : size;
        return builder -> getArithmeticLIRGenerator().emitBinaryMemory(op, size, getLIRGeneratorTool().asAllocatable(operand(value)), (AMD64AddressValue) operand(access.getAddress()), getState(access));
    }

    /** value + [mem]: VADDSS/VADDSD with AVX, SSE ADD for scalar floats, integer ADD otherwise. */
    @MatchRule("(Add value Read=access)")
    public ComplexMatchResult addMemory(ValueNode value, LIRLowerableAccess access) {
        OperandSize size = getMemorySize(access);
        if (size.isXmmType()) {
            if (getArithmeticLIRGenerator().supportAVX()) {
                return binaryRead(size == SS ? VADDSS : VADDSD, size, value, access);
            } else {
                return binaryRead(SSEOp.ADD, size, value, access);
            }
        } else {
            return binaryRead(ADD.getRMOpcode(size), size, value, access);
        }
    }

    /** value - [mem]: VSUBSS/VSUBSD with AVX, SSE SUB for scalar floats, integer SUB otherwise. */
    @MatchRule("(Sub value Read=access)")
    public ComplexMatchResult subMemory(ValueNode value, LIRLowerableAccess access) {
        OperandSize size = getMemorySize(access);
        if (size.isXmmType()) {
            if (getArithmeticLIRGenerator().supportAVX()) {
                return binaryRead(size == SS ? VSUBSS : VSUBSD, size, value, access);
            } else {
                return binaryRead(SSEOp.SUB, size, value, access);
            }
        } else {
            return binaryRead(SUB.getRMOpcode(size), size, value, access);
        }
    }

    /** value * [mem]: VMULSS/VMULSD with AVX, SSE MUL for scalar floats, IMUL otherwise. */
    @MatchRule("(Mul value Read=access)")
    public ComplexMatchResult mulMemory(ValueNode value, LIRLowerableAccess access) {
        OperandSize size = getMemorySize(access);
        if (size.isXmmType()) {
            if (getArithmeticLIRGenerator().supportAVX()) {
                return binaryRead(size == SS ? VMULSS : VMULSD, size, value, access);
            } else {
                return binaryRead(SSEOp.MUL, size, value, access);
            }
        } else {
            return binaryRead(AMD64RMOp.IMUL, size, value, access);
        }
    }

    /** value & [mem]; integer kinds only (no XMM fusion here). */
    @MatchRule("(And value Read=access)")
    public ComplexMatchResult andMemory(ValueNode value, LIRLowerableAccess access) {
        OperandSize size = getMemorySize(access);
        if (size.isXmmType()) {
            return null;
        } else {
            return binaryRead(AND.getRMOpcode(size), size, value, access);
        }
    }

    /** value | [mem]; integer kinds only. */
    @MatchRule("(Or value Read=access)")
    public ComplexMatchResult orMemory(ValueNode value, LIRLowerableAccess access) {
        OperandSize size = getMemorySize(access);
        if (size.isXmmType()) {
            return null;
        } else {
            return binaryRead(OR.getRMOpcode(size), size, value, access);
        }
    }

    /** value ^ [mem]; integer kinds only. */
    @MatchRule("(Xor value Read=access)")
    public ComplexMatchResult xorMemory(ValueNode value, LIRLowerableAccess access) {
        OperandSize size = getMemorySize(access);
        if (size.isXmmType()) {
            return null;
        } else {
            return binaryRead(XOR.getRMOpcode(size), size, value, access);
        }
    }

    /**
     * Fuses write(addr, op(read(addr), value)) into a single read-modify-write memory op.
     * Requires an integer kind, the same address node for read and write, and that neither
     * access can deoptimize nor the write order memory accesses. For a constant operand it
     * prefers the single-operand form (e.g. INC/DEC, via getMOp) when one exists, then the
     * memory+imm form; otherwise the memory+register form.
     */
    private ComplexMatchResult emitMemoryConsumer(WriteNode write, AMD64Assembler.AMD64BinaryArithmetic arithmeticOp, ReadNode read, ValueNode value) {
        if (getMemoryKind(write).isInteger() && !write.canDeoptimize() && !write.ordersMemoryAccesses() && !read.canDeoptimize()) {
            OperandSize size = getMemorySize(write);
            if (write.getAddress() == read.getAddress()) {
                if (value.isJavaConstant()) {
                    long valueCst = value.asJavaConstant().asLong();
                    if (NumUtil.isInt(valueCst)) {
                        AMD64Assembler.AMD64MOp mop = AMD64ArithmeticLIRGenerator.getMOp(arithmeticOp, size, (int) valueCst);
                        if (mop != null) {
                            return builder -> {
                                AMD64AddressValue addressValue = (AMD64AddressValue) operand(write.getAddress());
                                builder.append(new AMD64UnaryConsumer.MemoryOp(mop, size, addressValue));
                                return null;
                            };
                        } else {
                            return builder -> {
                                AMD64AddressValue addressValue = (AMD64AddressValue) operand(write.getAddress());
                                builder.append(new AMD64BinaryConsumer.MemoryConstOp(arithmeticOp.getMIOpcode(size, NumUtil.isByte(valueCst)), size, addressValue, (int) valueCst, state(write)));
                                return null;
                            };
                        }
                    }
                }
                return builder -> {
                    AMD64AddressValue addressValue = (AMD64AddressValue) operand(write.getAddress());
                    builder.append(new AMD64BinaryConsumer.MemoryMROp(arithmeticOp.getMROpcode(size), size, addressValue, builder.getLIRGeneratorTool().asAllocatable(operand(value)), state(write)));
                    return null;
                };
            }
        }
        return null;
    }

    /** [mem] += value as a single RMW op (see {@link #emitMemoryConsumer}). */
    @MatchRule("(Write=write object (Add Read=read value))")
    @MatchRule("(SideEffectFreeWrite=write object (Add Read=read value))")
    public ComplexMatchResult addToMemory(WriteNode write, ReadNode read, ValueNode value) {
        return emitMemoryConsumer(write, ADD, read, value);
    }

    /** [mem] -= value as a single RMW op. */
    @MatchRule("(Write=write object (Sub Read=read value))")
    public ComplexMatchResult subToMemory(WriteNode write, ReadNode read, ValueNode value) {
        return emitMemoryConsumer(write, SUB, read, value);
    }

    /** [mem] |= value as a single RMW op. */
    @MatchRule("(Write=write object (Or Read=read value))")
    public ComplexMatchResult orToMemory(WriteNode write, ReadNode read, ValueNode value) {
        return emitMemoryConsumer(write, OR, read, value);
    }

    /** [mem] ^= value as a single RMW op. */
    @MatchRule("(Write=write object (Xor Read=read value))")
    public ComplexMatchResult xorToMemory(WriteNode write, ReadNode read, ValueNode value) {
        return emitMemoryConsumer(write, XOR, read, value);
    }

    /**
     * Folds a Narrow feeding a Write into the store itself: the un-narrowed input is cast
     * (via CastValue) to the narrower store kind and written directly, avoiding a separate
     * narrowing move.
     */
    @MatchRule("(Write object Narrow=narrow)")
    public ComplexMatchResult writeNarrow(WriteNode root, NarrowNode narrow) {
        return builder -> {
            LIRKind writeKind = getLIRGeneratorTool().getLIRKind(root.value().stamp(NodeView.DEFAULT));
            Value input = operand(narrow.getValue());
            LIRKind inputKind = LIRKind.combine(input).changeType(writeKind.getPlatformKind());
            Value narrowed = new CastValue(inputKind, getLIRGeneratorTool().asAllocatable(input));
            getArithmeticLIRGenerator().emitStore(writeKind, operand(root.getAddress()), narrowed, state(root), root.getMemoryOrder());
            return null;
        };
    }

    /** Sign-extending load: folds SignExtend(Read) into a MOVSX-family load. */
    @MatchRule("(SignExtend Read=access)")
    public ComplexMatchResult signExtend(SignExtendNode root, LIRLowerableAccess access) {
        return emitSignExtendMemory(access, root.getInputBits(), root.getResultBits(), null);
    }

    /** Zero-extending load: folds ZeroExtend(Read) into a zero-extending memory load. */
    @MatchRule("(ZeroExtend Read=access)")
    public ComplexMatchResult zeroExtend(ZeroExtendNode root, LIRLowerableAccess access) {
        AMD64Kind memoryKind = getMemoryKind(access);
        return builder -> getArithmeticLIRGenerator().emitZeroExtendMemory(memoryKind, root.getResultBits(), (AMD64AddressValue) operand(access.getAddress()), getState(access));
    }

    /**
     * Folds Narrow(Read) into a load at the narrowed kind; implemented as a zero-extending
     * load of the narrow result bits, with the address value re-kinded to the narrow result.
     */
    @MatchRule("(Narrow Read=access)")
    public ComplexMatchResult narrowRead(NarrowNode root, LIRLowerableAccess access) {
        return new ComplexMatchResult() {
            @Override
            public Value evaluate(NodeLIRBuilder builder) {
                AMD64AddressValue address = (AMD64AddressValue) operand(access.getAddress());
                LIRKind addressKind = LIRKind.combineDerived(getLIRGeneratorTool().getLIRKind(root.asNode().stamp(NodeView.DEFAULT)), address.getBase(), address.getIndex());
                AMD64AddressValue newAddress = address.withKind(addressKind);
                LIRKind readKind = getLIRGeneratorTool().getLIRKind(root.stamp(NodeView.DEFAULT));
                return getArithmeticLIRGenerator().emitZeroExtendMemory((AMD64Kind) readKind.getPlatformKind(), root.getResultBits(), newAddress, getState(access));
            }
        };
    }

    /** SignExtend(Narrow(Read)): sign-extend straight from memory at the narrowed width. */
    @MatchRule("(SignExtend (Narrow=narrow Read=access))")
    public ComplexMatchResult signExtendNarrowRead(SignExtendNode root, NarrowNode narrow, LIRLowerableAccess access) {
        LIRKind kind = getLIRGeneratorTool().getLIRKind(narrow.stamp(NodeView.DEFAULT));
        return emitSignExtendMemory(access, narrow.getResultBits(), root.getResultBits(), kind);
    }

    /**
     * Folds a FloatConvert of a memory read into the SSE conversion instruction with a memory
     * source. Bails out (returns {@code null}) for float-to-integer conversions whose input may
     * be NaN or overflow, since those need a fix-up on the result and thus a register input.
     */
    @MatchRule("(FloatConvert Read=access)")
    public ComplexMatchResult floatConvert(FloatConvertNode root, LIRLowerableAccess access) {
        if (root.getFloatConvert().getCategory().equals(FloatConvertCategory.FloatingPointToInteger) && (root.inputCanBeNaN() || root.canOverflow())) {
            /* We need to fix up the result of the conversion, the input should be in a register. */
            return null;
        }
        switch (root.getFloatConvert()) {
            case D2F:
                return emitConvertMemoryOp(AMD64Kind.SINGLE, SSEOp.CVTSD2SS, SD, access);
            case D2I:
                return emitConvertMemoryOp(AMD64Kind.DWORD, SSEOp.CVTTSD2SI, DWORD, access);
            case D2L:
                return emitConvertMemoryOp(AMD64Kind.QWORD, SSEOp.CVTTSD2SI, QWORD, access);
            case F2D:
                return emitConvertMemoryOp(AMD64Kind.DOUBLE, SSEOp.CVTSS2SD, SS, access);
            case F2I:
                return emitConvertMemoryOp(AMD64Kind.DWORD, SSEOp.CVTTSS2SI, DWORD, access);
            case F2L:
                return emitConvertMemoryOp(AMD64Kind.QWORD, SSEOp.CVTTSS2SI, QWORD, access);
            case I2D:
                return emitConvertMemoryOp(AMD64Kind.DOUBLE, SSEOp.CVTSI2SD, DWORD, access);
            case I2F:
                return emitConvertMemoryOp(AMD64Kind.SINGLE, SSEOp.CVTSI2SS, DWORD, access);
            case L2D:
                return emitConvertMemoryOp(AMD64Kind.DOUBLE, SSEOp.CVTSI2SD, QWORD, access);
            case L2F:
                return emitConvertMemoryOp(AMD64Kind.SINGLE, SSEOp.CVTSI2SS, QWORD, access);
            default:
                throw GraalError.shouldNotReachHereUnexpectedValue(root.getFloatConvert()); // ExcludeFromJacocoGeneratedReport
        }
    }

    /** Reinterpret(Read): load directly at the reinterpreted kind (see emitReinterpretMemory). */
    @MatchRule("(Reinterpret Read=access)")
    public ComplexMatchResult reinterpret(ReinterpretNode root, LIRLowerableAccess access) {
        return builder -> {
            LIRKind kind = getLIRGeneratorTool().getLIRKind(root.stamp(NodeView.DEFAULT));
            return emitReinterpretMemory(kind, access);
        };
    }

    /** Write(Reinterpret(x)): store the original value directly, skipping the reinterpret move. */
    @MatchRule("(Write object Reinterpret=reinterpret)")
    public ComplexMatchResult writeReinterpret(WriteNode root, ReinterpretNode reinterpret) {
        return builder -> {
            LIRKind kind = getLIRGeneratorTool().getLIRKind(reinterpret.getValue().stamp(NodeView.DEFAULT));
            AllocatableValue value = getLIRGeneratorTool().asAllocatable(operand(reinterpret.getValue()));
            AMD64AddressValue address = (AMD64AddressValue) operand(root.getAddress());
            getArithmeticLIRGenerator().emitStore(kind, address, value, getState(root), root.getMemoryOrder());
            return null;
        };
    }

    /**
     * Matches the nested-conditional idiom for an unsigned three-way compare
     * (x <u y ? -1 : (x == y ? 0 : 1)) and emits a single normalized unsigned compare.
     * Only fires for the exact int constants -1/0/1 and inputs of equal bit width.
     */
    @MatchRule("(Conditional (IntegerBelow x y) Constant=cm1 (Conditional (IntegerEquals x y) Constant=c0 Constant=c1))")
    public ComplexMatchResult normalizedIntegerCompare(ValueNode x, ValueNode y, ConstantNode cm1, ConstantNode c0, ConstantNode c1) {
        if (cm1.getStackKind() == JavaKind.Int && cm1.asJavaConstant().asInt() == -1 && c0.getStackKind() == JavaKind.Int && c0.asJavaConstant().asInt() == 0 && c1.getStackKind() == JavaKind.Int && c1.asJavaConstant().asInt() == 1) {
            GraalError.guarantee(PrimitiveStamp.getBits(x.stamp(NodeView.DEFAULT)) == PrimitiveStamp.getBits(y.stamp(NodeView.DEFAULT)), "need compatible inputs: %s, %s", x, y);
            return builder -> {
                LIRKind compareKind = gen.getLIRKind(x.stamp(NodeView.DEFAULT));
                return getArithmeticLIRGenerator().emitNormalizedUnsignedCompare(compareKind, operand(x), operand(y));
            };
        }
        return null;
    }

    @Override
    public AMD64LIRGenerator getLIRGeneratorTool() {
        return (AMD64LIRGenerator) gen;
    }

    /** The AMD64 arithmetic LIR generator backing the tool returned by getLIRGeneratorTool(). */
    protected AMD64ArithmeticLIRGenerator getArithmeticLIRGenerator() {
        return (AMD64ArithmeticLIRGenerator) getLIRGeneratorTool().getArithmetic();
    }
}
apache/commons-math
36,789
commons-math-legacy/src/main/java/org/apache/commons/math4/legacy/linear/EigenDecomposition.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.math4.legacy.linear; import org.apache.commons.numbers.complex.Complex; import org.apache.commons.numbers.core.Precision; import org.apache.commons.math4.legacy.exception.DimensionMismatchException; import org.apache.commons.math4.legacy.exception.MathArithmeticException; import org.apache.commons.math4.legacy.exception.MathUnsupportedOperationException; import org.apache.commons.math4.legacy.exception.MaxCountExceededException; import org.apache.commons.math4.legacy.exception.util.LocalizedFormats; import org.apache.commons.math4.core.jdkmath.JdkMath; /** * Calculates the eigen decomposition of a real matrix. * <p> * The eigen decomposition of matrix A is a set of two matrices: * V and D such that A = V &times; D &times; V<sup>T</sup>. * A, V and D are all m &times; m matrices. 
 * <p>
 * This class is similar in spirit to the {@code EigenvalueDecomposition}
 * class from the <a href="http://math.nist.gov/javanumerics/jama/">JAMA</a>
 * library, with the following changes:
 * <ul>
 *   <li>a {@link #getVT() getVt} method has been added,</li>
 *   <li>two {@link #getRealEigenvalue(int) getRealEigenvalue} and
 *       {@link #getImagEigenvalue(int) getImagEigenvalue} methods to pick up a
 *       single eigenvalue have been added,</li>
 *   <li>a {@link #getEigenvector(int) getEigenvector} method to pick up a
 *       single eigenvector has been added,</li>
 *   <li>a {@link #getDeterminant() getDeterminant} method has been added.</li>
 *   <li>a {@link #getSolver() getSolver} method has been added.</li>
 * </ul>
 * <p>
 * As of 3.1, this class supports general real matrices (both symmetric and non-symmetric):
 * <p>
 * If A is symmetric, then A = V*D*V' where the eigenvalue matrix D is diagonal
 * and the eigenvector matrix V is orthogonal, i.e.
 * {@code A = V.multiply(D.multiply(V.transpose()))} and
 * {@code V.multiply(V.transpose())} equals the identity matrix.
 * </p>
 * <p>
 * If A is not symmetric, then the eigenvalue matrix D is block diagonal with the real
 * eigenvalues in 1-by-1 blocks and any complex eigenvalues, lambda + i*mu, in 2-by-2
 * blocks:
 * <pre>
 *    [lambda, mu    ]
 *    [   -mu, lambda]
 * </pre>
 * The columns of V represent the eigenvectors in the sense that {@code A*V = V*D},
 * i.e. A.multiply(V) equals V.multiply(D).
 * The matrix V may be badly conditioned, or even singular, so the validity of the
 * equation {@code A = V*D*inverse(V)} depends upon the condition of V.
 * <p>
 * This implementation is based on the paper by A. Dubrulle, R.S. Martin and
 * J.H. Wilkinson "The Implicit QL Algorithm" in Wilkinson and Reinsch (1971)
 * Handbook for automatic computation, vol. 2, Linear algebra, Springer-Verlag,
 * New-York.
* * @see <a href="http://mathworld.wolfram.com/EigenDecomposition.html">MathWorld</a> * @see <a href="http://en.wikipedia.org/wiki/Eigendecomposition_of_a_matrix">Wikipedia</a> * @since 2.0 (changed to concrete class in 3.0) */ public class EigenDecomposition { /** Internally used epsilon criteria. */ private static final double EPSILON = 1e-12; /** Maximum number of iterations accepted in the implicit QL transformation. */ private static final byte MAX_ITER = 30; /** Main diagonal of the tridiagonal matrix. */ private double[] main; /** Secondary diagonal of the tridiagonal matrix. */ private double[] secondary; /** * Transformer to tridiagonal (may be null if matrix is already * tridiagonal). */ private TriDiagonalTransformer transformer; /** Real part of the realEigenvalues. */ private double[] realEigenvalues; /** Imaginary part of the realEigenvalues. */ private double[] imagEigenvalues; /** Eigenvectors. */ private ArrayRealVector[] eigenvectors; /** Cached value of V. */ private RealMatrix cachedV; /** Cached value of D. */ private RealMatrix cachedD; /** Cached value of Vt. */ private RealMatrix cachedVt; /** Whether the matrix is symmetric. */ private final boolean isSymmetric; /** * Calculates the eigen decomposition of the given real matrix. * <p> * Supports decomposition of a general matrix since 3.1. * * @param matrix Matrix to decompose. * @throws MaxCountExceededException if the algorithm fails to converge. 
* @throws MathArithmeticException if the decomposition of a general matrix * results in a matrix with zero norm * @since 3.1 */ public EigenDecomposition(final RealMatrix matrix) throws MathArithmeticException { final double symTol = 10 * matrix.getRowDimension() * matrix.getColumnDimension() * Precision.EPSILON; isSymmetric = MatrixUtils.isSymmetric(matrix, symTol); if (isSymmetric) { transformToTridiagonal(matrix); findEigenVectors(transformer.getQ().getData()); } else { final SchurTransformer t = transformToSchur(matrix); findEigenVectorsFromSchur(t); } } /** * Calculates the eigen decomposition of the symmetric tridiagonal * matrix. The Householder matrix is assumed to be the identity matrix. * * @param main Main diagonal of the symmetric tridiagonal form. * @param secondary Secondary of the tridiagonal form. * @throws MaxCountExceededException if the algorithm fails to converge. * @since 3.1 */ public EigenDecomposition(final double[] main, final double[] secondary) { isSymmetric = true; this.main = main.clone(); this.secondary = secondary.clone(); transformer = null; final int size = main.length; final double[][] z = new double[size][size]; for (int i = 0; i < size; i++) { z[i][i] = 1.0; } findEigenVectors(z); } /** * Gets the matrix V of the decomposition. * V is an orthogonal matrix, i.e. its transpose is also its inverse. * The columns of V are the eigenvectors of the original matrix. * No assumption is made about the orientation of the system axes formed * by the columns of V (e.g. in a 3-dimension space, V can form a left- * or right-handed system). * * @return the V matrix. */ public RealMatrix getV() { if (cachedV == null) { final int m = eigenvectors.length; cachedV = MatrixUtils.createRealMatrix(m, m); for (int k = 0; k < m; ++k) { cachedV.setColumnVector(k, eigenvectors[k]); } } // return the cached matrix return cachedV; } /** * Gets the block diagonal matrix D of the decomposition. * D is a block diagonal matrix. 
* Real eigenvalues are on the diagonal while complex values are on * 2x2 blocks { {real +imaginary}, {-imaginary, real} }. * * @return the D matrix. * * @see #getRealEigenvalues() * @see #getImagEigenvalues() */ public RealMatrix getD() { if (cachedD == null) { // cache the matrix for subsequent calls cachedD = MatrixUtils.createRealMatrixWithDiagonal(realEigenvalues); for (int i = 0; i < imagEigenvalues.length; i++) { if (Precision.compareTo(imagEigenvalues[i], 0.0, EPSILON) > 0) { cachedD.setEntry(i, i+1, imagEigenvalues[i]); } else if (Precision.compareTo(imagEigenvalues[i], 0.0, EPSILON) < 0) { cachedD.setEntry(i, i-1, imagEigenvalues[i]); } } } return cachedD; } /** * Gets the transpose of the matrix V of the decomposition. * V is an orthogonal matrix, i.e. its transpose is also its inverse. * The columns of V are the eigenvectors of the original matrix. * No assumption is made about the orientation of the system axes formed * by the columns of V (e.g. in a 3-dimension space, V can form a left- * or right-handed system). * * @return the transpose of the V matrix. */ public RealMatrix getVT() { if (cachedVt == null) { final int m = eigenvectors.length; cachedVt = MatrixUtils.createRealMatrix(m, m); for (int k = 0; k < m; ++k) { cachedVt.setRowVector(k, eigenvectors[k]); } } // return the cached matrix return cachedVt; } /** * Returns whether the calculated eigen values are complex or real. * <p>The method performs a zero check for each element of the * {@link #getImagEigenvalues()} array and returns {@code true} if any * element is not equal to zero. * * @return {@code true} if the eigen values are complex, {@code false} otherwise * @since 3.1 */ public boolean hasComplexEigenvalues() { for (int i = 0; i < imagEigenvalues.length; i++) { if (!Precision.equals(imagEigenvalues[i], 0.0, EPSILON)) { return true; } } return false; } /** * Gets a copy of the real parts of the eigenvalues of the original matrix. 
* * @return a copy of the real parts of the eigenvalues of the original matrix. * * @see #getD() * @see #getRealEigenvalue(int) * @see #getImagEigenvalues() */ public double[] getRealEigenvalues() { return realEigenvalues.clone(); } /** * Returns the real part of the i<sup>th</sup> eigenvalue of the original * matrix. * * @param i index of the eigenvalue (counting from 0) * @return real part of the i<sup>th</sup> eigenvalue of the original * matrix. * * @see #getD() * @see #getRealEigenvalues() * @see #getImagEigenvalue(int) */ public double getRealEigenvalue(final int i) { return realEigenvalues[i]; } /** * Gets a copy of the imaginary parts of the eigenvalues of the original * matrix. * * @return a copy of the imaginary parts of the eigenvalues of the original * matrix. * * @see #getD() * @see #getImagEigenvalue(int) * @see #getRealEigenvalues() */ public double[] getImagEigenvalues() { return imagEigenvalues.clone(); } /** * Gets the imaginary part of the i<sup>th</sup> eigenvalue of the original * matrix. * * @param i Index of the eigenvalue (counting from 0). * @return the imaginary part of the i<sup>th</sup> eigenvalue of the original * matrix. * * @see #getD() * @see #getImagEigenvalues() * @see #getRealEigenvalue(int) */ public double getImagEigenvalue(final int i) { return imagEigenvalues[i]; } /** * Gets a copy of the i<sup>th</sup> eigenvector of the original matrix. * * @param i Index of the eigenvector (counting from 0). * @return a copy of the i<sup>th</sup> eigenvector of the original matrix. * @see #getD() */ public RealVector getEigenvector(final int i) { return eigenvectors[i].copy(); } /** * Computes the determinant of the matrix. * * @return the determinant of the matrix. */ public double getDeterminant() { double determinant = 1; for (double lambda : realEigenvalues) { determinant *= lambda; } return determinant; } /** * Computes the square-root of the matrix. * This implementation assumes that the matrix is symmetric and positive * definite. 
* * @return the square-root of the matrix. * @throws MathUnsupportedOperationException if the matrix is not * symmetric or not positive definite. * @since 3.1 */ public RealMatrix getSquareRoot() { if (!isSymmetric) { throw new MathUnsupportedOperationException(); } final double[] sqrtEigenValues = new double[realEigenvalues.length]; for (int i = 0; i < realEigenvalues.length; i++) { final double eigen = realEigenvalues[i]; if (eigen <= 0) { throw new MathUnsupportedOperationException(); } sqrtEigenValues[i] = JdkMath.sqrt(eigen); } final RealMatrix sqrtEigen = MatrixUtils.createRealDiagonalMatrix(sqrtEigenValues); final RealMatrix v = getV(); final RealMatrix vT = getVT(); return v.multiply(sqrtEigen).multiply(vT); } /** * Gets a solver for finding the A &times; X = B solution in exact * linear sense. * <p> * Since 3.1, eigen decomposition of a general matrix is supported, * but the {@link DecompositionSolver} only supports real eigenvalues. * * @return a solver * @throws MathUnsupportedOperationException if the decomposition resulted in * complex eigenvalues */ public DecompositionSolver getSolver() { if (hasComplexEigenvalues()) { throw new MathUnsupportedOperationException(); } return new Solver(realEigenvalues, imagEigenvalues, eigenvectors); } /** Specialized solver. */ private static final class Solver implements DecompositionSolver { /** Real part of the realEigenvalues. */ private final double[] realEigenvalues; /** Imaginary part of the realEigenvalues. */ private final double[] imagEigenvalues; /** Eigenvectors. */ private final ArrayRealVector[] eigenvectors; /** * Builds a solver from decomposed matrix. * * @param realEigenvalues Real parts of the eigenvalues. * @param imagEigenvalues Imaginary parts of the eigenvalues. * @param eigenvectors Eigenvectors. 
*/ private Solver(final double[] realEigenvalues, final double[] imagEigenvalues, final ArrayRealVector[] eigenvectors) { this.realEigenvalues = realEigenvalues; this.imagEigenvalues = imagEigenvalues; this.eigenvectors = eigenvectors; } /** * Solves the linear equation A &times; X = B for symmetric matrices A. * <p> * This method only finds exact linear solutions, i.e. solutions for * which ||A &times; X - B|| is exactly 0. * </p> * * @param b Right-hand side of the equation A &times; X = B. * @return a Vector X that minimizes the two norm of A &times; X - B. * * @throws DimensionMismatchException if the matrices dimensions do not match. * @throws SingularMatrixException if the decomposed matrix is singular. */ @Override public RealVector solve(final RealVector b) { if (!isNonSingular()) { throw new SingularMatrixException(); } final int m = realEigenvalues.length; if (b.getDimension() != m) { throw new DimensionMismatchException(b.getDimension(), m); } final double[] bp = new double[m]; for (int i = 0; i < m; ++i) { final ArrayRealVector v = eigenvectors[i]; final double[] vData = v.getDataRef(); final double s = v.dotProduct(b) / realEigenvalues[i]; for (int j = 0; j < m; ++j) { bp[j] += s * vData[j]; } } return new ArrayRealVector(bp, false); } /** {@inheritDoc} */ @Override public RealMatrix solve(RealMatrix b) { if (!isNonSingular()) { throw new SingularMatrixException(); } final int m = realEigenvalues.length; if (b.getRowDimension() != m) { throw new DimensionMismatchException(b.getRowDimension(), m); } final int nColB = b.getColumnDimension(); final double[][] bp = new double[m][nColB]; final double[] tmpCol = new double[m]; for (int k = 0; k < nColB; ++k) { for (int i = 0; i < m; ++i) { tmpCol[i] = b.getEntry(i, k); bp[i][k] = 0; } for (int i = 0; i < m; ++i) { final ArrayRealVector v = eigenvectors[i]; final double[] vData = v.getDataRef(); double s = 0; for (int j = 0; j < m; ++j) { s += v.getEntry(j) * tmpCol[j]; } s /= realEigenvalues[i]; for (int j = 
0; j < m; ++j) { bp[j][k] += s * vData[j]; } } } return new Array2DRowRealMatrix(bp, false); } /** * Checks whether the decomposed matrix is non-singular. * * @return true if the decomposed matrix is non-singular. */ @Override public boolean isNonSingular() { double largestEigenvalueNorm = 0.0; // Looping over all values (in case they are not sorted in decreasing // order of their norm). for (int i = 0; i < realEigenvalues.length; ++i) { largestEigenvalueNorm = JdkMath.max(largestEigenvalueNorm, eigenvalueNorm(i)); } // Corner case: zero matrix, all exactly 0 eigenvalues if (largestEigenvalueNorm == 0.0) { return false; } for (int i = 0; i < realEigenvalues.length; ++i) { // Looking for eigenvalues that are 0, where we consider anything much much smaller // than the largest eigenvalue to be effectively 0. if (Precision.equals(eigenvalueNorm(i) / largestEigenvalueNorm, 0, EPSILON)) { return false; } } return true; } /** * @param i which eigenvalue to find the norm of * @return the norm of ith (complex) eigenvalue. */ private double eigenvalueNorm(int i) { final double re = realEigenvalues[i]; final double im = imagEigenvalues[i]; return JdkMath.sqrt(re * re + im * im); } /** * Get the inverse of the decomposed matrix. * * @return the inverse matrix. * @throws SingularMatrixException if the decomposed matrix is singular. */ @Override public RealMatrix getInverse() { if (!isNonSingular()) { throw new SingularMatrixException(); } final int m = realEigenvalues.length; final double[][] invData = new double[m][m]; for (int i = 0; i < m; ++i) { final double[] invI = invData[i]; for (int j = 0; j < m; ++j) { double invIJ = 0; for (int k = 0; k < m; ++k) { final double[] vK = eigenvectors[k].getDataRef(); invIJ += vK[i] * vK[j] / realEigenvalues[k]; } invI[j] = invIJ; } } return MatrixUtils.createRealMatrix(invData); } } /** * Transforms the matrix to tridiagonal form. * * @param matrix Matrix to transform. 
*/ private void transformToTridiagonal(final RealMatrix matrix) { // transform the matrix to tridiagonal transformer = new TriDiagonalTransformer(matrix); main = transformer.getMainDiagonalRef(); secondary = transformer.getSecondaryDiagonalRef(); } /** * Find eigenvalues and eigenvectors (Dubrulle et al., 1971). * * @param householderMatrix Householder matrix of the transformation * to tridiagonal form. */ private void findEigenVectors(final double[][] householderMatrix) { final double[][]z = householderMatrix.clone(); final int n = main.length; realEigenvalues = new double[n]; imagEigenvalues = new double[n]; final double[] e = new double[n]; for (int i = 0; i < n - 1; i++) { realEigenvalues[i] = main[i]; e[i] = secondary[i]; } realEigenvalues[n - 1] = main[n - 1]; e[n - 1] = 0; // Determine the largest main and secondary value in absolute term. double maxAbsoluteValue = 0; for (int i = 0; i < n; i++) { if (JdkMath.abs(realEigenvalues[i]) > maxAbsoluteValue) { maxAbsoluteValue = JdkMath.abs(realEigenvalues[i]); } if (JdkMath.abs(e[i]) > maxAbsoluteValue) { maxAbsoluteValue = JdkMath.abs(e[i]); } } // Make null any main and secondary value too small to be significant if (maxAbsoluteValue != 0) { for (int i=0; i < n; i++) { if (JdkMath.abs(realEigenvalues[i]) <= Precision.EPSILON * maxAbsoluteValue) { realEigenvalues[i] = 0; } if (JdkMath.abs(e[i]) <= Precision.EPSILON * maxAbsoluteValue) { e[i]=0; } } } for (int j = 0; j < n; j++) { int its = 0; int m; do { for (m = j; m < n - 1; m++) { double delta = JdkMath.abs(realEigenvalues[m]) + JdkMath.abs(realEigenvalues[m + 1]); if (JdkMath.abs(e[m]) + delta == delta) { break; } } if (m != j) { if (its == MAX_ITER) { throw new MaxCountExceededException(LocalizedFormats.CONVERGENCE_FAILED, MAX_ITER); } its++; double q = (realEigenvalues[j + 1] - realEigenvalues[j]) / (2 * e[j]); double t = JdkMath.sqrt(1 + q * q); if (q < 0.0) { q = realEigenvalues[m] - realEigenvalues[j] + e[j] / (q - t); } else { q = realEigenvalues[m] - 
realEigenvalues[j] + e[j] / (q + t); } double u = 0.0; double s = 1.0; double c = 1.0; int i; for (i = m - 1; i >= j; i--) { double p = s * e[i]; double h = c * e[i]; if (JdkMath.abs(p) >= JdkMath.abs(q)) { c = q / p; t = JdkMath.sqrt(c * c + 1.0); e[i + 1] = p * t; s = 1.0 / t; c *= s; } else { s = p / q; t = JdkMath.sqrt(s * s + 1.0); e[i + 1] = q * t; c = 1.0 / t; s *= c; } if (e[i + 1] == 0.0) { realEigenvalues[i + 1] -= u; e[m] = 0.0; break; } q = realEigenvalues[i + 1] - u; t = (realEigenvalues[i] - q) * s + 2.0 * c * h; u = s * t; realEigenvalues[i + 1] = q + u; q = c * t - h; for (int ia = 0; ia < n; ia++) { p = z[ia][i + 1]; z[ia][i + 1] = s * z[ia][i] + c * p; z[ia][i] = c * z[ia][i] - s * p; } } if (t == 0.0 && i >= j) { continue; } realEigenvalues[j] -= u; e[j] = q; e[m] = 0.0; } } while (m != j); } //Sort the eigen values (and vectors) in increase order for (int i = 0; i < n; i++) { int k = i; double p = realEigenvalues[i]; for (int j = i + 1; j < n; j++) { if (realEigenvalues[j] > p) { k = j; p = realEigenvalues[j]; } } if (k != i) { realEigenvalues[k] = realEigenvalues[i]; realEigenvalues[i] = p; for (int j = 0; j < n; j++) { p = z[j][i]; z[j][i] = z[j][k]; z[j][k] = p; } } } // Determine the largest eigen value in absolute term. maxAbsoluteValue = 0; for (int i = 0; i < n; i++) { if (JdkMath.abs(realEigenvalues[i]) > maxAbsoluteValue) { maxAbsoluteValue=JdkMath.abs(realEigenvalues[i]); } } // Make null any eigen value too small to be significant if (maxAbsoluteValue != 0.0) { for (int i=0; i < n; i++) { if (JdkMath.abs(realEigenvalues[i]) < Precision.EPSILON * maxAbsoluteValue) { realEigenvalues[i] = 0; } } } eigenvectors = new ArrayRealVector[n]; final double[] tmp = new double[n]; for (int i = 0; i < n; i++) { for (int j = 0; j < n; j++) { tmp[j] = z[j][i]; } eigenvectors[i] = new ArrayRealVector(tmp); } } /** * Transforms the matrix to Schur form and calculates the eigenvalues. * * @param matrix Matrix to transform. 
* @return the {@link SchurTransformer Shur transform} for this matrix */ private SchurTransformer transformToSchur(final RealMatrix matrix) { final SchurTransformer schurTransform = new SchurTransformer(matrix); final double[][] matT = schurTransform.getT().getData(); realEigenvalues = new double[matT.length]; imagEigenvalues = new double[matT.length]; for (int i = 0; i < realEigenvalues.length; i++) { if (i == (realEigenvalues.length - 1) || Precision.equals(matT[i + 1][i], 0.0, EPSILON)) { realEigenvalues[i] = matT[i][i]; } else { final double x = matT[i + 1][i + 1]; final double p = 0.5 * (matT[i][i] - x); final double z = JdkMath.sqrt(JdkMath.abs(p * p + matT[i + 1][i] * matT[i][i + 1])); realEigenvalues[i] = x + p; imagEigenvalues[i] = z; realEigenvalues[i + 1] = x + p; imagEigenvalues[i + 1] = -z; i++; } } return schurTransform; } /** * Performs a division of two complex numbers. * * @param xr real part of the first number * @param xi imaginary part of the first number * @param yr real part of the second number * @param yi imaginary part of the second number * @return result of the complex division */ private Complex cdiv(final double xr, final double xi, final double yr, final double yi) { return Complex.ofCartesian(xr, xi).divide(Complex.ofCartesian(yr, yi)); } /** * Find eigenvectors from a matrix transformed to Schur form. 
* * @param schur the schur transformation of the matrix * @throws MathArithmeticException if the Schur form has a norm of zero */ private void findEigenVectorsFromSchur(final SchurTransformer schur) throws MathArithmeticException { final double[][] matrixT = schur.getT().getData(); final double[][] matrixP = schur.getP().getData(); final int n = matrixT.length; // compute matrix norm double norm = 0.0; for (int i = 0; i < n; i++) { for (int j = JdkMath.max(i - 1, 0); j < n; j++) { norm += JdkMath.abs(matrixT[i][j]); } } // we can not handle a matrix with zero norm if (Precision.equals(norm, 0.0, EPSILON)) { throw new MathArithmeticException(LocalizedFormats.ZERO_NORM); } // Backsubstitute to find vectors of upper triangular form double r = 0.0; double s = 0.0; double z = 0.0; for (int idx = n - 1; idx >= 0; idx--) { double p = realEigenvalues[idx]; double q = imagEigenvalues[idx]; if (Precision.equals(q, 0.0)) { // Real vector int l = idx; matrixT[idx][idx] = 1.0; for (int i = idx - 1; i >= 0; i--) { double w = matrixT[i][i] - p; r = 0.0; for (int j = l; j <= idx; j++) { r += matrixT[i][j] * matrixT[j][idx]; } if (Precision.compareTo(imagEigenvalues[i], 0.0, EPSILON) < 0) { z = w; s = r; } else { l = i; if (Precision.equals(imagEigenvalues[i], 0.0)) { if (w != 0.0) { matrixT[i][idx] = -r / w; } else { matrixT[i][idx] = -r / (Precision.EPSILON * norm); } } else { // Solve real equations double x = matrixT[i][i + 1]; double y = matrixT[i + 1][i]; q = (realEigenvalues[i] - p) * (realEigenvalues[i] - p) + imagEigenvalues[i] * imagEigenvalues[i]; double t = (x * s - z * r) / q; matrixT[i][idx] = t; if (JdkMath.abs(x) > JdkMath.abs(z)) { matrixT[i + 1][idx] = (-r - w * t) / x; } else { matrixT[i + 1][idx] = (-s - y * t) / z; } } // Overflow control double t = JdkMath.abs(matrixT[i][idx]); if ((Precision.EPSILON * t) * t > 1) { for (int j = i; j <= idx; j++) { matrixT[j][idx] /= t; } } } } } else if (q < 0.0) { // Complex vector int l = idx - 1; // Last vector component 
imaginary so matrix is triangular if (JdkMath.abs(matrixT[idx][idx - 1]) > JdkMath.abs(matrixT[idx - 1][idx])) { matrixT[idx - 1][idx - 1] = q / matrixT[idx][idx - 1]; matrixT[idx - 1][idx] = -(matrixT[idx][idx] - p) / matrixT[idx][idx - 1]; } else { final Complex result = cdiv(0.0, -matrixT[idx - 1][idx], matrixT[idx - 1][idx - 1] - p, q); matrixT[idx - 1][idx - 1] = result.getReal(); matrixT[idx - 1][idx] = result.getImaginary(); } matrixT[idx][idx - 1] = 0.0; matrixT[idx][idx] = 1.0; for (int i = idx - 2; i >= 0; i--) { double ra = 0.0; double sa = 0.0; for (int j = l; j <= idx; j++) { ra += matrixT[i][j] * matrixT[j][idx - 1]; sa += matrixT[i][j] * matrixT[j][idx]; } double w = matrixT[i][i] - p; if (Precision.compareTo(imagEigenvalues[i], 0.0, EPSILON) < 0) { z = w; r = ra; s = sa; } else { l = i; if (Precision.equals(imagEigenvalues[i], 0.0)) { final Complex c = cdiv(-ra, -sa, w, q); matrixT[i][idx - 1] = c.getReal(); matrixT[i][idx] = c.getImaginary(); } else { // Solve complex equations double x = matrixT[i][i + 1]; double y = matrixT[i + 1][i]; double vr = (realEigenvalues[i] - p) * (realEigenvalues[i] - p) + imagEigenvalues[i] * imagEigenvalues[i] - q * q; final double vi = (realEigenvalues[i] - p) * 2.0 * q; if (Precision.equals(vr, 0.0) && Precision.equals(vi, 0.0)) { vr = Precision.EPSILON * norm * (JdkMath.abs(w) + JdkMath.abs(q) + JdkMath.abs(x) + JdkMath.abs(y) + JdkMath.abs(z)); } final Complex c = cdiv(x * r - z * ra + q * sa, x * s - z * sa - q * ra, vr, vi); matrixT[i][idx - 1] = c.getReal(); matrixT[i][idx] = c.getImaginary(); if (JdkMath.abs(x) > (JdkMath.abs(z) + JdkMath.abs(q))) { matrixT[i + 1][idx - 1] = (-ra - w * matrixT[i][idx - 1] + q * matrixT[i][idx]) / x; matrixT[i + 1][idx] = (-sa - w * matrixT[i][idx] - q * matrixT[i][idx - 1]) / x; } else { final Complex c2 = cdiv(-r - y * matrixT[i][idx - 1], -s - y * matrixT[i][idx], z, q); matrixT[i + 1][idx - 1] = c2.getReal(); matrixT[i + 1][idx] = c2.getImaginary(); } } // Overflow control 
double t = JdkMath.max(JdkMath.abs(matrixT[i][idx - 1]), JdkMath.abs(matrixT[i][idx])); if ((Precision.EPSILON * t) * t > 1) { for (int j = i; j <= idx; j++) { matrixT[j][idx - 1] /= t; matrixT[j][idx] /= t; } } } } } } // Back transformation to get eigenvectors of original matrix for (int j = n - 1; j >= 0; j--) { for (int i = 0; i <= n - 1; i++) { z = 0.0; for (int k = 0; k <= JdkMath.min(j, n - 1); k++) { z += matrixP[i][k] * matrixT[k][j]; } matrixP[i][j] = z; } } eigenvectors = new ArrayRealVector[n]; final double[] tmp = new double[n]; for (int i = 0; i < n; i++) { for (int j = 0; j < n; j++) { tmp[j] = matrixP[j][i]; } eigenvectors[i] = new ArrayRealVector(tmp); } } }
apache/druid
36,383
integration-tests/src/test/java/org/apache/druid/tests/security/AbstractAuthConfigurationTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.druid.tests.security; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.inject.Inject; import org.apache.calcite.avatica.AvaticaSqlException; import org.apache.druid.guice.annotations.Client; import org.apache.druid.guice.annotations.ExtensionPoint; import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.java.util.common.jackson.JacksonUtils; import org.apache.druid.java.util.common.logger.Logger; import org.apache.druid.java.util.http.client.CredentialedHttpClient; import org.apache.druid.java.util.http.client.HttpClient; import org.apache.druid.java.util.http.client.auth.BasicCredentials; import org.apache.druid.java.util.http.client.response.StatusResponseHolder; import org.apache.druid.msq.dart.controller.sql.DartSqlEngine; import org.apache.druid.query.QueryContexts; import org.apache.druid.query.http.SqlTaskStatus; import org.apache.druid.server.security.Access; import 
org.apache.druid.server.security.Action; import org.apache.druid.server.security.Resource; import org.apache.druid.server.security.ResourceAction; import org.apache.druid.server.security.ResourceType; import org.apache.druid.sql.avatica.DruidAvaticaJsonHandler; import org.apache.druid.testing.clients.CoordinatorResourceTestClient; import org.apache.druid.testing.tools.IntegrationTestingConfig; import org.apache.druid.testing.utils.HttpUtil; import org.apache.druid.testing.utils.MsqTestQueryHelper; import org.apache.druid.testing.utils.TestQueryHelper; import org.apache.druid.tests.indexer.AbstractIndexerTest; import org.jboss.netty.handler.codec.http.HttpMethod; import org.jboss.netty.handler.codec.http.HttpResponseStatus; import org.testng.Assert; import org.testng.annotations.Test; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.sql.Connection; import java.sql.DriverManager; import java.sql.ResultSet; import java.sql.Statement; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.stream.Collectors; @ExtensionPoint public abstract class AbstractAuthConfigurationTest { private static final Logger LOG = new Logger(AbstractAuthConfigurationTest.class); protected static final String INVALID_NAME = "invalid%2Fname"; protected static final String SYSTEM_SCHEMA_SEGMENTS_RESULTS_RESOURCE = "/results/auth_test_sys_schema_segments.json"; protected static final String SYSTEM_SCHEMA_SERVER_SEGMENTS_RESULTS_RESOURCE = "/results/auth_test_sys_schema_server_segments.json"; protected static final String SYSTEM_SCHEMA_SERVERS_RESULTS_RESOURCE = "/results/auth_test_sys_schema_servers.json"; protected static final String SYSTEM_SCHEMA_TASKS_RESULTS_RESOURCE = "/results/auth_test_sys_schema_tasks.json"; protected static final String SYS_SCHEMA_SEGMENTS_QUERY = "SELECT * FROM sys.segments WHERE datasource IN ('auth_test')"; protected static final String 
SYS_SCHEMA_SERVERS_QUERY = "SELECT * FROM sys.servers WHERE tier IS NOT NULL"; protected static final String SYS_SCHEMA_SERVER_SEGMENTS_QUERY = "SELECT * FROM sys.server_segments WHERE segment_id LIKE 'auth_test%'"; protected static final String SYS_SCHEMA_TASKS_QUERY = "SELECT * FROM sys.tasks WHERE datasource IN ('auth_test')"; protected static final TypeReference<List<Map<String, Object>>> SYS_SCHEMA_RESULTS_TYPE_REFERENCE = new TypeReference<>() {}; /** * create a ResourceAction set of permissions that can only read a 'auth_test' datasource, for Authorizer * implementations which use ResourceAction pattern matching */ protected static final List<ResourceAction> DATASOURCE_ONLY_PERMISSIONS = Collections.singletonList( new ResourceAction( new Resource("auth_test", ResourceType.DATASOURCE), Action.READ ) ); protected static final List<ResourceAction> DATASOURCE_QUERY_CONTEXT_PERMISSIONS = ImmutableList.of( new ResourceAction( new Resource("auth_test", ResourceType.DATASOURCE), Action.READ ), new ResourceAction( new Resource(QueryContexts.ENGINE, ResourceType.QUERY_CONTEXT), Action.WRITE ), new ResourceAction( new Resource("auth_test_ctx", ResourceType.QUERY_CONTEXT), Action.WRITE ) ); /** * create a ResourceAction set of permissions that can only read 'auth_test' + partial SYSTEM_TABLE, for Authorizer * implementations which use ResourceAction pattern matching */ protected static final List<ResourceAction> DATASOURCE_SYS_PERMISSIONS = ImmutableList.of( new ResourceAction( new Resource("auth_test", ResourceType.DATASOURCE), Action.READ ), new ResourceAction( new Resource("segments", ResourceType.SYSTEM_TABLE), Action.READ ), // test missing state permission but having servers permission new ResourceAction( new Resource("servers", ResourceType.SYSTEM_TABLE), Action.READ ), // test missing state permission but having server_segments permission new ResourceAction( new Resource("server_segments", ResourceType.SYSTEM_TABLE), Action.READ ), new ResourceAction( new 
Resource("tasks", ResourceType.SYSTEM_TABLE), Action.READ ) ); /** * create a ResourceAction set of permissions that can only read 'auth_test' + STATE + SYSTEM_TABLE read access, for * Authorizer implementations which use ResourceAction pattern matching */ protected static final List<ResourceAction> DATASOURCE_SYS_STATE_PERMISSIONS = ImmutableList.of( new ResourceAction( new Resource("auth_test", ResourceType.DATASOURCE), Action.READ ), new ResourceAction( new Resource(".*", ResourceType.SYSTEM_TABLE), Action.READ ), new ResourceAction( new Resource(".*", ResourceType.STATE), Action.READ ) ); /** * create a ResourceAction set of permissions with only STATE and SYSTEM_TABLE read access, for Authorizer * implementations which use ResourceAction pattern matching */ protected static final List<ResourceAction> STATE_ONLY_PERMISSIONS = ImmutableList.of( new ResourceAction( new Resource(".*", ResourceType.STATE), Action.READ ), new ResourceAction( new Resource(".*", ResourceType.SYSTEM_TABLE), Action.READ ) ); protected enum User { ADMIN("admin", "priest"), DATASOURCE_ONLY_USER("datasourceOnlyUser", "helloworld"), DATASOURCE_AND_CONTEXT_PARAMS_USER("datasourceAndContextParamsUser", "helloworld"), DATASOURCE_AND_SYS_USER("datasourceAndSysUser", "helloworld"), DATASOURCE_WITH_STATE_USER("datasourceWithStateUser", "helloworld"), STATE_ONLY_USER("stateOnlyUser", "helloworld"), INTERNAL_SYSTEM("druid_system", "warlock"); private final String name; private final String password; User(String name, String password) { this.name = name; this.password = password; } public String getName() { return name; } public String getPassword() { return password; } } protected List<Map<String, Object>> adminSegments; protected List<Map<String, Object>> adminTasks; protected List<Map<String, Object>> adminServers; protected List<Map<String, Object>> adminServerSegments; @Inject protected IntegrationTestingConfig config; @Inject protected ObjectMapper jsonMapper; @Inject protected 
MsqTestQueryHelper msqHelper; @Inject @Client protected HttpClient httpClient; @Inject protected CoordinatorResourceTestClient coordinatorClient; protected Map<User, HttpClient> httpClients; protected abstract void setupDatasourceOnlyUser() throws Exception; protected abstract void setupDatasourceAndContextParamsUser() throws Exception; protected abstract void setupDatasourceAndSysTableUser() throws Exception; protected abstract void setupDatasourceAndSysAndStateUser() throws Exception; protected abstract void setupSysTableAndStateOnlyUser() throws Exception; protected abstract void setupTestSpecificHttpClients() throws Exception; protected abstract String getAuthenticatorName(); protected abstract String getAuthorizerName(); protected abstract String getExpectedAvaticaAuthError(); protected abstract String getExpectedAvaticaAuthzError(); /** * Returns properties for the admin with an invalid password. * Implementations can set any properties for authentication as they need. */ protected abstract Properties getAvaticaConnectionPropertiesForInvalidAdmin(); /** * Returns properties for the given user. * Implementations can set any properties for authentication as they need. 
* * @see User */ protected abstract Properties getAvaticaConnectionPropertiesForUser(User user); @Test public void test_systemSchemaAccess_admin() throws Exception { final HttpClient adminClient = getHttpClient(User.ADMIN); // check that admin access works on all nodes checkNodeAccess(adminClient); // as admin LOG.info("Checking sys.segments query as admin..."); verifySystemSchemaQuery( adminClient, SYS_SCHEMA_SEGMENTS_QUERY, adminSegments ); LOG.info("Checking sys.servers query as admin..."); verifySystemSchemaServerQuery( adminClient, SYS_SCHEMA_SERVERS_QUERY, getServersWithoutNonConfigurableFields(adminServers) ); LOG.info("Checking sys.server_segments query as admin..."); verifySystemSchemaQuery( adminClient, SYS_SCHEMA_SERVER_SEGMENTS_QUERY, adminServerSegments ); LOG.info("Checking sys.tasks query as admin..."); verifySystemSchemaQuery( adminClient, SYS_SCHEMA_TASKS_QUERY, adminTasks ); } @Test public void test_systemSchemaAccess_datasourceOnlyUser() throws Exception { final HttpClient datasourceOnlyUserClient = getHttpClient(User.DATASOURCE_ONLY_USER); // check that we can access a datasource-permission restricted resource on the broker HttpUtil.makeRequest( datasourceOnlyUserClient, HttpMethod.GET, config.getBrokerUrl() + "/druid/v2/datasources/auth_test", null ); // as user that can only read auth_test LOG.info("Checking sys.segments query as datasourceOnlyUser..."); final String expectedMsg = "{\"Access-Check-Result\":\"" + Access.DEFAULT_ERROR_MESSAGE + "\"}"; verifySystemSchemaQueryFailure( datasourceOnlyUserClient, SYS_SCHEMA_SEGMENTS_QUERY, HttpResponseStatus.FORBIDDEN, expectedMsg ); LOG.info("Checking sys.servers query as datasourceOnlyUser..."); verifySystemSchemaQueryFailure( datasourceOnlyUserClient, SYS_SCHEMA_SERVERS_QUERY, HttpResponseStatus.FORBIDDEN, expectedMsg ); LOG.info("Checking sys.server_segments query as datasourceOnlyUser..."); verifySystemSchemaQueryFailure( datasourceOnlyUserClient, SYS_SCHEMA_SERVER_SEGMENTS_QUERY, 
HttpResponseStatus.FORBIDDEN, expectedMsg ); LOG.info("Checking sys.tasks query as datasourceOnlyUser..."); verifySystemSchemaQueryFailure( datasourceOnlyUserClient, SYS_SCHEMA_TASKS_QUERY, HttpResponseStatus.FORBIDDEN, expectedMsg ); } @Test public void test_systemSchemaAccess_datasourceAndSysUser() throws Exception { final HttpClient datasourceAndSysUserClient = getHttpClient(User.DATASOURCE_AND_SYS_USER); // check that we can access a datasource-permission restricted resource on the broker HttpUtil.makeRequest( datasourceAndSysUserClient, HttpMethod.GET, config.getBrokerUrl() + "/druid/v2/datasources/auth_test", null ); // as user that can only read auth_test LOG.info("Checking sys.segments query as datasourceAndSysUser..."); verifySystemSchemaQuery( datasourceAndSysUserClient, SYS_SCHEMA_SEGMENTS_QUERY, adminSegments.stream() .filter((segmentEntry) -> "auth_test".equals(segmentEntry.get("datasource"))) .collect(Collectors.toList()) ); LOG.info("Checking sys.servers query as datasourceAndSysUser..."); verifySystemSchemaQueryFailure( datasourceAndSysUserClient, SYS_SCHEMA_SERVERS_QUERY, HttpResponseStatus.FORBIDDEN, "{\"Access-Check-Result\":\"Insufficient permission to view servers: Unauthorized\"}" ); LOG.info("Checking sys.server_segments query as datasourceAndSysUser..."); verifySystemSchemaQueryFailure( datasourceAndSysUserClient, SYS_SCHEMA_SERVER_SEGMENTS_QUERY, HttpResponseStatus.FORBIDDEN, "{\"Access-Check-Result\":\"Insufficient permission to view servers: Unauthorized\"}" ); LOG.info("Checking sys.tasks query as datasourceAndSysUser..."); verifySystemSchemaQuery( datasourceAndSysUserClient, SYS_SCHEMA_TASKS_QUERY, adminTasks.stream() .filter((taskEntry) -> "auth_test".equals(taskEntry.get("datasource"))) .collect(Collectors.toList()) ); } @Test public void test_systemSchemaAccess_datasourceAndSysWithStateUser() throws Exception { final HttpClient datasourceWithStateUserClient = getHttpClient(User.DATASOURCE_WITH_STATE_USER); // check that we can access 
a state-permission restricted resource on the broker HttpUtil.makeRequest( datasourceWithStateUserClient, HttpMethod.GET, config.getBrokerUrl() + "/status", null ); // as user that can read auth_test and STATE LOG.info("Checking sys.segments query as datasourceWithStateUser..."); verifySystemSchemaQuery( datasourceWithStateUserClient, SYS_SCHEMA_SEGMENTS_QUERY, adminSegments.stream() .filter((segmentEntry) -> "auth_test".equals(segmentEntry.get("datasource"))) .collect(Collectors.toList()) ); LOG.info("Checking sys.servers query as datasourceWithStateUser..."); verifySystemSchemaServerQuery( datasourceWithStateUserClient, SYS_SCHEMA_SERVERS_QUERY, adminServers ); LOG.info("Checking sys.server_segments query as datasourceWithStateUser..."); verifySystemSchemaQuery( datasourceWithStateUserClient, SYS_SCHEMA_SERVER_SEGMENTS_QUERY, adminServerSegments.stream() .filter((serverSegmentEntry) -> ((String) serverSegmentEntry.get("segment_id")).contains( "auth_test")) .collect(Collectors.toList()) ); LOG.info("Checking sys.tasks query as datasourceWithStateUser..."); verifySystemSchemaQuery( datasourceWithStateUserClient, SYS_SCHEMA_TASKS_QUERY, adminTasks.stream() .filter((taskEntry) -> "auth_test".equals(taskEntry.get("datasource"))) .collect(Collectors.toList()) ); } @Test public void test_systemSchemaAccess_stateOnlyUser() throws Exception { final HttpClient stateOnlyUserClient = getHttpClient(User.STATE_ONLY_USER); HttpUtil.makeRequest(stateOnlyUserClient, HttpMethod.GET, config.getBrokerUrl() + "/status", null); // as user that can only read STATE LOG.info("Checking sys.segments query as stateOnlyUser..."); verifySystemSchemaQuery( stateOnlyUserClient, SYS_SCHEMA_SEGMENTS_QUERY, Collections.emptyList() ); LOG.info("Checking sys.servers query as stateOnlyUser..."); verifySystemSchemaServerQuery( stateOnlyUserClient, SYS_SCHEMA_SERVERS_QUERY, adminServers ); LOG.info("Checking sys.server_segments query as stateOnlyUser..."); verifySystemSchemaQuery( stateOnlyUserClient, 
SYS_SCHEMA_SERVER_SEGMENTS_QUERY, Collections.emptyList() ); LOG.info("Checking sys.tasks query as stateOnlyUser..."); verifySystemSchemaQuery( stateOnlyUserClient, SYS_SCHEMA_TASKS_QUERY, Collections.emptyList() ); } @Test public void test_unsecuredPathWithoutCredentials_allowed() { // check that we are allowed to access unsecured path without credentials. checkUnsecuredCoordinatorLoadQueuePath(httpClient); } @Test public void test_admin_loadStatus() throws Exception { checkLoadStatus(getHttpClient(User.ADMIN)); } @Test public void test_admin_hasNodeAccess() { checkNodeAccess(getHttpClient(User.ADMIN)); } @Test public void test_internalSystemUser_hasNodeAccess() { checkNodeAccess(getHttpClient(User.INTERNAL_SYSTEM)); } @Test public void test_avaticaQuery_broker() { final Properties properties = getAvaticaConnectionPropertiesForAdmin(); testAvaticaQuery(properties, getBrokerAvacticaUrl()); testAvaticaQuery(properties, StringUtils.maybeRemoveTrailingSlash(getBrokerAvacticaUrl())); } @Test public void test_avaticaQuery_router() { final Properties properties = getAvaticaConnectionPropertiesForAdmin(); testAvaticaQuery(properties, getRouterAvacticaUrl()); testAvaticaQuery(properties, StringUtils.maybeRemoveTrailingSlash(getRouterAvacticaUrl())); } @Test public void test_avaticaQueryAuthFailure_broker() throws Exception { final Properties properties = getAvaticaConnectionPropertiesForInvalidAdmin(); testAvaticaAuthFailure(properties, getBrokerAvacticaUrl()); } @Test public void test_avaticaQueryAuthFailure_router() throws Exception { final Properties properties = getAvaticaConnectionPropertiesForInvalidAdmin(); testAvaticaAuthFailure(properties, getRouterAvacticaUrl()); } @Test public void test_avaticaQueryWithContext_datasourceOnlyUser_fail() throws Exception { final Properties properties = getAvaticaConnectionPropertiesForUser(User.DATASOURCE_ONLY_USER); properties.setProperty("auth_test_ctx", "should-be-denied"); testAvaticaAuthzFailure(properties, 
getRouterAvacticaUrl()); } @Test public void test_avaticaQueryWithContext_datasourceAndContextParamsUser_succeed() { final Properties properties = getAvaticaConnectionPropertiesForUser(User.DATASOURCE_AND_CONTEXT_PARAMS_USER); properties.setProperty("auth_test_ctx", "should-be-allowed"); testAvaticaQuery(properties, getRouterAvacticaUrl()); } @Test public void test_msqQueryWithContext_datasourceOnlyUser_fail() throws Exception { final String query = "select count(*) from auth_test"; makeMSQQueryRequest( getHttpClient(User.DATASOURCE_ONLY_USER), query, ImmutableMap.of("auth_test_ctx", "should-be-denied"), HttpResponseStatus.FORBIDDEN ); } @Test public void test_msqQueryWithContext_datasourceAndContextParamsUser_succeed() throws Exception { final String query = "select count(*) from auth_test"; StatusResponseHolder responseHolder = makeMSQQueryRequest( getHttpClient(User.DATASOURCE_AND_CONTEXT_PARAMS_USER), query, ImmutableMap.of("auth_test_ctx", "should-be-allowed"), HttpResponseStatus.ACCEPTED ); String taskId = jsonMapper.readValue(responseHolder.getContent(), SqlTaskStatus.class).getTaskId(); msqHelper.pollTaskIdForSuccess(taskId); } @Test public void test_dartQueryWithContext_datasourceOnlyUser_fail() throws Exception { final String query = "select count(*) from auth_test"; makeDartQueryRequest( getHttpClient(User.DATASOURCE_ONLY_USER), query, ImmutableMap.of("auth_test_ctx", "should-be-denied"), HttpResponseStatus.FORBIDDEN ); } @Test public void test_dartQueryWithContext_datasourceAndContextParamsUser_succeed() throws Exception { final String query = "select count(*) from auth_test"; makeDartQueryRequest( getHttpClient(User.DATASOURCE_AND_CONTEXT_PARAMS_USER), query, ImmutableMap.of("auth_test_ctx", "should-be-allowed"), HttpResponseStatus.OK ); } @Test public void test_sqlQueryWithContext_datasourceOnlyUser_fail() throws Exception { final String query = "select count(*) from auth_test"; makeSQLQueryRequest( getHttpClient(User.DATASOURCE_ONLY_USER), query, 
ImmutableMap.of("auth_test_ctx", "should-be-denied"), HttpResponseStatus.FORBIDDEN ); } @Test public void test_sqlQueryWithContext_datasourceAndContextParamsUser_succeed() throws Exception { final String query = "select count(*) from auth_test"; makeSQLQueryRequest( getHttpClient(User.DATASOURCE_AND_CONTEXT_PARAMS_USER), query, ImmutableMap.of("auth_test_ctx", "should-be-allowed"), HttpResponseStatus.OK ); } @Test public void test_admin_optionsRequest() { verifyAdminOptionsRequest(); } @Test public void test_authentication_invalidAuthName_fails() { verifyAuthenticationInvalidAuthNameFails(); } @Test public void test_authorization_invalidAuthName_fails() { verifyAuthorizationInvalidAuthNameFails(); } @Test public void test_groupMappings_invalidAuthName_fails() { verifyGroupMappingsInvalidAuthNameFails(); } @Test public void testMaliciousUser() { verifyMaliciousUser(); } protected HttpClient getHttpClient(User user) { return Preconditions.checkNotNull(httpClients.get(user), "http client for user[%s]", user.getName()); } protected void setupHttpClientsAndUsers() throws Exception { setupHttpClients(); setupDatasourceOnlyUser(); setupDatasourceAndContextParamsUser(); setupDatasourceAndSysTableUser(); setupDatasourceAndSysAndStateUser(); setupSysTableAndStateOnlyUser(); } protected void checkNodeAccess(HttpClient httpClient) { HttpUtil.makeRequest(httpClient, HttpMethod.GET, config.getCoordinatorUrl() + "/status", null); HttpUtil.makeRequest(httpClient, HttpMethod.GET, config.getOverlordUrl() + "/status", null); HttpUtil.makeRequest(httpClient, HttpMethod.GET, config.getBrokerUrl() + "/status", null); HttpUtil.makeRequest(httpClient, HttpMethod.GET, config.getHistoricalUrl() + "/status", null); HttpUtil.makeRequest(httpClient, HttpMethod.GET, config.getRouterUrl() + "/status", null); } protected void checkLoadStatus(HttpClient httpClient) throws Exception { checkLoadStatusSingle(httpClient, config.getCoordinatorUrl()); checkLoadStatusSingle(httpClient, 
config.getOverlordUrl()); checkLoadStatusSingle(httpClient, config.getBrokerUrl()); checkLoadStatusSingle(httpClient, config.getHistoricalUrl()); checkLoadStatusSingle(httpClient, config.getRouterUrl()); } protected void testOptionsRequests(HttpClient httpClient) { HttpUtil.makeRequest(httpClient, HttpMethod.OPTIONS, config.getCoordinatorUrl() + "/status", null); HttpUtil.makeRequest(httpClient, HttpMethod.OPTIONS, config.getOverlordUrl() + "/status", null); HttpUtil.makeRequest(httpClient, HttpMethod.OPTIONS, config.getBrokerUrl() + "/status", null); HttpUtil.makeRequest(httpClient, HttpMethod.OPTIONS, config.getHistoricalUrl() + "/status", null); HttpUtil.makeRequest(httpClient, HttpMethod.OPTIONS, config.getRouterUrl() + "/status", null); } protected void checkUnsecuredCoordinatorLoadQueuePath(HttpClient client) { HttpUtil.makeRequest(client, HttpMethod.GET, config.getCoordinatorUrl() + "/druid/coordinator/v1/loadqueue", null); } private Properties getAvaticaConnectionPropertiesForAdmin() { return getAvaticaConnectionPropertiesForUser(User.ADMIN); } protected void testAvaticaQuery(Properties connectionProperties, String url) { LOG.info("URL: " + url); try ( Connection connection = DriverManager.getConnection(url, connectionProperties); Statement statement = connection.createStatement()) { statement.setMaxRows(450); String query = "SELECT * FROM INFORMATION_SCHEMA.COLUMNS"; ResultSet resultSet = statement.executeQuery(query); Assert.assertTrue(resultSet.next()); } catch (Exception e) { throw new RuntimeException(e); } } protected void testAvaticaAuthFailure(Properties connectionProperties, String url) throws Exception { testAvaticaAuthFailure(connectionProperties, url, getExpectedAvaticaAuthError()); } protected void testAvaticaAuthzFailure(Properties connectionProperties, String url) throws Exception { testAvaticaAuthFailure(connectionProperties, url, getExpectedAvaticaAuthzError()); } protected void testAvaticaAuthFailure(Properties connectionProperties, String 
url, String expectedError) throws Exception { LOG.info("URL: " + url); try ( Connection connection = DriverManager.getConnection(url, connectionProperties); Statement statement = connection.createStatement()) { statement.setMaxRows(450); String query = "SELECT * FROM INFORMATION_SCHEMA.COLUMNS"; statement.executeQuery(query); } catch (AvaticaSqlException ase) { Assert.assertEquals( ase.getErrorMessage(), expectedError ); return; } Assert.fail("Test failed, did not get AvaticaSqlException."); } protected void checkLoadStatusSingle(HttpClient httpClient, String baseUrl) throws Exception { StatusResponseHolder holder = HttpUtil.makeRequest( httpClient, HttpMethod.GET, baseUrl + "/druid-ext/basic-security/authentication/loadStatus", null ); String content = holder.getContent(); Map<String, Boolean> loadStatus = jsonMapper.readValue(content, JacksonUtils.TYPE_REFERENCE_MAP_STRING_BOOLEAN); String authenticatorName = getAuthenticatorName(); Assert.assertNotNull(loadStatus.get(authenticatorName)); Assert.assertTrue(loadStatus.get(authenticatorName)); holder = HttpUtil.makeRequest( httpClient, HttpMethod.GET, baseUrl + "/druid-ext/basic-security/authorization/loadStatus", null ); content = holder.getContent(); loadStatus = jsonMapper.readValue(content, JacksonUtils.TYPE_REFERENCE_MAP_STRING_BOOLEAN); String authorizerName = getAuthorizerName(); Assert.assertNotNull(loadStatus.get(authorizerName)); Assert.assertTrue(loadStatus.get(authorizerName)); } protected StatusResponseHolder makeSQLQueryRequest( HttpClient httpClient, String query, HttpResponseStatus expectedStatus ) throws Exception { return makeSQLQueryRequest(httpClient, query, "/druid/v2/sql", ImmutableMap.of(), expectedStatus); } protected StatusResponseHolder makeSQLQueryRequest( HttpClient httpClient, String query, Map<String, Object> context, HttpResponseStatus expectedStatus ) throws Exception { return makeSQLQueryRequest(httpClient, query, "/druid/v2/sql", context, expectedStatus); } protected 
StatusResponseHolder makeMSQQueryRequest( HttpClient httpClient, String query, Map<String, Object> context, HttpResponseStatus expectedStatus ) throws Exception { return makeSQLQueryRequest(httpClient, query, "/druid/v2/sql/task", context, expectedStatus); } protected StatusResponseHolder makeDartQueryRequest( HttpClient httpClient, String query, Map<String, Object> context, HttpResponseStatus expectedStatus ) throws Exception { final Map<String, Object> dartContext = new HashMap<>(context); dartContext.put(QueryContexts.ENGINE, DartSqlEngine.NAME); return makeSQLQueryRequest(httpClient, query, "/druid/v2/sql", dartContext, expectedStatus); } protected StatusResponseHolder makeSQLQueryRequest( HttpClient httpClient, String query, String path, Map<String, Object> context, HttpResponseStatus expectedStatus ) throws Exception { Map<String, Object> queryMap = ImmutableMap.of( "query", query, "context", context ); return HttpUtil.makeRequestWithExpectedStatus( httpClient, HttpMethod.POST, config.getBrokerUrl() + path, jsonMapper.writeValueAsBytes(queryMap), expectedStatus ); } protected void verifySystemSchemaQueryBase( HttpClient client, String query, List<Map<String, Object>> expectedResults, boolean isServerQuery ) throws Exception { StatusResponseHolder responseHolder = makeSQLQueryRequest(client, query, HttpResponseStatus.OK); String content = responseHolder.getContent(); List<Map<String, Object>> responseMap = jsonMapper.readValue(content, SYS_SCHEMA_RESULTS_TYPE_REFERENCE); if (isServerQuery) { responseMap = getServersWithoutNonConfigurableFields(responseMap); } Assert.assertEquals(responseMap, expectedResults); } protected void verifySystemSchemaQuery( HttpClient client, String query, List<Map<String, Object>> expectedResults ) throws Exception { verifySystemSchemaQueryBase(client, query, expectedResults, false); } protected void verifySystemSchemaServerQuery( HttpClient client, String query, List<Map<String, Object>> expectedResults ) throws Exception { 
verifySystemSchemaQueryBase(client, query, expectedResults, true); } protected void verifySystemSchemaQueryFailure( HttpClient client, String query, HttpResponseStatus expectedErrorStatus, String expectedErrorMessage ) throws Exception { StatusResponseHolder responseHolder = makeSQLQueryRequest(client, query, expectedErrorStatus); Assert.assertEquals(responseHolder.getStatus(), expectedErrorStatus); Assert.assertEquals(responseHolder.getContent(), expectedErrorMessage); } protected String getBrokerAvacticaUrl() { return "jdbc:avatica:remote:url=" + config.getBrokerUrl() + DruidAvaticaJsonHandler.AVATICA_PATH; } protected String getRouterAvacticaUrl() { return "jdbc:avatica:remote:url=" + config.getRouterUrl() + DruidAvaticaJsonHandler.AVATICA_PATH; } protected void verifyAdminOptionsRequest() { testOptionsRequests(getHttpClient(User.ADMIN)); } protected void verifyAuthenticationInvalidAuthNameFails() { verifyInvalidAuthNameFails(StringUtils.format( "%s/druid-ext/basic-security/authentication/listen/%s", config.getCoordinatorUrl(), INVALID_NAME )); } protected void verifyAuthorizationInvalidAuthNameFails() { verifyInvalidAuthNameFails(StringUtils.format( "%s/druid-ext/basic-security/authorization/listen/users/%s", config.getCoordinatorUrl(), INVALID_NAME )); } protected void verifyGroupMappingsInvalidAuthNameFails() { verifyInvalidAuthNameFails(StringUtils.format( "%s/druid-ext/basic-security/authorization/listen/groupMappings/%s", config.getCoordinatorUrl(), INVALID_NAME )); } protected void verifyInvalidAuthNameFails(String endpoint) { HttpUtil.makeRequestWithExpectedStatus( getHttpClient(User.ADMIN), HttpMethod.POST, endpoint, "SERIALIZED_DATA".getBytes(StandardCharsets.UTF_8), HttpResponseStatus.INTERNAL_SERVER_ERROR ); } protected void verifyMaliciousUser() { String maliciousUsername = "<script>alert('hello')</script>"; HttpClient maliciousClient = new CredentialedHttpClient( new BasicCredentials(maliciousUsername, "noPass"), httpClient ); StatusResponseHolder 
responseHolder = HttpUtil.makeRequestWithExpectedStatus( maliciousClient, HttpMethod.GET, config.getBrokerUrl() + "/status", null, HttpResponseStatus.UNAUTHORIZED ); String responseContent = responseHolder.getContent(); Assert.assertTrue(responseContent.contains("<tr><th>MESSAGE:</th><td>Unauthorized</td></tr>")); Assert.assertFalse(responseContent.contains(maliciousUsername)); } protected void setupHttpClients() throws Exception { setupCommonHttpClients(); setupTestSpecificHttpClients(); } protected void setupCommonHttpClients() { httpClients = new HashMap<>(); for (User user : User.values()) { httpClients.put(user, setupHttpClientForUser(user.getName(), user.getPassword())); } } /** * Creates a HttpClient with the given user credentials. * Implementations can override this method to return a different implementation of HttpClient * than the basic CredentialedHttpClient. */ protected HttpClient setupHttpClientForUser(String username, String password) { return new CredentialedHttpClient( new BasicCredentials(username, password), httpClient ); } protected void setExpectedSystemSchemaObjects() throws IOException { // initial setup is done now, run the system schema response content tests adminSegments = jsonMapper.readValue( TestQueryHelper.class.getResourceAsStream(SYSTEM_SCHEMA_SEGMENTS_RESULTS_RESOURCE), SYS_SCHEMA_RESULTS_TYPE_REFERENCE ); adminTasks = jsonMapper.readValue( TestQueryHelper.class.getResourceAsStream(SYSTEM_SCHEMA_TASKS_RESULTS_RESOURCE), SYS_SCHEMA_RESULTS_TYPE_REFERENCE ); adminServers = getServersWithoutNonConfigurableFields( jsonMapper.readValue( fillServersTemplate( config, AbstractIndexerTest.getResourceAsString(SYSTEM_SCHEMA_SERVERS_RESULTS_RESOURCE) ), SYS_SCHEMA_RESULTS_TYPE_REFERENCE ) ); adminServerSegments = jsonMapper.readValue( fillSegementServersTemplate( config, AbstractIndexerTest.getResourceAsString(SYSTEM_SCHEMA_SERVER_SEGMENTS_RESULTS_RESOURCE) ), SYS_SCHEMA_RESULTS_TYPE_REFERENCE ); } /** * curr_size on historicals changes 
because cluster state is not isolated across * different * integration tests, zero it out for consistent test results * version and start_time are not configurable therefore we zero them as well */ protected static List<Map<String, Object>> getServersWithoutNonConfigurableFields(List<Map<String, Object>> servers) { return Lists.transform( servers, (server) -> { Map<String, Object> newServer = new HashMap<>(server); newServer.put("curr_size", 0); newServer.put("start_time", "0"); newServer.put("version", "0.0.0"); return newServer; } ); } protected static String fillSegementServersTemplate(IntegrationTestingConfig config, String template) { return StringUtils.replace(template, "%%HISTORICAL%%", config.getHistoricalInternalHost()); } protected static String fillServersTemplate(IntegrationTestingConfig config, String template) { String json = StringUtils.replace(template, "%%HISTORICAL%%", config.getHistoricalInternalHost()); json = StringUtils.replace(json, "%%BROKER%%", config.getBrokerInternalHost()); return json; } }
googleapis/google-cloud-java
36,509
java-recommender/proto-google-cloud-recommender-v1/src/main/java/com/google/cloud/recommender/v1/InsightStateInfo.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/recommender/v1/insight.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.recommender.v1; /** * * * <pre> * Information related to insight state. * </pre> * * Protobuf type {@code google.cloud.recommender.v1.InsightStateInfo} */ public final class InsightStateInfo extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.recommender.v1.InsightStateInfo) InsightStateInfoOrBuilder { private static final long serialVersionUID = 0L; // Use InsightStateInfo.newBuilder() to construct. 
private InsightStateInfo(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private InsightStateInfo() { state_ = 0; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new InsightStateInfo(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.recommender.v1.InsightProto .internal_static_google_cloud_recommender_v1_InsightStateInfo_descriptor; } @SuppressWarnings({"rawtypes"}) @java.lang.Override protected com.google.protobuf.MapFieldReflectionAccessor internalGetMapFieldReflection( int number) { switch (number) { case 2: return internalGetStateMetadata(); default: throw new RuntimeException("Invalid map field number: " + number); } } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.recommender.v1.InsightProto .internal_static_google_cloud_recommender_v1_InsightStateInfo_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.recommender.v1.InsightStateInfo.class, com.google.cloud.recommender.v1.InsightStateInfo.Builder.class); } /** * * * <pre> * Represents insight state. * </pre> * * Protobuf enum {@code google.cloud.recommender.v1.InsightStateInfo.State} */ public enum State implements com.google.protobuf.ProtocolMessageEnum { /** * * * <pre> * Unspecified state. * </pre> * * <code>STATE_UNSPECIFIED = 0;</code> */ STATE_UNSPECIFIED(0), /** * * * <pre> * Insight is active. Content for ACTIVE insights can be updated by Google. * ACTIVE insights can be marked DISMISSED OR ACCEPTED. * </pre> * * <code>ACTIVE = 1;</code> */ ACTIVE(1), /** * * * <pre> * Some action has been taken based on this insight. Insights become * accepted when a recommendation derived from the insight has been marked * CLAIMED, SUCCEEDED, or FAILED. ACTIVE insights can also be marked * ACCEPTED explicitly. 
Content for ACCEPTED insights is immutable. ACCEPTED * insights can only be marked ACCEPTED (which may update state metadata). * </pre> * * <code>ACCEPTED = 2;</code> */ ACCEPTED(2), /** * * * <pre> * Insight is dismissed. Content for DISMISSED insights can be updated by * Google. DISMISSED insights can be marked as ACTIVE. * </pre> * * <code>DISMISSED = 3;</code> */ DISMISSED(3), UNRECOGNIZED(-1), ; /** * * * <pre> * Unspecified state. * </pre> * * <code>STATE_UNSPECIFIED = 0;</code> */ public static final int STATE_UNSPECIFIED_VALUE = 0; /** * * * <pre> * Insight is active. Content for ACTIVE insights can be updated by Google. * ACTIVE insights can be marked DISMISSED OR ACCEPTED. * </pre> * * <code>ACTIVE = 1;</code> */ public static final int ACTIVE_VALUE = 1; /** * * * <pre> * Some action has been taken based on this insight. Insights become * accepted when a recommendation derived from the insight has been marked * CLAIMED, SUCCEEDED, or FAILED. ACTIVE insights can also be marked * ACCEPTED explicitly. Content for ACCEPTED insights is immutable. ACCEPTED * insights can only be marked ACCEPTED (which may update state metadata). * </pre> * * <code>ACCEPTED = 2;</code> */ public static final int ACCEPTED_VALUE = 2; /** * * * <pre> * Insight is dismissed. Content for DISMISSED insights can be updated by * Google. DISMISSED insights can be marked as ACTIVE. * </pre> * * <code>DISMISSED = 3;</code> */ public static final int DISMISSED_VALUE = 3; public final int getNumber() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalArgumentException( "Can't get the number of an unknown enum value."); } return value; } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. * @deprecated Use {@link #forNumber(int)} instead. 
*/ @java.lang.Deprecated public static State valueOf(int value) { return forNumber(value); } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. */ public static State forNumber(int value) { switch (value) { case 0: return STATE_UNSPECIFIED; case 1: return ACTIVE; case 2: return ACCEPTED; case 3: return DISMISSED; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<State> internalGetValueMap() { return internalValueMap; } private static final com.google.protobuf.Internal.EnumLiteMap<State> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<State>() { public State findValueByNumber(int number) { return State.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalStateException( "Can't get the descriptor of an unrecognized enum value."); } return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return com.google.cloud.recommender.v1.InsightStateInfo.getDescriptor().getEnumTypes().get(0); } private static final State[] VALUES = values(); public static State valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type."); } if (desc.getIndex() == -1) { return UNRECOGNIZED; } return VALUES[desc.getIndex()]; } private final int value; private State(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:google.cloud.recommender.v1.InsightStateInfo.State) } public static final int STATE_FIELD_NUMBER = 1; private int state_ = 0; /** * * * <pre> * Insight state. 
* </pre> * * <code>.google.cloud.recommender.v1.InsightStateInfo.State state = 1;</code> * * @return The enum numeric value on the wire for state. */ @java.lang.Override public int getStateValue() { return state_; } /** * * * <pre> * Insight state. * </pre> * * <code>.google.cloud.recommender.v1.InsightStateInfo.State state = 1;</code> * * @return The state. */ @java.lang.Override public com.google.cloud.recommender.v1.InsightStateInfo.State getState() { com.google.cloud.recommender.v1.InsightStateInfo.State result = com.google.cloud.recommender.v1.InsightStateInfo.State.forNumber(state_); return result == null ? com.google.cloud.recommender.v1.InsightStateInfo.State.UNRECOGNIZED : result; } public static final int STATE_METADATA_FIELD_NUMBER = 2; private static final class StateMetadataDefaultEntryHolder { static final com.google.protobuf.MapEntry<java.lang.String, java.lang.String> defaultEntry = com.google.protobuf.MapEntry.<java.lang.String, java.lang.String>newDefaultInstance( com.google.cloud.recommender.v1.InsightProto .internal_static_google_cloud_recommender_v1_InsightStateInfo_StateMetadataEntry_descriptor, com.google.protobuf.WireFormat.FieldType.STRING, "", com.google.protobuf.WireFormat.FieldType.STRING, ""); } @SuppressWarnings("serial") private com.google.protobuf.MapField<java.lang.String, java.lang.String> stateMetadata_; private com.google.protobuf.MapField<java.lang.String, java.lang.String> internalGetStateMetadata() { if (stateMetadata_ == null) { return com.google.protobuf.MapField.emptyMapField( StateMetadataDefaultEntryHolder.defaultEntry); } return stateMetadata_; } public int getStateMetadataCount() { return internalGetStateMetadata().getMap().size(); } /** * * * <pre> * A map of metadata for the state, provided by user or automations systems. 
* </pre> * * <code>map&lt;string, string&gt; state_metadata = 2;</code> */ @java.lang.Override public boolean containsStateMetadata(java.lang.String key) { if (key == null) { throw new NullPointerException("map key"); } return internalGetStateMetadata().getMap().containsKey(key); } /** Use {@link #getStateMetadataMap()} instead. */ @java.lang.Override @java.lang.Deprecated public java.util.Map<java.lang.String, java.lang.String> getStateMetadata() { return getStateMetadataMap(); } /** * * * <pre> * A map of metadata for the state, provided by user or automations systems. * </pre> * * <code>map&lt;string, string&gt; state_metadata = 2;</code> */ @java.lang.Override public java.util.Map<java.lang.String, java.lang.String> getStateMetadataMap() { return internalGetStateMetadata().getMap(); } /** * * * <pre> * A map of metadata for the state, provided by user or automations systems. * </pre> * * <code>map&lt;string, string&gt; state_metadata = 2;</code> */ @java.lang.Override public /* nullable */ java.lang.String getStateMetadataOrDefault( java.lang.String key, /* nullable */ java.lang.String defaultValue) { if (key == null) { throw new NullPointerException("map key"); } java.util.Map<java.lang.String, java.lang.String> map = internalGetStateMetadata().getMap(); return map.containsKey(key) ? map.get(key) : defaultValue; } /** * * * <pre> * A map of metadata for the state, provided by user or automations systems. 
* </pre> * * <code>map&lt;string, string&gt; state_metadata = 2;</code> */
// NOTE(review): generated protobuf code — tail of message
// com.google.cloud.recommender.v1.InsightStateInfo. Do not edit by hand; regenerate from the
// google/cloud/recommender/v1 protos instead. This block was reformatted from a collapsed
// extraction (the original single-line form was broken by trailing // comments); all code
// tokens are unchanged, only comments and layout differ.
@java.lang.Override
public java.lang.String getStateMetadataOrThrow(java.lang.String key) {
  if (key == null) {
    throw new NullPointerException("map key");
  }
  java.util.Map<java.lang.String, java.lang.String> map = internalGetStateMetadata().getMap();
  if (!map.containsKey(key)) {
    // Absent key is a caller error for the *OrThrow accessor variant.
    throw new java.lang.IllegalArgumentException();
  }
  return map.get(key);
}

// Memoized isInitialized state: -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  memoizedIsInitialized = 1;
  return true;
}

// Serializes field 1 (state enum, skipped at its default) and field 2 (state_metadata map).
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  if (state_
      != com.google.cloud.recommender.v1.InsightStateInfo.State.STATE_UNSPECIFIED.getNumber()) {
    output.writeEnum(1, state_);
  }
  com.google.protobuf.GeneratedMessageV3.serializeStringMapTo(
      output, internalGetStateMetadata(), StateMetadataDefaultEntryHolder.defaultEntry, 2);
  getUnknownFields().writeTo(output);
}

@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (state_
      != com.google.cloud.recommender.v1.InsightStateInfo.State.STATE_UNSPECIFIED.getNumber()) {
    size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, state_);
  }
  // Map entries are sized as repeated MapEntry messages on the wire.
  for (java.util.Map.Entry<java.lang.String, java.lang.String> entry :
      internalGetStateMetadata().getMap().entrySet()) {
    com.google.protobuf.MapEntry<java.lang.String, java.lang.String> stateMetadata__ =
        StateMetadataDefaultEntryHolder.defaultEntry
            .newBuilderForType()
            .setKey(entry.getKey())
            .setValue(entry.getValue())
            .build();
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, stateMetadata__);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}

@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.recommender.v1.InsightStateInfo)) {
    return super.equals(obj);
  }
  com.google.cloud.recommender.v1.InsightStateInfo other =
      (com.google.cloud.recommender.v1.InsightStateInfo) obj;
  if (state_ != other.state_) return false;
  if (!internalGetStateMetadata().equals(other.internalGetStateMetadata())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}

@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + STATE_FIELD_NUMBER;
  hash = (53 * hash) + state_;
  if (!internalGetStateMetadata().getMap().isEmpty()) {
    hash = (37 * hash) + STATE_METADATA_FIELD_NUMBER;
    hash = (53 * hash) + internalGetStateMetadata().hashCode();
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}

// Static parseFrom overloads for every supported input source.
public static com.google.cloud.recommender.v1.InsightStateInfo parseFrom(java.nio.ByteBuffer data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.recommender.v1.InsightStateInfo parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.recommender.v1.InsightStateInfo parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.recommender.v1.InsightStateInfo parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.recommender.v1.InsightStateInfo parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.recommender.v1.InsightStateInfo parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.recommender.v1.InsightStateInfo parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.recommender.v1.InsightStateInfo parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.recommender.v1.InsightStateInfo parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.recommender.v1.InsightStateInfo parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.recommender.v1.InsightStateInfo parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.recommender.v1.InsightStateInfo parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

// Builder factory plumbing.
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

public static Builder newBuilder(com.google.cloud.recommender.v1.InsightStateInfo prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}

/**
 * Builder for {@code google.cloud.recommender.v1.InsightStateInfo} — information related to
 * insight state.
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:google.cloud.recommender.v1.InsightStateInfo)
    com.google.cloud.recommender.v1.InsightStateInfoOrBuilder {
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.recommender.v1.InsightProto
        .internal_static_google_cloud_recommender_v1_InsightStateInfo_descriptor;
  }

  // Reflection hook: map field 2 (state_metadata) is the only map field in this message.
  @SuppressWarnings({"rawtypes"})
  protected com.google.protobuf.MapFieldReflectionAccessor internalGetMapFieldReflection(
      int number) {
    switch (number) {
      case 2:
        return internalGetStateMetadata();
      default:
        throw new RuntimeException("Invalid map field number: " + number);
    }
  }

  @SuppressWarnings({"rawtypes"})
  protected com.google.protobuf.MapFieldReflectionAccessor internalGetMutableMapFieldReflection(
      int number) {
    switch (number) {
      case 2:
        return internalGetMutableStateMetadata();
      default:
        throw new RuntimeException("Invalid map field number: " + number);
    }
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.recommender.v1.InsightProto
        .internal_static_google_cloud_recommender_v1_InsightStateInfo_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.recommender.v1.InsightStateInfo.class,
            com.google.cloud.recommender.v1.InsightStateInfo.Builder.class);
  }

  // Construct using com.google.cloud.recommender.v1.InsightStateInfo.newBuilder()
  private Builder() {}

  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
  }

  @java.lang.Override
  public Builder clear() {
    super.clear();
    bitField0_ = 0;
    state_ = 0;
    internalGetMutableStateMetadata().clear();
    return this;
  }

  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.cloud.recommender.v1.InsightProto
        .internal_static_google_cloud_recommender_v1_InsightStateInfo_descriptor;
  }

  @java.lang.Override
  public com.google.cloud.recommender.v1.InsightStateInfo getDefaultInstanceForType() {
    return com.google.cloud.recommender.v1.InsightStateInfo.getDefaultInstance();
  }

  @java.lang.Override
  public com.google.cloud.recommender.v1.InsightStateInfo build() {
    com.google.cloud.recommender.v1.InsightStateInfo result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  @java.lang.Override
  public com.google.cloud.recommender.v1.InsightStateInfo buildPartial() {
    com.google.cloud.recommender.v1.InsightStateInfo result =
        new com.google.cloud.recommender.v1.InsightStateInfo(this);
    if (bitField0_ != 0) {
      buildPartial0(result);
    }
    onBuilt();
    return result;
  }

  // Copies only the fields whose presence bits are set in bitField0_.
  private void buildPartial0(com.google.cloud.recommender.v1.InsightStateInfo result) {
    int from_bitField0_ = bitField0_;
    if (((from_bitField0_ & 0x00000001) != 0)) {
      result.state_ = state_;
    }
    if (((from_bitField0_ & 0x00000002) != 0)) {
      result.stateMetadata_ = internalGetStateMetadata();
      result.stateMetadata_.makeImmutable();
    }
  }

  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }

  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
  }

  @java.lang.Override
  public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }

  @java.lang.Override
  public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }

  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }

  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }

  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof com.google.cloud.recommender.v1.InsightStateInfo) {
      return mergeFrom((com.google.cloud.recommender.v1.InsightStateInfo) other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  public Builder mergeFrom(com.google.cloud.recommender.v1.InsightStateInfo other) {
    if (other == com.google.cloud.recommender.v1.InsightStateInfo.getDefaultInstance())
      return this;
    if (other.state_ != 0) {
      setStateValue(other.getStateValue());
    }
    internalGetMutableStateMetadata().mergeFrom(other.internalGetStateMetadata());
    bitField0_ |= 0x00000002;
    this.mergeUnknownFields(other.getUnknownFields());
    onChanged();
    return this;
  }

  @java.lang.Override
  public final boolean isInitialized() {
    return true;
  }

  // Streaming merge: reads tag/value pairs until EOF (tag 0) or an end-group tag.
  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 8:
            {
              state_ = input.readEnum();
              bitField0_ |= 0x00000001;
              break;
            } // case 8
          case 18:
            {
              com.google.protobuf.MapEntry<java.lang.String, java.lang.String> stateMetadata__ =
                  input.readMessage(
                      StateMetadataDefaultEntryHolder.defaultEntry.getParserForType(),
                      extensionRegistry);
              internalGetMutableStateMetadata()
                  .getMutableMap()
                  .put(stateMetadata__.getKey(), stateMetadata__.getValue());
              bitField0_ |= 0x00000002;
              break;
            } // case 18
          default:
            {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
        } // switch (tag)
      } // while (!done)
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.unwrapIOException();
    } finally {
      onChanged();
    } // finally
    return this;
  }

  private int bitField0_;

  private int state_ = 0;

  /** Returns the raw wire value of the {@code state} enum (field 1). */
  @java.lang.Override
  public int getStateValue() {
    return state_;
  }

  /** Sets the raw wire value of {@code state} and marks the field present. */
  public Builder setStateValue(int value) {
    state_ = value;
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }

  /** Returns {@code state} as an enum, or UNRECOGNIZED for unknown wire values. */
  @java.lang.Override
  public com.google.cloud.recommender.v1.InsightStateInfo.State getState() {
    com.google.cloud.recommender.v1.InsightStateInfo.State result =
        com.google.cloud.recommender.v1.InsightStateInfo.State.forNumber(state_);
    return result == null
        ? com.google.cloud.recommender.v1.InsightStateInfo.State.UNRECOGNIZED
        : result;
  }

  /** Sets {@code state}; rejects null (use {@link #clearState()} to unset). */
  public Builder setState(com.google.cloud.recommender.v1.InsightStateInfo.State value) {
    if (value == null) {
      throw new NullPointerException();
    }
    bitField0_ |= 0x00000001;
    state_ = value.getNumber();
    onChanged();
    return this;
  }

  /** Clears {@code state} back to its default and clears its presence bit. */
  public Builder clearState() {
    bitField0_ = (bitField0_ & ~0x00000001);
    state_ = 0;
    onChanged();
    return this;
  }

  private com.google.protobuf.MapField<java.lang.String, java.lang.String> stateMetadata_;

  private com.google.protobuf.MapField<java.lang.String, java.lang.String>
      internalGetStateMetadata() {
    if (stateMetadata_ == null) {
      return com.google.protobuf.MapField.emptyMapField(
          StateMetadataDefaultEntryHolder.defaultEntry);
    }
    return stateMetadata_;
  }

  // Lazily creates / copy-on-writes the map field and marks it present.
  private com.google.protobuf.MapField<java.lang.String, java.lang.String>
      internalGetMutableStateMetadata() {
    if (stateMetadata_ == null) {
      stateMetadata_ =
          com.google.protobuf.MapField.newMapField(StateMetadataDefaultEntryHolder.defaultEntry);
    }
    if (!stateMetadata_.isMutable()) {
      stateMetadata_ = stateMetadata_.copy();
    }
    bitField0_ |= 0x00000002;
    onChanged();
    return stateMetadata_;
  }

  public int getStateMetadataCount() {
    return internalGetStateMetadata().getMap().size();
  }

  /** Returns whether the state_metadata map (field 2) contains {@code key}. */
  @java.lang.Override
  public boolean containsStateMetadata(java.lang.String key) {
    if (key == null) {
      throw new NullPointerException("map key");
    }
    return internalGetStateMetadata().getMap().containsKey(key);
  }

  /** Use {@link #getStateMetadataMap()} instead. */
  @java.lang.Override
  @java.lang.Deprecated
  public java.util.Map<java.lang.String, java.lang.String> getStateMetadata() {
    return getStateMetadataMap();
  }

  /** Returns a read-only view of the state_metadata map. */
  @java.lang.Override
  public java.util.Map<java.lang.String, java.lang.String> getStateMetadataMap() {
    return internalGetStateMetadata().getMap();
  }

  /** Returns the value for {@code key}, or {@code defaultValue} when absent. */
  @java.lang.Override
  public /* nullable */ java.lang.String getStateMetadataOrDefault(
      java.lang.String key,
      /* nullable */ java.lang.String defaultValue) {
    if (key == null) {
      throw new NullPointerException("map key");
    }
    java.util.Map<java.lang.String, java.lang.String> map = internalGetStateMetadata().getMap();
    return map.containsKey(key) ? map.get(key) : defaultValue;
  }

  /** Returns the value for {@code key}, or throws IllegalArgumentException when absent. */
  @java.lang.Override
  public java.lang.String getStateMetadataOrThrow(java.lang.String key) {
    if (key == null) {
      throw new NullPointerException("map key");
    }
    java.util.Map<java.lang.String, java.lang.String> map = internalGetStateMetadata().getMap();
    if (!map.containsKey(key)) {
      throw new java.lang.IllegalArgumentException();
    }
    return map.get(key);
  }

  public Builder clearStateMetadata() {
    bitField0_ = (bitField0_ & ~0x00000002);
    internalGetMutableStateMetadata().getMutableMap().clear();
    return this;
  }

  /** Removes {@code key} from the state_metadata map if present. */
  public Builder removeStateMetadata(java.lang.String key) {
    if (key == null) {
      throw new NullPointerException("map key");
    }
    internalGetMutableStateMetadata().getMutableMap().remove(key);
    return this;
  }

  /** Use alternate mutation accessors instead. */
  @java.lang.Deprecated
  public java.util.Map<java.lang.String, java.lang.String> getMutableStateMetadata() {
    bitField0_ |= 0x00000002;
    return internalGetMutableStateMetadata().getMutableMap();
  }

  /** Puts one entry into the state_metadata map; null keys or values are rejected. */
  public Builder putStateMetadata(java.lang.String key, java.lang.String value) {
    if (key == null) {
      throw new NullPointerException("map key");
    }
    if (value == null) {
      throw new NullPointerException("map value");
    }
    internalGetMutableStateMetadata().getMutableMap().put(key, value);
    bitField0_ |= 0x00000002;
    return this;
  }

  /** Puts all entries of {@code values} into the state_metadata map. */
  public Builder putAllStateMetadata(java.util.Map<java.lang.String, java.lang.String> values) {
    internalGetMutableStateMetadata().getMutableMap().putAll(values);
    bitField0_ |= 0x00000002;
    return this;
  }

  @java.lang.Override
  public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }

  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }

  // @@protoc_insertion_point(builder_scope:google.cloud.recommender.v1.InsightStateInfo)
}

// @@protoc_insertion_point(class_scope:google.cloud.recommender.v1.InsightStateInfo)
private static final com.google.cloud.recommender.v1.InsightStateInfo DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.recommender.v1.InsightStateInfo();
}

public static com.google.cloud.recommender.v1.InsightStateInfo getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Parser delegates to Builder.mergeFrom and preserves the partial message on failure.
private static final com.google.protobuf.Parser<InsightStateInfo> PARSER =
    new com.google.protobuf.AbstractParser<InsightStateInfo>() {
      @java.lang.Override
      public InsightStateInfo parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<InsightStateInfo> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<InsightStateInfo> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.recommender.v1.InsightStateInfo getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
openjdk/jmc
35,597
application/uitests/org.openjdk.jmc.test.jemmy/src/main/java/org/openjdk/jmc/test/jemmy/misc/wrappers/JvmBrowser.java
/* * Copyright (c) 2018, 2025, Oracle and/or its affiliates. All rights reserved. * * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * The contents of this file are subject to the terms of either the Universal Permissive License * v 1.0 as shown at https://oss.oracle.com/licenses/upl * * or the following license: * * Redistribution and use in source and binary forms, with or without modification, are permitted * provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this list of conditions * and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright notice, this list of * conditions and the following disclaimer in the documentation and/or other materials provided with * the distribution. * * 3. Neither the name of the copyright holder nor the names of its contributors may be used to * endorse or promote products derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY * WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/
package org.openjdk.jmc.test.jemmy.misc.wrappers;

import static org.openjdk.jmc.browser.wizards.Messages.ConnectionWizardPage_STORE_CAPTION;
import static org.openjdk.jmc.ui.security.Messages.MasterPasswordWizardPage_SET_MASTER_PASSWORD_TITLE;
import static org.openjdk.jmc.ui.security.Messages.MasterPasswordWizardPage_VERIFY_MASTER_PASSWORD_TITLE;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

import org.jemmy.TimeoutExpiredException;
import org.junit.Assert;
import org.openjdk.jmc.browser.wizards.ConnectionWizardPage;
import org.openjdk.jmc.test.jemmy.MCJemmyTestBase;
import org.openjdk.jmc.test.jemmy.misc.base.wrappers.MCJemmyBase;
import org.openjdk.jmc.test.jemmy.misc.helpers.ConnectionHelper;
import org.openjdk.jmc.test.jemmy.misc.wrappers.MCButton.Labels;
import org.openjdk.jmc.ui.misc.FileSelector;
import org.openjdk.jmc.ui.security.Constants;

/**
 * The Jemmy wrapper class for the JVM Browser.
 */
public class JvmBrowser extends MCJemmyBase {
	// UI strings resolved from the product message bundles so the tests stay locale-safe.
	private static final String ExportTreeToFileWizardPage_TREE_NAME = org.openjdk.jmc.ui.wizards.ExportTreeToFileWizardPage.TREE_NAME;
	private static final String ACTION_EDIT_TEXT = org.openjdk.jmc.browser.views.Messages.JVMBrowserView_ACTION_EDIT_TEXT;
	private static final String ACTION_DISCONNECT_TEXT = org.openjdk.jmc.browser.views.Messages.JVMBrowserView_ACTION_DISCONNECT_TEXT;
	private static final String ACTION_TREE_LAYOUT_TOOLTIP = org.openjdk.jmc.browser.views.Messages.JVMBrowserView_ACTION_TREE_LAYOUT_TOOLTIP;
	private static final String ACTION_NEW_CONNECTION_TEXT = org.openjdk.jmc.browser.views.Messages.JVMBrowserView_ACTION_NEW_CONNECTION_TEXT;
	private static final String ACTION_NEW_CONNECTION_TOOLTIP = org.openjdk.jmc.browser.views.Messages.JVMBrowserView_ACTION_NEW_CONNECTION_TOOLTIP;
	private static final String ACTION_NEW_FOLDER_TEXT = org.openjdk.jmc.browser.views.Messages.JVMBrowserView_ACTION_NEW_FOLDER_TEXT;
	private static final String ACTION_NEW_FOLDER_TOOLTIP = org.openjdk.jmc.browser.views.Messages.JVMBrowserView_ACTION_NEW_FOLDER_TOOLTIP;
	private static final String ACTION_REMOVE_TEXT = org.openjdk.jmc.browser.views.Messages.JVMBrowserView_ACTION_REMOVE_TEXT;
	private static final String CONNECTION_WIZARD_STORE_CAPTION = org.openjdk.jmc.browser.wizards.Messages.ConnectionWizardPage_STORE_CAPTION;
	private static final String DIALOG_FOLDER_PROPERTIES_TITLE = org.openjdk.jmc.browser.views.Messages.JVMBrowserView_FOLDER_PROPERTIES_TITLE_TEXT;
	private static final String DIALOG_NEW_FOLDER_DEFAULT_VALUE = org.openjdk.jmc.browser.views.Messages.JVMBrowserView_DIALOG_NEW_FOLDER_DEFAULT_VALUE;
	private static final String DIALOG_NEW_FOLDER_TITLE = org.openjdk.jmc.browser.views.Messages.JVMBrowserView_DIALOG_NEW_FOLDER_TITLE;
	private static final String DIALOG_REMOVE_TITLE = org.openjdk.jmc.browser.views.Messages.JVMBrowserView_DIALOG_REMOVE_TITLE;
	private static final String TOO_OLD_JVM_TITLE = org.openjdk.jmc.rjmx.messages.internal.Messages.JVMSupport_TITLE_LEGACY_JVM_CONSOLE;
	private static final String LOCAL_PROVIDER_NAME = org.openjdk.jmc.browser.attach.Messages.LocalDescriptorProvider_PROVIDER_NAME;
	private static final String COMMERCIAL_FEATURES_QUESTION_TITLE = org.openjdk.jmc.flightrecorder.controlpanel.ui.messages.internal.Messages.COMMERCIAL_FEATURES_QUESTION_TITLE;
	private static final String DIALOG_NEW_CONNECTION_TITLE = org.openjdk.jmc.browser.wizards.Messages.ConnectionWizard_TITLE_NEW_CONNECTION;
	private static final String DIALOG_CONNECTION_PROPERTIES_TITLE = org.openjdk.jmc.browser.wizards.Messages.ConnectionWizard_TITLE_CONNECTION_PROPERTIES;
	private static final String ExportToFileWizardPage_WARN_IF_OVERWRITE_TEXT = org.openjdk.jmc.ui.wizards.Messages.ExportToFileWizardPage_WARN_IF_OVERWRITE_TEXT;
	private static final String JVM_BROWSER_TREE_NAME = org.openjdk.jmc.browser.views.JVMBrowserView.JVMBrowserView_TREE_NAME;
	private static final String ACTION_DUMP_LAST_PART_RECORDING_LABEL = org.openjdk.jmc.flightrecorder.controlpanel.ui.messages.internal.Messages.ACTION_DUMP_LAST_PART_RECORDING_LABEL;
	private static final String ACTION_DUMP_RECORDING_LABEL = org.openjdk.jmc.flightrecorder.controlpanel.ui.messages.internal.Messages.ACTION_DUMP_RECORDING_LABEL;
	private static final String ACTION_DUMP_ANY_RECORDING_LABEL = org.openjdk.jmc.flightrecorder.controlpanel.ui.messages.internal.Messages.ACTION_DUMP_ANY_RECORDING_LABEL;
	private static final String ACTION_DUMP_WHOLE_RECORDING_LABEL = org.openjdk.jmc.flightrecorder.controlpanel.ui.messages.internal.Messages.ACTION_DUMP_WHOLE_RECORDING_LABEL;
	private static final String ACTION_STOP_RECORDING_LABEL = org.openjdk.jmc.flightrecorder.controlpanel.ui.messages.internal.Messages.ACTION_STOP_RECORDING_LABEL;
	private static final String ACTION_CLOSE_RECORDING_LABEL = org.openjdk.jmc.flightrecorder.controlpanel.ui.messages.internal.Messages.ACTION_CLOSE_RECORDING_LABEL;
	private static final String ACTION_EDIT_RECORDING_LABEL = org.openjdk.jmc.flightrecorder.controlpanel.ui.messages.internal.Messages.ACTION_EDIT_RECORDING_LABEL;
	private static final String DUMP_RECORDING_WIZARD_PAGE_TITLE = org.openjdk.jmc.flightrecorder.controlpanel.ui.messages.internal.Messages.DUMP_RECORDING_WIZARD_PAGE_TITLE;
	// Hard-coded UI labels — presumably not externalized in the product; TODO confirm against the views.
	private static final String TREE_ITEM_CONSOLE = "MBean Server";
	private static final String TREE_ITEM_FLIGHTRECORDER = "Flight Recorder";
	private static final String ACTION_START_CONSOLE_LABEL = "Start JMX Console";
	private static final String ACTION_START_FLIGHTRECORDER_LABEL = "Start Flight Recording...";
	private static final String ACTION_OPEN_PERSISTED_JMX_DATA = "Open Persisted JMX Data";

	// Dismisses the welcome screen (if shown) and brings the JVM Browser view to front.
	private void ensureVisibleJvmBrowser() {
		MC.closeWelcome();
		MCMenu.ensureJvmBrowserVisible();
	}

	// Returns a wrapper for the JVM Browser tree, making the view visible first.
	private MCTree getTree() {
		ensureVisibleJvmBrowser();
		return MCTree.getByName(getShell(), JVM_BROWSER_TREE_NAME);
	}

	// Returns the view toolbar, located by the (always present) tree-layout tooltip.
	private MCToolBar getToolBar() {
		ensureVisibleJvmBrowser();
		return MCToolBar.getByToolTip(getShell(), ACTION_TREE_LAYOUT_TOOLTIP);
	}

	/**
	 * Opens a JMX console to the Test VM.
	 */
	public void connect() {
		connect(MCJemmyTestBase.TEST_CONNECTION);
	}

	/**
	 * Opens a JMX console to the specified connection name. Will, depending on the layout of the
	 * JVM Browser, resolve the path to the connection.
	 *
	 * @param name
	 *            the name of the process (local) to connect to
	 */
	public void connect(String name) {
		connect(true, createPathToLocalProcess(name));
	}

	/**
	 * Opens a JMX console to the specified connection path. This will, contrary to method
	 * {@code connect(String name)}, NOT resolve the path depending on the JVM Browser layout.
	 *
	 * @param path
	 *            the path of the connection
	 */
	public void connectRaw(String ... path) {
		connect(true, path);
	}

	/**
	 * Creates a new connection in the JVM Browser with the specified host and port, optionally with
	 * a specific name. This method doesn't validate the inputs, but it does attempt to validate
	 * that the connection is created, so if a test needs to verify the new connection dialog,
	 * specific code needs to be written for that with a {@link MCDialog}.
	 *
	 * @param host
	 *            the hostname for the connection
	 * @param port
	 *            the port for the connection
	 * @param user
	 *            the user name
	 * @param passwd
	 *            the password
	 * @param storeCredentials
	 *            {@code true} if credentials should be stored
	 * @param path
	 *            The path of the new connection, this can be either empty, in which case the
	 *            default naming scheme is used and the connection is created at the root level, or
	 *            it can be a list of strings representing the path of the new connection. If the
	 *            path is of length 1 and there is no item with that name, the new connection has
	 *            that string as the name, however, if that item exists then the new connection is
	 *            created beneath that item using the default name. This is basically the same for
	 *            strings of length n &gt; 1.
	 */
	public void createConnection(
		String host, String port, String user, String passwd, Boolean storeCredentials, String ... path) {
		String connectionName = null;
		String[] finalPath = null;
		if (itemExists(path)) {
			// if the path specified already exists then it's a folder
			getTree().select(path);
			getTree().contextChoose(ACTION_NEW_CONNECTION_TEXT);
			finalPath = Arrays.copyOf(path, path.length + 1); // we need to save the name of the folder path
			finalPath[finalPath.length - 1] = getDefaultConnectionName(host, port); // with auto generated name
		} else if (path.length > 1) {
			// since the path doesn't exist, we have been specified a specific name
			String[] subPath = Arrays.copyOf(path, path.length - 1);
			getTree().select(subPath);
			getTree().contextChoose(ACTION_NEW_CONNECTION_TEXT);
			finalPath = path;
			connectionName = path[path.length - 1];
		} else {
			if (path.length == 1) {
				finalPath = Arrays.copyOf(path, path.length);
				connectionName = path[0];
			}
			getToolBar().clickToolItem(ACTION_NEW_CONNECTION_TOOLTIP);
		}
		MCDialog newConnection = new MCDialog(DIALOG_NEW_CONNECTION_TITLE);
		newConnection.enterText(ConnectionWizardPage.HOSTNAME_FIELD_NAME, host);
		newConnection.enterText(ConnectionWizardPage.PORT_FIELD_NAME, port);
		if (connectionName != null) {
			newConnection.enterText(ConnectionWizardPage.CONNECTIONNAME_FIELD_NAME, connectionName);
		}
		if (user != null) {
			newConnection.enterText(ConnectionWizardPage.USERNAME_FIELD_NAME, user);
		}
		if (passwd != null) {
			newConnection.enterText(ConnectionWizardPage.PASSWORD_FIELD_NAME, passwd);
		}
		if (storeCredentials != null) {
			MCButton.getByLabel(newConnection, ConnectionWizardPage_STORE_CAPTION, false).setState(storeCredentials);
		}
		newConnection.clickButton(MCButton.Labels.FINISH);
		waitForIdle();
		if (storeCredentials != null && storeCredentials == true) {
			// Storing credentials brings up the master password dialog, which must be handled.
			handleSetMasterPassword(passwd);
		}
		Assert.assertTrue("Unable to create item " + Arrays.toString(finalPath) + " from " + Arrays.toString(path),
				itemExists(finalPath));
	}

	/**
	 * Creates a new connection in the JVM Browser with the specified host and port, optionally with
	 * a specific name. This method doesn't validate the inputs, but it does attempt to validate
	 * that the connection is created, so if a test needs to verify the new connection dialog,
	 * specific code needs to be written for that with a {@link MCDialog}.
	 *
	 * @param host
	 *            the hostname for the connection
	 * @param port
	 *            the port for the connection
	 * @param path
	 *            The path of the new connection, this can be either empty, in which case the
	 *            default naming scheme is used and the connection is created at the root level, or
	 *            it can be a list of strings representing the path of the new connection. If the
	 *            path is of length 1 and there is no item with that name, the new connection has
	 *            that string as the name, however, if that item exists then the new connection is
	 *            created beneath that item using the default name. This is basically the same for
	 *            strings of length n &gt; 1.
	 */
	public void createConnection(String host, String port, String ... path) {
		createConnection(host, port, null, null, null, path);
	}

	// Mirrors the product's default connection naming: "<host>:<port>" with localhost:7091 fallback.
	private String getDefaultConnectionName(String host, String port) {
		String name = "";
		name += (host == null) ? "localhost" : host;
		name += ":";
		name += (port == null) ? "7091" : port;
		return name;
	}

	/**
	 * Creates a folder at the specified path.
	 *
	 * @param path
	 *            the name/path of the folder, the new name will always be the last string entered
	 */
	public void createFolder(String ... path) {
		if (path.length > 1) {
			String[] subPath = Arrays.copyOf(path, path.length - 1);
			getTree().select(subPath);
			getTree().contextChoose(ACTION_NEW_FOLDER_TEXT);
		} else {
			getToolBar().clickToolItem(ACTION_NEW_FOLDER_TOOLTIP);
		}
		MCDialog newFolder = new MCDialog(DIALOG_NEW_FOLDER_TITLE);
		newFolder.replaceText(DIALOG_NEW_FOLDER_DEFAULT_VALUE, path[path.length - 1]);
		newFolder.clickButton(MCButton.Labels.OK);
		waitForIdle();
		Assert.assertTrue("Failed creating new folder", itemExists(path));
	}

	/**
	 * Deletes an item at the specified path.
	 *
	 * @param path
	 *            the path of the item to delete
	 */
	public void deleteItem(String ... path) {
		selectContextOption(ACTION_REMOVE_TEXT, path);
		MCDialog delete = new MCDialog(DIALOG_REMOVE_TITLE);
		delete.clickButton(MCButton.Labels.YES);
		waitForIdle();
		Assert.assertFalse("Failed deleting", itemExists(path));
	}

	/**
	 * Makes sure that the JVM Browser is in non-tree (flat) mode.
	 */
	public void disableTreeLayout() {
		setLayout(false);
	}

	/**
	 * Finds out if the JVM Browser is in tree layout mode.
	 *
	 * @return {@code true} if in tree mode, otherwise {@code false}
	 */
	public boolean isTreeLayout() {
		// In tree layout the local provider node is the root of local connections.
		return getTree().hasItem(LOCAL_PROVIDER_NAME);
	}

	// Prefixes the process name with the local provider node when the tree layout is active.
	private String[] createPathToLocalProcess(String processName) {
		if (isTreeLayout()) {
			return new String[] {LOCAL_PROVIDER_NAME, processName};
		} else {
			return new String[] {processName};
		}
	}

	/**
	 * Closes the JMX console for the default test connection.
	 */
	public void disconnect() {
		disconnect(createPathToLocalProcess(MCJemmyTestBase.TEST_CONNECTION));
	}

	/**
	 * Closes the JMX console of the specified connection name.
	 *
	 * @param path
	 *            the name of the connection
	 */
	public void disconnect(String ... path) {
		selectContextOption(ACTION_DISCONNECT_TEXT, path);
		MCDialog disconnectDialog = new MCDialog(ACTION_DISCONNECT_TEXT);
		disconnectDialog.clickButton(MCButton.Labels.OK);
	}

	/**
	 * Stops the named recording on the default test connection.
	 *
	 * @param name
	 *            the name of the running recording
	 */
	public void stopRecording(String name) {
		stopRecording(name, createPathToLocalProcess(MCJemmyTestBase.TEST_CONNECTION));
	}

	/**
	 * Stops the named recording on the specified connection path.
	 *
	 * @param name
	 *            the name of the running recording
	 * @param path
	 *            the path to the connection for the running recording
	 */
	public void stopRecording(String name, String ... path) {
		selectContextOption(ACTION_STOP_RECORDING_LABEL, createRecordingPath(name, path));
	}

	/**
	 * Closes the named recording on the default test connection.
	 *
	 * @param name
	 *            the name of the running recording
	 */
	public void closeRecording(String name) {
		closeRecording(name, createPathToLocalProcess(MCJemmyTestBase.TEST_CONNECTION));
	}

	/**
	 * Closes the named recording on the specified connection path.
	 *
	 * @param name
	 *            the name of the running recording
	 * @param path
	 *            the path to the connection for the running recording
	 */
	public void closeRecording(String name, String ... path) {
		selectContextOption(ACTION_CLOSE_RECORDING_LABEL, createRecordingPath(name, path));
	}

	/**
	 * Starts the dump default recording wizard on the default test connection.
	 *
	 * @return a {@link MCDialog}
	 */
	public MCDialog dumpDefaultRecording() {
		return dumpDefaultRecording(createPathToLocalProcess(MCJemmyTestBase.TEST_CONNECTION));
	}

	/**
	 * Starts the dump default recording wizard on the specified connection path.
	 *
	 * @param path
	 *            the path to the connection for the running recording
	 * @return a {@link MCDialog}
	 */
	public MCDialog dumpDefaultRecording(String ... path) {
		return doDumpRecording(ACTION_DUMP_ANY_RECORDING_LABEL, path);
	}

	/**
	 * Double clicks a recording for the default test connection and returns immediately.
	 *
	 * @param name
	 *            the name of the recording
	 * @return a {@link MCDialog}
	 */
	public MCDialog doubleClickRecording(String name) {
		return doubleClickRecording(name, createPathToLocalProcess(MCJemmyTestBase.TEST_CONNECTION));
	}

	/**
	 * Double-clicks a recording for the specified connection and returns immediately.
	 *
	 * @param name
	 *            the name of the recording
	 * @param path
	 *            the path of the connection
	 * @return a {@link MCDialog}
	 */
	public MCDialog doubleClickRecording(String name, String ...
path) { getTree().selectAndClick(2, createRecordingPath(name, path)); return MCDialog.getByAnyDialogTitle(false, DUMP_RECORDING_WIZARD_PAGE_TITLE); } /** * Starts the dump recording wizard on the named recording on the default test connection * * @param name * the name of the running recording * @return a {@link MCDialog} */ public MCDialog dumpRecording(String name) { return dumpRecording(name, createPathToLocalProcess(MCJemmyTestBase.TEST_CONNECTION)); } /** * Starts the dump recording wizard on the named recording on the specified connection path * * @param name * the name of the running recording * @param path * the path to the connection or recording * @return a {@link MCDialog} */ public MCDialog dumpRecording(String name, String ... path) { return doDumpRecording(ACTION_DUMP_RECORDING_LABEL, createRecordingPath(name, path)); } private MCDialog doDumpRecording(String actionName, String ... path) { selectContextOption(actionName, path); return MCDialog.getByAnyDialogTitle(false, DUMP_RECORDING_WIZARD_PAGE_TITLE); } /** * Starts the edit recording wizard on the named recording on the default test connection * * @param name * the name of the running recording * @return a {@link JfrWizard} */ public JfrWizard editRecording(String name) { return editRecording(name, createPathToLocalProcess(MCJemmyTestBase.TEST_CONNECTION)); } /** * Starts the edit recording wizard on the named recording on the specified connection path * * @param name * the name of the running recording * @param path * the path to the connection for the running recording * @return a {@link JfrWizard} */ public JfrWizard editRecording(String name, String ... 
path) { selectContextOption(ACTION_EDIT_RECORDING_LABEL, createRecordingPath(name, path)); return new JfrWizard(JfrWizard.EDIT_RECORDING_WIZARD_PAGE_TITLE); } /** * Dumps all of the named recording on the default test connection * * @param name * the name of the running recording */ public void dumpWholeRecording(String name) { dumpWholeRecording(name, MCJemmyTestBase.TEST_CONNECTION); } /** * Dumps all of the named recording on the specified connection path * * @param name * the name of the running recording * @param connection * the path to the connection for the running recording */ public void dumpWholeRecording(String name, String connection) { selectContextOption(ACTION_DUMP_WHOLE_RECORDING_LABEL, createRecordingPath(name, createPathToLocalProcess(connection))); waitForSubstringMatchedEditor(cleanConnectionName(connection)); } /** * Dumps the last part of the named recording on the default test connection * * @param name * the name of the running recording */ public void dumpLastPartOfRecording(String name) { dumpLastPartOfRecording(name, MCJemmyTestBase.TEST_CONNECTION); } /** * Dumps the last part of the named recording on the specified connection path * * @param name * the name of the running recording * @param connection * the path to the connection for the running recording */ public void dumpLastPartOfRecording(String name, String connection) { selectContextOption(ACTION_DUMP_LAST_PART_RECORDING_LABEL, createRecordingPath(name, createPathToLocalProcess(connection))); waitForSubstringMatchedEditor(cleanConnectionName(connection)); } private String cleanConnectionName(String connection) { return connection.replaceAll("[^A-Za-z0-9]", ""); } /** * Returns the filename of a currently running recording (on the default test connection) * * @param name * the name of the recording * @return the file name */ public String getRunningRecordingFileName(String name) { return getRunningRecordingFileName(name, createPathToLocalProcess(MCJemmyTestBase.TEST_CONNECTION)); 
} /** * Returns the filename of a currently running recording * * @param name * the name of the recording * @param path * the connection path * @return the file name */ public String getRunningRecordingFileName(String name, String ... path) { // Open the editor on the recording to get the file name JfrWizard recordingwizard = editRecording(name, path); String fileName = recordingwizard.getFileName(); recordingwizard.cancelWizard(); return fileName; } /** * Edits a connection with the specified parameters. If a parameter is not null then that field * is set to the parameter value * * @param name * the new name to give the connection * @param host * the new host * @param port * the new port * @param user * the username to use in the jmx connection * @param serverPasswd * the server password for the specified username * @param mcPasswd * the password Mission Control uses to save the credentials locally * @param save * whether or not to save the credentials locally * @param path * the path of the connection to edit */ public void editConnection( String name, String host, String port, String user, String serverPasswd, String mcPasswd, Boolean save, String ... 
path) { MCMenu.ensureJvmBrowserVisible(); getTree().select(path); getTree().contextChoose(ACTION_EDIT_TEXT); MCDialog properties = new MCDialog(DIALOG_CONNECTION_PROPERTIES_TITLE); if (host != null) { properties.enterText(ConnectionWizardPage.HOSTNAME_FIELD_NAME, host); } if (port != null) { properties.enterText(ConnectionWizardPage.PORT_FIELD_NAME, port); } if (user != null) { properties.enterText(ConnectionWizardPage.USERNAME_FIELD_NAME, user); } if (serverPasswd != null) { properties.enterText(ConnectionWizardPage.PASSWORD_FIELD_NAME, serverPasswd); } if (name != null) { properties.enterText(ConnectionWizardPage.CONNECTIONNAME_FIELD_NAME, name); } if (save != null) { properties.setButtonState(CONNECTION_WIZARD_STORE_CAPTION, save); } properties.clickButton(MCButton.Labels.FINISH); } /** * Makes sure that the JVM Browser is in tree mode */ public void enableTreeLayout() { setLayout(true); } /** * Finds out whether or not a connection with the specified path exists * * @param path * the path to find * @return {@code true} if a connection is found, {@code false} if not. */ public boolean itemExists(String ... path) { return getTree().hasItem(path); } /** * Opens the persisted JMX data editor for the JVM running Mission Control. */ public void openPersistedJMXData() { openPersistedJMXData(createPathToLocalProcess(MCJemmyTestBase.TEST_CONNECTION)); } /** * Opens the persisted JMX data editor for the named connection. * * @param path * the path to the connection */ public void openPersistedJMXData(String ... 
path) { MCMenu.ensureJvmBrowserVisible(); selectAction(TREE_ITEM_CONSOLE, path); getTree().contextChoose(ACTION_OPEN_PERSISTED_JMX_DATA); Assert.assertTrue("Unable to find console editor \"Persisted JMX Data\"", MCJemmyBase.waitForSubstringMatchedEditor("Persisted JMX Data")); } /** * Renames a folder at the specified path * * @param newName * the new name for the folder * @param path * the path of the folder to rename */ public void renameFolder(String newName, String ... path) { String[] finalPath = Arrays.copyOf(path, path.length); finalPath[path.length - 1] = newName; getTree().select(path); getTree().contextChoose(ACTION_EDIT_TEXT); MCDialog rename = new MCDialog(DIALOG_FOLDER_PROPERTIES_TITLE); rename.replaceText(path[path.length - 1], newName); rename.closeWithButton(MCButton.Labels.OK); waitForIdle(); Assert.assertTrue("Failed to properly rename folder", itemExists(finalPath)); } /** * Method used to start non-standard features. Also used to start standard-features with * non-standard expected behavior, i.e. dynamic enablement dialog. * * @param path * the name of the connection to use * @param option * the name of the feature to start */ public void selectContextOption(String option, String ... path) { MCMenu.ensureJvmBrowserVisible(); getTree().select(path); getTree().contextChoose(option); } /** * Attempts to connect to an MBean Server without verifying the connection. If connecting to a * pre-7u4 JVM the calling code may need to handle the resulting dialog * * @param path * the path of the connection */ public void unverifiedConnect(String ... 
path) { connect(false, path); } /** * Starts the Flight Recording wizard for the test connection * * @return the Flight Recording wizard dialog ({@link JfrWizard}) */ public JfrWizard startFlightRecordingWizard() { return startFlightRecordingWizard(createPathToLocalProcess(MCJemmyTestBase.TEST_CONNECTION)); } /** * Starts the Flight Recording wizard for the connection with the given path * * @param path * the path to the connection for which to start the flight recording * @return the Flight Recording wizard dialog ({@link JfrWizard}) */ public JfrWizard startFlightRecordingWizard(String ... path) { return startFlightRecordingWizard(false, path); } /** * Starts the Flight Recording wizard for the connection with the given path * * @param enableCommercialFeatures * {@code true} if a dialog for dynamically enabling commercial features is expected. * Otherwise {@code false} * @param path * the path to the connection for which to start the flight recording * @return the Flight Recording wizard dialog ({@link JfrWizard}) */ public JfrWizard startFlightRecordingWizard(boolean enableCommercialFeatures, String ... path) { MCMenu.ensureJvmBrowserVisible(); selectAction(TREE_ITEM_FLIGHTRECORDER, path); getTree().contextChoose(ACTION_START_FLIGHTRECORDER_LABEL); if (enableCommercialFeatures) { MCDialog dialog = new MCDialog(COMMERCIAL_FEATURES_QUESTION_TITLE); dialog.closeWithButton(Labels.YES); } return new JfrWizard(JfrWizard.START_RECORDING_WIZARD_PAGE_TITLE); } /** * Exports connections to file, it does not assert that the file exists. * * @param fileName * the name of the file to export the connection(s) to * @param names * the connection(s) to export. */ public void exportConnections(String fileName, String ... 
names) { MCDialog dialog = MCMenu.openExportDialog(); MCTree tree = MCTree.getFirstVisible(dialog); tree.select("Mission Control", "Connections"); MCButton.getByLabel(dialog, MCButton.Labels.NEXT, false).click(); tree = MCTree.getByName(dialog.getDialogShell(), ExportTreeToFileWizardPage_TREE_NAME); MCButton.getByLabel(dialog, ExportToFileWizardPage_WARN_IF_OVERWRITE_TEXT, false).setState(false); for (String name : names) { tree.select(name); tree.setSelectedItemState(true); } MCText.getByName(dialog, FileSelector.FILENAME_FIELD_NAME).setText(fileName); MCButton.getByLabel(dialog, MCButton.Labels.FINISH, false).click(); sleep(1000); } /** * Attempts to import connections from the given filename. * * @param fileName * absolute URI for the file to import. * @param fileExists * specifies if the file is expected to be found */ public void importConnections(String fileName, Boolean fileExists) { MCDialog dialog = MCMenu.openImportDialog(); MCTree tree = MCTree.getFirstVisible(dialog); tree.select("Mission Control", "Connections"); MCButton.getByLabel(dialog, MCButton.Labels.NEXT, false).click(); MCText.getByName(dialog, FileSelector.FILENAME_FIELD_NAME).setText(fileName); if (fileExists) { MCButton.getByLabel(dialog, MCButton.Labels.FINISH, false).click(); } else { Assert.assertFalse("Finish button not disabled", MCButton.getByLabel(dialog, MCButton.Labels.FINISH, false).isEnabled()); Assert.assertFalse("Next button not disabled", MCButton.getByLabel(dialog, MCButton.Labels.NEXT, false).isEnabled()); MCButton.getByLabel(dialog, MCButton.Labels.CANCEL, false).click(); } sleep(1000); } /** * Handles the Set Master Password dialog. * * @param password * the password used as a new master password. Must be longer than five characters. 
*/ public void handleSetMasterPassword(String password) { MCDialog masterPasswordShell = MCDialog.getByAnyDialogTitle(MasterPasswordWizardPage_SET_MASTER_PASSWORD_TITLE, MasterPasswordWizardPage_VERIFY_MASTER_PASSWORD_TITLE); if (masterPasswordShell.getText().equals(MasterPasswordWizardPage_SET_MASTER_PASSWORD_TITLE)) { masterPasswordShell.enterText(Constants.PASSWORD1_FIELD_NAME, password); masterPasswordShell.enterText(Constants.PASSWORD2_FIELD_NAME, password); } else { masterPasswordShell.enterText(Constants.PASSWORD1_FIELD_NAME, password); } masterPasswordShell.clickButton(MCButton.Labels.OK); sleep(1000); } /** * Opens a JMX console to the specified connection name. Will, depending on the layout of the * JVM Browser, resolve the path to the connection * * @param valid * will, if {@code true}, validate that an appropriate dialog or console editor is * opened * @param path * the path of the connection */ public void connect(boolean valid, String ... path) { MCMenu.ensureJvmBrowserVisible(); String connectionName = path[path.length - 1]; selectAction(TREE_ITEM_CONSOLE, path); getTree().contextChoose(ACTION_START_CONSOLE_LABEL); if (valid) { if (!ConnectionHelper.is7u40orLater(connectionName)) { try { MCDialog dialog = new MCDialog(TOO_OLD_JVM_TITLE); dialog.closeWithButton(MCButton.Labels.OK); } catch (TimeoutExpiredException tee) { Assert.fail("JVM Too Old warning did not show."); } } Assert.assertTrue("Could not find JMX Console for connection \"" + connectionName + "\"", waitForSubstringMatchedEditor(connectionName)); } } /** * Does substring matching of the specified recording name against the currently running * recordings for the specified connection * * @param name * the name of the recording to search for * @param path * the path to the connection * @return {@code true} if there is a matching recording. Otherwise {@code false} */ public boolean hasRecording(String name, String ... 
path) { return checkHasRecording(name, getCurrentRecordings(path)); } /** * Does substring matching of the specified recording name against the currently running * recordings * * @param name * the name of the recording to search for * @return {@code true} if there is a matching recording. Otherwise {@code false} */ public boolean hasRecording(String name) { return checkHasRecording(name, getCurrentRecordings()); } private boolean checkHasRecording(String name, List<String> recordings) { boolean result = false; for (String recording : recordings) { if (recording.contains(name)) { result = true; break; } } return result; } /** * Returns a list of currently running recordings for the default test connection * * @return a {@link List} of {@link String} of the currently running recordings. {@code null} if * no recordings could be found */ public List<String> getCurrentRecordings() { return getCurrentRecordings(MCJemmyTestBase.TEST_CONNECTION); } /** * Returns a list of strings containing the currently running recordings on this JVM * * @param path * the path to the connection * @return a {@link List} of {@link String} of the currently running recordings. {@code null} if * no recordings could be found */ public List<String> getCurrentRecordings(String ... path) { MCMenu.ensureJvmBrowserVisible(); getTree().select(createRecordingPath(null, path)); getTree().expand(); // wait for the node to expand and be populated (info retrieved from the JVM) before looking for an error dialog sleep(1000); MCDialog error = MCDialog.getByAnyDialogTitle(false, "Problem retrieving information for"); List<String> result = null; if (error != null) { error.closeWithButton(Labels.OK); // create and return an empty list result = new ArrayList<>(); } else { result = getTree().getSelectedItemChildrenTexts(); } return result; } private String[] createRecordingPath(String recordingName, String ... 
path) { List<String> completePath = Arrays.asList(path).stream().collect(Collectors.toList()); completePath.add("Flight Recorder"); if (recordingName != null) { completePath.add(recordingName); } return completePath.toArray(new String[completePath.size()]); } private void selectAction(String action, String ... path) { String[] actionPath = Arrays.copyOf(path, path.length + 1); actionPath[path.length] = action; getTree().select(actionPath); } private void setLayout(boolean tree) { if (tree) { getToolBar().selectToolItem(ACTION_TREE_LAYOUT_TOOLTIP); } else { getToolBar().unselectToolItem(ACTION_TREE_LAYOUT_TOOLTIP); } } }
apache/pulsar
36,918
pulsar-broker/src/test/java/org/apache/pulsar/broker/service/PersistentDispatcherFailoverConsumerTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.pulsar.broker.service; import static org.apache.pulsar.common.protocol.Commands.DEFAULT_CONSUMER_EPOCH; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.matches; import static org.mockito.ArgumentMatchers.same; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import static org.testng.Assert.assertFalse; import static org.testng.Assert.assertNotNull; import static org.testng.Assert.assertNull; import static org.testng.AssertJUnit.assertEquals; import static org.testng.AssertJUnit.assertSame; import static org.testng.AssertJUnit.assertTrue; import io.netty.buffer.ByteBuf; import io.netty.channel.ChannelHandlerContext; import io.netty.channel.EventLoopGroup; import io.netty.util.concurrent.DefaultThreadFactory; import java.lang.reflect.Field; import java.net.InetSocketAddress; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Optional; import 
java.util.concurrent.CompletableFuture; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicIntegerFieldUpdater; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Supplier; import lombok.Cleanup; import org.apache.bookkeeper.mledger.AsyncCallbacks.AddEntryCallback; import org.apache.bookkeeper.mledger.AsyncCallbacks.DeleteCursorCallback; import org.apache.bookkeeper.mledger.AsyncCallbacks.DeleteLedgerCallback; import org.apache.bookkeeper.mledger.AsyncCallbacks.OpenCursorCallback; import org.apache.bookkeeper.mledger.AsyncCallbacks.OpenLedgerCallback; import org.apache.bookkeeper.mledger.ManagedCursor; import org.apache.bookkeeper.mledger.ManagedLedger; import org.apache.bookkeeper.mledger.ManagedLedgerConfig; import org.apache.bookkeeper.mledger.ManagedLedgerException; import org.apache.bookkeeper.mledger.ManagedLedgerFactory; import org.apache.bookkeeper.mledger.PositionFactory; import org.apache.bookkeeper.mledger.impl.ManagedCursorImpl; import org.apache.pulsar.broker.ServiceConfiguration; import org.apache.pulsar.broker.namespace.NamespaceService; import org.apache.pulsar.broker.service.persistent.AbstractPersistentDispatcherMultipleConsumers; import org.apache.pulsar.broker.service.persistent.PersistentDispatcherMultipleConsumers; import org.apache.pulsar.broker.service.persistent.PersistentDispatcherSingleActiveConsumer; import org.apache.pulsar.broker.service.persistent.PersistentSubscription; import org.apache.pulsar.broker.service.persistent.PersistentTopic; import org.apache.pulsar.broker.testcontext.PulsarTestContext; import org.apache.pulsar.client.api.MessageId; import org.apache.pulsar.common.api.proto.BaseCommand; import org.apache.pulsar.common.api.proto.CommandActiveConsumerChange; import org.apache.pulsar.common.api.proto.CommandSubscribe.InitialPosition; import org.apache.pulsar.common.api.proto.CommandSubscribe.SubType; import 
org.apache.pulsar.common.api.proto.ProtocolVersion; import org.apache.pulsar.common.naming.NamespaceBundle; import org.apache.pulsar.common.util.netty.EventLoopUtil; import org.awaitility.Awaitility; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.testng.Assert; import org.testng.annotations.AfterMethod; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; @Test(groups = "quarantine") public class PersistentDispatcherFailoverConsumerTest { private ServerCnx serverCnx; private ServerCnx serverCnxWithOldVersion; private ManagedLedger ledgerMock; private ManagedCursor cursorMock; private ChannelHandlerContext channelCtx; private LinkedBlockingQueue<CommandActiveConsumerChange> consumerChanges; protected PulsarTestContext pulsarTestContext; final String successTopicName = "persistent://part-perf/global/perf.t1/ptopic"; final String failTopicName = "persistent://part-perf/global/perf.t1/pfailTopic"; @BeforeMethod public void setup() throws Exception { ServiceConfiguration svcConfig = new ServiceConfiguration(); svcConfig.setBrokerShutdownTimeoutMs(0L); svcConfig.setLoadBalancerOverrideBrokerNicSpeedGbps(Optional.of(1.0d)); svcConfig.setClusterName("pulsar-cluster"); svcConfig.setSystemTopicEnabled(false); svcConfig.setTopicLevelPoliciesEnabled(false); pulsarTestContext = PulsarTestContext.builderForNonStartableContext() .config(svcConfig) .spyByDefault() .build(); consumerChanges = new LinkedBlockingQueue<>(); this.channelCtx = mock(ChannelHandlerContext.class); doAnswer(invocationOnMock -> { ByteBuf buf = invocationOnMock.getArgument(0); ByteBuf cmdBuf = buf.retainedSlice(4, buf.writerIndex() - 4); try { int cmdSize = (int) cmdBuf.readUnsignedInt(); int writerIndex = cmdBuf.writerIndex(); BaseCommand cmd = new BaseCommand(); cmd.parseFrom(cmdBuf, cmdSize); if (cmd.hasActiveConsumerChange()) { consumerChanges.put(cmd.getActiveConsumerChange()); } } finally { cmdBuf.release(); } return null; 
}).when(channelCtx).writeAndFlush(any(), any()); serverCnx = pulsarTestContext.createServerCnxSpy(); doReturn(true).when(serverCnx).isActive(); doReturn(true).when(serverCnx).isWritable(); doReturn(new InetSocketAddress("localhost", 1234)).when(serverCnx).clientAddress(); when(serverCnx.getRemoteEndpointProtocolVersion()).thenReturn(ProtocolVersion.v12.getValue()); when(serverCnx.ctx()).thenReturn(channelCtx); doReturn(new PulsarCommandSenderImpl(null, serverCnx)) .when(serverCnx).getCommandSender(); serverCnxWithOldVersion = pulsarTestContext.createServerCnxSpy(); doReturn(true).when(serverCnxWithOldVersion).isActive(); doReturn(true).when(serverCnxWithOldVersion).isWritable(); doReturn(new InetSocketAddress("localhost", 1234)) .when(serverCnxWithOldVersion).clientAddress(); when(serverCnxWithOldVersion.getRemoteEndpointProtocolVersion()) .thenReturn(ProtocolVersion.v11.getValue()); when(serverCnxWithOldVersion.ctx()).thenReturn(channelCtx); doReturn(new PulsarCommandSenderImpl(null, serverCnxWithOldVersion)) .when(serverCnxWithOldVersion).getCommandSender(); NamespaceService nsSvc = pulsarTestContext.getPulsarService().getNamespaceService(); doReturn(true).when(nsSvc).isServiceUnitOwned(any(NamespaceBundle.class)); doReturn(CompletableFuture.completedFuture(mock(NamespaceBundle.class))).when(nsSvc).getBundleAsync(any()); doReturn(CompletableFuture.completedFuture(true)).when(nsSvc).checkBundleOwnership(any(), any()); setupMLAsyncCallbackMocks(); } @AfterMethod(alwaysRun = true) public void shutdown() throws Exception { if (pulsarTestContext != null) { pulsarTestContext.close(); pulsarTestContext = null; } } void setupMLAsyncCallbackMocks() { ledgerMock = mock(ManagedLedger.class); cursorMock = mock(ManagedCursorImpl.class); doReturn(new ArrayList<>()).when(ledgerMock).getCursors(); doReturn(new ManagedLedgerConfig()).when(ledgerMock).getConfig(); doReturn("mockCursor").when(cursorMock).getName(); // call openLedgerComplete with ledgerMock on ML factory asyncOpen 
ManagedLedgerFactory managedLedgerFactory = pulsarTestContext.getDefaultManagedLedgerFactory(); doAnswer(invocationOnMock -> { ((OpenLedgerCallback) invocationOnMock.getArguments()[2]).openLedgerComplete(ledgerMock, null); return null; }).when(managedLedgerFactory) .asyncOpen(matches(".*success.*"), any(ManagedLedgerConfig.class), any(OpenLedgerCallback.class), any(Supplier.class), any()); // call openLedgerFailed on ML factory asyncOpen doAnswer(invocationOnMock -> { ((OpenLedgerCallback) invocationOnMock.getArguments()[2]) .openLedgerFailed(new ManagedLedgerException("Managed ledger failure"), null); return null; }).when(managedLedgerFactory) .asyncOpen(matches(".*fail.*"), any(ManagedLedgerConfig.class), any(OpenLedgerCallback.class), any(Supplier.class), any()); // call addComplete on ledger asyncAddEntry doAnswer(invocationOnMock -> { ((AddEntryCallback) invocationOnMock.getArguments()[1]).addComplete( PositionFactory.create(1, 1), null, null); return null; }).when(ledgerMock).asyncAddEntry(any(byte[].class), any(AddEntryCallback.class), any()); // call openCursorComplete on cursor asyncOpen doAnswer(invocationOnMock -> { ((OpenCursorCallback) invocationOnMock.getArguments()[2]).openCursorComplete(cursorMock, null); return null; }).when(ledgerMock) .asyncOpenCursor(matches(".*success.*"), any(InitialPosition.class), any(OpenCursorCallback.class), any()); // call deleteLedgerComplete on ledger asyncDelete doAnswer(invocationOnMock -> { ((DeleteLedgerCallback) invocationOnMock.getArguments()[0]).deleteLedgerComplete(null); return null; }).when(ledgerMock).asyncDelete(any(DeleteLedgerCallback.class), any()); doAnswer(invocationOnMock -> { ((DeleteCursorCallback) invocationOnMock.getArguments()[1]).deleteCursorComplete(null); return null; }).when(ledgerMock).asyncDeleteCursor(matches(".*success.*"), any(DeleteCursorCallback.class), any()); } private void verifyActiveConsumerChange(CommandActiveConsumerChange change, long consumerId, boolean isActive) { 
assertEquals(consumerId, change.getConsumerId()); assertEquals(isActive, change.isIsActive()); } @Test(timeOut = 10000) public void testAddConsumerWhenClosed() throws Exception { PersistentTopic topic = new PersistentTopic(successTopicName, ledgerMock, pulsarTestContext.getBrokerService()); PersistentSubscription sub = new PersistentSubscription(topic, "sub-1", cursorMock, false); PersistentDispatcherSingleActiveConsumer pdfc = new PersistentDispatcherSingleActiveConsumer(cursorMock, SubType.Failover, 0, topic, sub); pdfc.close().get(); Consumer consumer = mock(Consumer.class); pdfc.addConsumer(consumer); verify(consumer, times(1)).disconnect(); assertEquals(0, pdfc.consumers.size()); } @Test public void testConsumerGroupChangesWithOldNewConsumers() throws Exception { PersistentTopic topic = new PersistentTopic(successTopicName, ledgerMock, pulsarTestContext.getBrokerService()); PersistentSubscription sub = new PersistentSubscription(topic, "sub-1", cursorMock, false); int partitionIndex = 0; PersistentDispatcherSingleActiveConsumer pdfc = new PersistentDispatcherSingleActiveConsumer(cursorMock, SubType.Failover, partitionIndex, topic, sub); // 1. Verify no consumers connected assertFalse(pdfc.isConsumerConnected()); // 2. Add old consumer Consumer consumer1 = new Consumer(sub, SubType.Exclusive, topic.getName(), 1 /* consumer id */, 0, "Cons1"/* consumer name */, true, serverCnxWithOldVersion, "myrole-1", Collections.emptyMap(), false, null, MessageId.latest, DEFAULT_CONSUMER_EPOCH); pdfc.addConsumer(consumer1); List<Consumer> consumers = pdfc.getConsumers(); assertSame(consumers.get(0).consumerName(), consumer1.consumerName()); assertEquals(1, consumers.size()); assertNull(consumerChanges.poll()); verify(channelCtx, times(0)).write(any()); // 3. 
Add new consumer Consumer consumer2 = new Consumer(sub, SubType.Exclusive, topic.getName(), 2 /* consumer id */, 0, "Cons2"/* consumer name */, true, serverCnx, "myrole-1", Collections.emptyMap(), false, null, MessageId.latest, DEFAULT_CONSUMER_EPOCH); pdfc.addConsumer(consumer2); consumers = pdfc.getConsumers(); assertSame(consumers.get(0).consumerName(), consumer1.consumerName()); assertEquals(2, consumers.size()); CommandActiveConsumerChange change = consumerChanges.take(); verifyActiveConsumerChange(change, 2, false); verify(channelCtx, times(1)).writeAndFlush(any(), any()); } @Test public void testAddRemoveConsumer() throws Exception { log.info("--- Starting PersistentDispatcherFailoverConsumerTest::testAddConsumer ---"); PersistentTopic topic = new PersistentTopic(successTopicName, ledgerMock, pulsarTestContext.getBrokerService()); PersistentSubscription sub = new PersistentSubscription(topic, "sub-1", cursorMock, false); int partitionIndex = 4; PersistentDispatcherSingleActiveConsumer pdfc = new PersistentDispatcherSingleActiveConsumer(cursorMock, SubType.Failover, partitionIndex, topic, sub); // 1. Verify no consumers connected assertFalse(pdfc.isConsumerConnected()); // 2. Add consumer Consumer consumer1 = spy(new Consumer(sub, SubType.Exclusive, topic.getName(), 1 /* consumer id */, 0, "Cons1"/* consumer name */, true, serverCnx, "myrole-1", Collections.emptyMap(), false /* read compacted */, null, MessageId.latest, DEFAULT_CONSUMER_EPOCH)); pdfc.addConsumer(consumer1); List<Consumer> consumers = pdfc.getConsumers(); assertSame(consumers.get(0).consumerName(), consumer1.consumerName()); assertEquals(1, consumers.size()); CommandActiveConsumerChange change = consumerChanges.take(); verifyActiveConsumerChange(change, 1, true); verify(consumer1, times(1)).notifyActiveConsumerChange(same(consumer1)); // 3. 
Add again, duplicate allowed pdfc.addConsumer(consumer1); consumers = pdfc.getConsumers(); assertSame(consumers.get(0).consumerName(), consumer1.consumerName()); assertEquals(2, consumers.size()); // 4. Verify active consumer assertSame(pdfc.getActiveConsumer().consumerName(), consumer1.consumerName()); // get the notified with who is the leader change = consumerChanges.poll(10, TimeUnit.SECONDS); assertNotNull(change); verifyActiveConsumerChange(change, 1, true); verify(consumer1, times(2)).notifyActiveConsumerChange(same(consumer1)); // 5. Add another consumer which does not change active consumer Consumer consumer2 = spy(new Consumer(sub, SubType.Exclusive, topic.getName(), 2 /* consumer id */, 0, "Cons2"/* consumer name */, true, serverCnx, "myrole-1", Collections.emptyMap(), false /* read compacted */, null, MessageId.latest, DEFAULT_CONSUMER_EPOCH)); pdfc.addConsumer(consumer2); consumers = pdfc.getConsumers(); assertSame(pdfc.getActiveConsumer().consumerName(), consumer1.consumerName()); assertEquals(3, consumers.size()); // get notified with who is the leader change = consumerChanges.poll(10, TimeUnit.SECONDS); assertNotNull(change); verifyActiveConsumerChange(change, 2, false); verify(consumer1, times(2)).notifyActiveConsumerChange(same(consumer1)); verify(consumer2, times(1)).notifyActiveConsumerChange(same(consumer1)); // 6. 
Add a consumer which changes active consumer Consumer consumer0 = spy(new Consumer(sub, SubType.Exclusive, topic.getName(), 0 /* consumer id */, 0, "Cons0"/* consumer name */, true, serverCnx, "myrole-1", Collections.emptyMap(), false /* read compacted */, null, MessageId.latest, DEFAULT_CONSUMER_EPOCH)); pdfc.addConsumer(consumer0); consumers = pdfc.getConsumers(); assertSame(pdfc.getActiveConsumer().consumerName(), consumer0.consumerName()); assertEquals(4, consumers.size()); // all consumers will receive notifications change = consumerChanges.poll(10, TimeUnit.SECONDS); assertNotNull(change); verifyActiveConsumerChange(change, 0, true); change = consumerChanges.poll(10, TimeUnit.SECONDS); assertNotNull(change); verifyActiveConsumerChange(change, 1, false); change = consumerChanges.poll(10, TimeUnit.SECONDS); assertNotNull(change); verifyActiveConsumerChange(change, 1, false); change = consumerChanges.poll(10, TimeUnit.SECONDS); assertNotNull(change); verifyActiveConsumerChange(change, 2, false); verify(consumer0, times(1)).notifyActiveConsumerChange(same(consumer0)); verify(consumer1, times(2)).notifyActiveConsumerChange(same(consumer1)); verify(consumer1, times(2)).notifyActiveConsumerChange(same(consumer0)); verify(consumer2, times(1)).notifyActiveConsumerChange(same(consumer1)); verify(consumer2, times(1)).notifyActiveConsumerChange(same(consumer0)); // 7. Remove last consumer to make active consumer change. pdfc.removeConsumer(consumer2); consumers = pdfc.getConsumers(); assertSame(pdfc.getActiveConsumer().consumerName(), consumer1.consumerName()); assertEquals(3, consumers.size()); change = consumerChanges.poll(10, TimeUnit.SECONDS); assertNotNull(change); verifyActiveConsumerChange(change, 0, false); change = consumerChanges.poll(10, TimeUnit.SECONDS); assertNotNull(change); verifyActiveConsumerChange(change, 1, true); change = consumerChanges.poll(10, TimeUnit.SECONDS); assertNotNull(change); verifyActiveConsumerChange(change, 1, true); // 8. 
Verify if we cannot unsubscribe when more than one consumer is connected assertFalse(pdfc.canUnsubscribe(consumer0)); // 9. Remove inactive consumer pdfc.removeConsumer(consumer0); consumers = pdfc.getConsumers(); assertSame(pdfc.getActiveConsumer().consumerName(), consumer1.consumerName()); assertEquals(2, consumers.size()); // not consumer group changes assertNull(consumerChanges.poll(10, TimeUnit.SECONDS)); // 10. Attempt to remove already removed consumer String cause = ""; try { pdfc.removeConsumer(consumer0); } catch (Exception e) { cause = e.getMessage(); } assertEquals(cause, "Consumer was not connected"); // 11. Remove same consumer pdfc.removeConsumer(consumer1); consumers = pdfc.getConsumers(); assertSame(pdfc.getActiveConsumer().consumerName(), consumer1.consumerName()); assertEquals(1, consumers.size()); // not consumer group changes assertNull(consumerChanges.poll(10, TimeUnit.SECONDS)); // 11. With only one consumer, unsubscribe is allowed assertTrue(pdfc.canUnsubscribe(consumer1)); } private String[] sortConsumerNameByHashSelector(String...consumerNames) throws Exception { String[] result = new String[consumerNames.length]; PersistentTopic topic = new PersistentTopic(successTopicName, ledgerMock, pulsarTestContext.getBrokerService()); PersistentSubscription sub = new PersistentSubscription(topic, "sub-1", cursorMock, false); int partitionIndex = -1; PersistentDispatcherSingleActiveConsumer dispatcher = new PersistentDispatcherSingleActiveConsumer(cursorMock, SubType.Failover, partitionIndex, topic, sub); for (String consumerName : consumerNames){ Consumer consumer = spy(new Consumer(sub, SubType.Failover, topic.getName(), 999 /* consumer id */, 1, consumerName/* consumer name */, true, serverCnx, "myrole-1", Collections.emptyMap(), false /* read compacted */, null, MessageId.latest, DEFAULT_CONSUMER_EPOCH)); dispatcher.addConsumer(consumer); } for (int i = 0; i < consumerNames.length; i++) { result[i] = dispatcher.getActiveConsumer().consumerName(); 
dispatcher.removeConsumer(dispatcher.getActiveConsumer()); } consumerChanges.clear(); return result; } private CommandActiveConsumerChange waitActiveChangeEvent(int consumerId) throws Exception { AtomicReference<CommandActiveConsumerChange> res = new AtomicReference<>(); Awaitility.await().until(() -> { while (!consumerChanges.isEmpty()){ CommandActiveConsumerChange change = consumerChanges.take(); if (change.getConsumerId() == consumerId){ res.set(change); return true; } } return false; }); consumerChanges.clear(); return res.get(); } @Test public void testAddRemoveConsumerNonPartitionedTopic() throws Exception { log.info("--- Starting PersistentDispatcherFailoverConsumerTest::testAddRemoveConsumerNonPartitionedTopic ---"); String[] sortedConsumerNameByHashSelector = sortConsumerNameByHashSelector("Cons1", "Cons2"); BrokerService spyBrokerService = pulsarTestContext.getBrokerService(); @Cleanup("shutdownNow") final EventLoopGroup singleEventLoopGroup = EventLoopUtil.newEventLoopGroup(1, pulsarTestContext.getBrokerService().getPulsar().getConfig().isEnableBusyWait(), new DefaultThreadFactory("pulsar-io")); doAnswer(invocation -> singleEventLoopGroup).when(spyBrokerService).executor(); PersistentTopic topic = new PersistentTopic(successTopicName, ledgerMock, pulsarTestContext.getBrokerService()); PersistentSubscription sub = new PersistentSubscription(topic, "sub-1", cursorMock, false); // Non partitioned topic. int partitionIndex = -1; PersistentDispatcherSingleActiveConsumer pdfc = new PersistentDispatcherSingleActiveConsumer(cursorMock, SubType.Failover, partitionIndex, topic, sub); // 1. Verify no consumers connected assertFalse(pdfc.isConsumerConnected()); // 2. 
Add a consumer Consumer consumer1 = spy(new Consumer(sub, SubType.Failover, topic.getName(), 1 /* consumer id */, 1, sortedConsumerNameByHashSelector[0]/* consumer name */, true, serverCnx, "myrole-1", Collections.emptyMap(), false /* read compacted */, null, MessageId.latest, DEFAULT_CONSUMER_EPOCH)); pdfc.addConsumer(consumer1); List<Consumer> consumers = pdfc.getConsumers(); assertEquals(1, consumers.size()); assertSame(pdfc.getActiveConsumer().consumerName(), consumer1.consumerName()); waitActiveChangeEvent(1); // 3. Add a consumer with same priority level and consumer name is smaller in lexicographic order. Consumer consumer2 = spy(new Consumer(sub, SubType.Failover, topic.getName(), 2 /* consumer id */, 1, sortedConsumerNameByHashSelector[1]/* consumer name */, true, serverCnx, "myrole-1", Collections.emptyMap(), false /* read compacted */, null, MessageId.latest, DEFAULT_CONSUMER_EPOCH)); pdfc.addConsumer(consumer2); // 4. Verify active consumer doesn't change consumers = pdfc.getConsumers(); assertEquals(2, consumers.size()); CommandActiveConsumerChange change = waitActiveChangeEvent(2); verifyActiveConsumerChange(change, 2, false); assertSame(pdfc.getActiveConsumer().consumerName(), consumer1.consumerName()); verify(consumer2, times(1)).notifyActiveConsumerChange(same(consumer1)); // 5. 
Add another consumer which has higher priority level Consumer consumer3 = spy(new Consumer(sub, SubType.Failover, topic.getName(), 3 /* consumer id */, 0, "Cons3"/* consumer name */, true, serverCnx, "myrole-1", Collections.emptyMap(), false /* read compacted */, null, MessageId.latest, DEFAULT_CONSUMER_EPOCH)); pdfc.addConsumer(consumer3); consumers = pdfc.getConsumers(); assertEquals(3, consumers.size()); change = waitActiveChangeEvent(3); verifyActiveConsumerChange(change, 3, true); assertSame(pdfc.getActiveConsumer().consumerName(), consumer3.consumerName()); verify(consumer3, times(1)).notifyActiveConsumerChange(same(consumer3)); } @Test public void testMultipleDispatcherGetNextConsumerWithDifferentPriorityLevel() throws Exception { PersistentTopic topic = new PersistentTopic(successTopicName, ledgerMock, pulsarTestContext.getBrokerService()); AbstractPersistentDispatcherMultipleConsumers dispatcher = new PersistentDispatcherMultipleConsumers(topic, cursorMock, null); Consumer consumer1 = createConsumer(topic, 0, 2, false, 1); Consumer consumer2 = createConsumer(topic, 0, 2, false, 2); Consumer consumer3 = createConsumer(topic, 0, 2, false, 3); Consumer consumer4 = createConsumer(topic, 1, 2, false, 4); Consumer consumer5 = createConsumer(topic, 1, 1, false, 5); Consumer consumer6 = createConsumer(topic, 1, 2, false, 6); Consumer consumer7 = createConsumer(topic, 2, 1, false, 7); Consumer consumer8 = createConsumer(topic, 2, 1, false, 8); Consumer consumer9 = createConsumer(topic, 2, 1, false, 9); dispatcher.addConsumer(consumer1); dispatcher.addConsumer(consumer2); dispatcher.addConsumer(consumer3); dispatcher.addConsumer(consumer4); dispatcher.addConsumer(consumer5); dispatcher.addConsumer(consumer6); dispatcher.addConsumer(consumer7); dispatcher.addConsumer(consumer8); dispatcher.addConsumer(consumer9); Assert.assertEquals(getNextConsumer(dispatcher), consumer1); Assert.assertEquals(getNextConsumer(dispatcher), consumer2); 
Assert.assertEquals(getNextConsumer(dispatcher), consumer3); Assert.assertEquals(getNextConsumer(dispatcher), consumer1); Assert.assertEquals(getNextConsumer(dispatcher), consumer2); Assert.assertEquals(getNextConsumer(dispatcher), consumer3); Assert.assertEquals(getNextConsumer(dispatcher), consumer4); Assert.assertEquals(getNextConsumer(dispatcher), consumer5); Assert.assertEquals(getNextConsumer(dispatcher), consumer6); Assert.assertEquals(getNextConsumer(dispatcher), consumer4); Assert.assertEquals(getNextConsumer(dispatcher), consumer6); Assert.assertEquals(getNextConsumer(dispatcher), consumer7); Assert.assertEquals(getNextConsumer(dispatcher), consumer8); // in between add upper priority consumer with more permits Consumer consumer10 = createConsumer(topic, 0, 2, false, 10); dispatcher.addConsumer(consumer10); Assert.assertEquals(getNextConsumer(dispatcher), consumer10); Assert.assertEquals(getNextConsumer(dispatcher), consumer10); Assert.assertEquals(getNextConsumer(dispatcher), consumer9); } @Test public void testFewBlockedConsumerSamePriority() throws Exception{ PersistentTopic topic = new PersistentTopic(successTopicName, ledgerMock, pulsarTestContext.getBrokerService()); AbstractPersistentDispatcherMultipleConsumers dispatcher = new PersistentDispatcherMultipleConsumers(topic, cursorMock, null); Consumer consumer1 = createConsumer(topic, 0, 2, false, 1); Consumer consumer2 = createConsumer(topic, 0, 2, false, 2); Consumer consumer3 = createConsumer(topic, 0, 2, false, 3); Consumer consumer4 = createConsumer(topic, 0, 2, false, 4); Consumer consumer5 = createConsumer(topic, 0, 1, true, 5); Consumer consumer6 = createConsumer(topic, 0, 2, true, 6); dispatcher.addConsumer(consumer1); dispatcher.addConsumer(consumer2); dispatcher.addConsumer(consumer3); dispatcher.addConsumer(consumer4); dispatcher.addConsumer(consumer5); dispatcher.addConsumer(consumer6); Assert.assertEquals(getNextConsumer(dispatcher), consumer1); 
Assert.assertEquals(getNextConsumer(dispatcher), consumer2); Assert.assertEquals(getNextConsumer(dispatcher), consumer3); Assert.assertEquals(getNextConsumer(dispatcher), consumer4); Assert.assertEquals(getNextConsumer(dispatcher), consumer1); Assert.assertEquals(getNextConsumer(dispatcher), consumer2); Assert.assertEquals(getNextConsumer(dispatcher), consumer3); Assert.assertEquals(getNextConsumer(dispatcher), consumer4); assertNull(getNextConsumer(dispatcher)); } @Test public void testFewBlockedConsumerDifferentPriority() throws Exception { PersistentTopic topic = new PersistentTopic(successTopicName, ledgerMock, pulsarTestContext.getBrokerService()); AbstractPersistentDispatcherMultipleConsumers dispatcher = new PersistentDispatcherMultipleConsumers(topic, cursorMock, null); Consumer consumer1 = createConsumer(topic, 0, 2, false, 1); Consumer consumer2 = createConsumer(topic, 0, 2, false, 2); Consumer consumer3 = createConsumer(topic, 0, 2, false, 3); Consumer consumer4 = createConsumer(topic, 0, 2, false, 4); Consumer consumer5 = createConsumer(topic, 0, 1, true, 5); Consumer consumer6 = createConsumer(topic, 0, 2, true, 6); Consumer consumer7 = createConsumer(topic, 1, 2, false, 7); Consumer consumer8 = createConsumer(topic, 1, 10, true, 8); Consumer consumer9 = createConsumer(topic, 1, 2, false, 9); Consumer consumer10 = createConsumer(topic, 2, 2, false, 10); Consumer consumer11 = createConsumer(topic, 2, 10, true, 11); Consumer consumer12 = createConsumer(topic, 2, 2, false, 12); dispatcher.addConsumer(consumer1); dispatcher.addConsumer(consumer2); dispatcher.addConsumer(consumer3); dispatcher.addConsumer(consumer4); dispatcher.addConsumer(consumer5); dispatcher.addConsumer(consumer6); dispatcher.addConsumer(consumer7); dispatcher.addConsumer(consumer8); dispatcher.addConsumer(consumer9); dispatcher.addConsumer(consumer10); dispatcher.addConsumer(consumer11); dispatcher.addConsumer(consumer12); Assert.assertEquals(getNextConsumer(dispatcher), consumer1); 
Assert.assertEquals(getNextConsumer(dispatcher), consumer2); Assert.assertEquals(getNextConsumer(dispatcher), consumer3); Assert.assertEquals(getNextConsumer(dispatcher), consumer4); Assert.assertEquals(getNextConsumer(dispatcher), consumer1); Assert.assertEquals(getNextConsumer(dispatcher), consumer2); Assert.assertEquals(getNextConsumer(dispatcher), consumer3); Assert.assertEquals(getNextConsumer(dispatcher), consumer4); Assert.assertEquals(getNextConsumer(dispatcher), consumer7); Assert.assertEquals(getNextConsumer(dispatcher), consumer9); Assert.assertEquals(getNextConsumer(dispatcher), consumer7); Assert.assertEquals(getNextConsumer(dispatcher), consumer9); Assert.assertEquals(getNextConsumer(dispatcher), consumer10); Assert.assertEquals(getNextConsumer(dispatcher), consumer12); // add consumer with lower priority again Consumer consumer13 = createConsumer(topic, 0, 2, false, 13); Consumer consumer14 = createConsumer(topic, 0, 2, true, 14); dispatcher.addConsumer(consumer13); dispatcher.addConsumer(consumer14); Assert.assertEquals(getNextConsumer(dispatcher), consumer13); Assert.assertEquals(getNextConsumer(dispatcher), consumer13); Assert.assertEquals(getNextConsumer(dispatcher), consumer10); Assert.assertEquals(getNextConsumer(dispatcher), consumer12); assertNull(getNextConsumer(dispatcher)); } @Test public void testFewBlockedConsumerDifferentPriority2() throws Exception { PersistentTopic topic = new PersistentTopic(successTopicName, ledgerMock, pulsarTestContext.getBrokerService()); AbstractPersistentDispatcherMultipleConsumers dispatcher = new PersistentDispatcherMultipleConsumers(topic, cursorMock, null); Consumer consumer1 = createConsumer(topic, 0, 2, true, 1); Consumer consumer2 = createConsumer(topic, 0, 2, true, 2); Consumer consumer3 = createConsumer(topic, 0, 2, true, 3); Consumer consumer4 = createConsumer(topic, 1, 2, false, 4); Consumer consumer5 = createConsumer(topic, 1, 1, false, 5); Consumer consumer6 = createConsumer(topic, 2, 1, false, 6); 
Consumer consumer7 = createConsumer(topic, 2, 2, true, 7); dispatcher.addConsumer(consumer1); dispatcher.addConsumer(consumer2); dispatcher.addConsumer(consumer3); dispatcher.addConsumer(consumer4); dispatcher.addConsumer(consumer5); dispatcher.addConsumer(consumer6); dispatcher.addConsumer(consumer7); Assert.assertEquals(getNextConsumer(dispatcher), consumer4); Assert.assertEquals(getNextConsumer(dispatcher), consumer5); Assert.assertEquals(getNextConsumer(dispatcher), consumer4); Assert.assertEquals(getNextConsumer(dispatcher), consumer6); assertNull(getNextConsumer(dispatcher)); } @SuppressWarnings("unchecked") private Consumer getNextConsumer(AbstractPersistentDispatcherMultipleConsumers dispatcher) throws Exception { Consumer consumer = dispatcher.getNextConsumer(); if (consumer != null) { Field field = Consumer.class.getDeclaredField("MESSAGE_PERMITS_UPDATER"); field.setAccessible(true); AtomicIntegerFieldUpdater<Consumer> messagePermits = (AtomicIntegerFieldUpdater<Consumer>) field.get(consumer); messagePermits.decrementAndGet(consumer); return consumer; } return null; } private Consumer createConsumer(PersistentTopic topic, int priority, int permit, boolean blocked, int id) throws Exception { PersistentSubscription sub = new PersistentSubscription(topic, "sub-1", cursorMock, false); Consumer consumer = new Consumer(sub, SubType.Shared, "test-topic", id, priority, "" + id, true, serverCnx, "appId", Collections.emptyMap(), false /* read compacted */, null, MessageId.latest, DEFAULT_CONSUMER_EPOCH); try { consumer.flowPermits(permit); } catch (Exception e) { } // set consumer blocked flag Field blockField = Consumer.class.getDeclaredField("blockedConsumerOnUnackedMsgs"); blockField.setAccessible(true); blockField.set(consumer, blocked); return consumer; } private static final Logger log = LoggerFactory.getLogger(PersistentDispatcherFailoverConsumerTest.class); }
openjdk/jdk8
36,959
jdk/src/share/classes/java/time/chrono/ChronoLocalDate.java
/* * Copyright (c) 2012, 2013, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ /* * This file is available under and governed by the GNU General Public * License version 2 only, as published by the Free Software Foundation. * However, the following notice accompanied the original version of this * file: * * Copyright (c) 2012, Stephen Colebourne & Michael Nascimento Santos * * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. 
* * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * * Neither the name of JSR-310 nor the names of its contributors * may be used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package java.time.chrono; import static java.time.temporal.ChronoField.EPOCH_DAY; import static java.time.temporal.ChronoField.ERA; import static java.time.temporal.ChronoField.YEAR; import static java.time.temporal.ChronoUnit.DAYS; import java.time.DateTimeException; import java.time.LocalDate; import java.time.LocalTime; import java.time.format.DateTimeFormatter; import java.time.temporal.ChronoField; import java.time.temporal.ChronoUnit; import java.time.temporal.Temporal; import java.time.temporal.TemporalAccessor; import java.time.temporal.TemporalAdjuster; import java.time.temporal.TemporalAmount; import java.time.temporal.TemporalField; import java.time.temporal.TemporalQueries; import java.time.temporal.TemporalQuery; import java.time.temporal.TemporalUnit; import java.time.temporal.UnsupportedTemporalTypeException; import java.util.Comparator; import java.util.Objects; /** * A date without time-of-day or time-zone in an arbitrary chronology, intended * for advanced globalization use cases. * <p> * <b>Most applications should declare method signatures, fields and variables * as {@link LocalDate}, not this interface.</b> * <p> * A {@code ChronoLocalDate} is the abstract representation of a date where the * {@code Chronology chronology}, or calendar system, is pluggable. * The date is defined in terms of fields expressed by {@link TemporalField}, * where most common implementations are defined in {@link ChronoField}. * The chronology defines how the calendar system operates and the meaning of * the standard fields. * * <h3>When to use this interface</h3> * The design of the API encourages the use of {@code LocalDate} rather than this * interface, even in the case where the application needs to deal with multiple * calendar systems. The rationale for this is explored in the following documentation. * <p> * The primary use case where this interface should be used is where the generic * type parameter {@code <D>} is fully defined as a specific chronology. 
* In that case, the assumptions of that chronology are known at development * time and specified in the code. * <p> * When the chronology is defined in the generic type parameter as ? or otherwise * unknown at development time, the rest of the discussion below applies. * <p> * To emphasize the point, declaring a method signature, field or variable as this * interface type can initially seem like the sensible way to globalize an application, * however it is usually the wrong approach. * As such, it should be considered an application-wide architectural decision to choose * to use this interface as opposed to {@code LocalDate}. * * <h3>Architectural issues to consider</h3> * These are some of the points that must be considered before using this interface * throughout an application. * <p> * 1) Applications using this interface, as opposed to using just {@code LocalDate}, * face a significantly higher probability of bugs. This is because the calendar system * in use is not known at development time. A key cause of bugs is where the developer * applies assumptions from their day-to-day knowledge of the ISO calendar system * to code that is intended to deal with any arbitrary calendar system. * The section below outlines how those assumptions can cause problems * The primary mechanism for reducing this increased risk of bugs is a strong code review process. * This should also be considered a extra cost in maintenance for the lifetime of the code. * <p> * 2) This interface does not enforce immutability of implementations. * While the implementation notes indicate that all implementations must be immutable * there is nothing in the code or type system to enforce this. Any method declared * to accept a {@code ChronoLocalDate} could therefore be passed a poorly or * maliciously written mutable implementation. * <p> * 3) Applications using this interface must consider the impact of eras. 
* {@code LocalDate} shields users from the concept of eras, by ensuring that {@code getYear()} * returns the proleptic year. That decision ensures that developers can think of * {@code LocalDate} instances as consisting of three fields - year, month-of-year and day-of-month. * By contrast, users of this interface must think of dates as consisting of four fields - * era, year-of-era, month-of-year and day-of-month. The extra era field is frequently * forgotten, yet it is of vital importance to dates in an arbitrary calendar system. * For example, in the Japanese calendar system, the era represents the reign of an Emperor. * Whenever one reign ends and another starts, the year-of-era is reset to one. * <p> * 4) The only agreed international standard for passing a date between two systems * is the ISO-8601 standard which requires the ISO calendar system. Using this interface * throughout the application will inevitably lead to the requirement to pass the date * across a network or component boundary, requiring an application specific protocol or format. * <p> * 5) Long term persistence, such as a database, will almost always only accept dates in the * ISO-8601 calendar system (or the related Julian-Gregorian). Passing around dates in other * calendar systems increases the complications of interacting with persistence. * <p> * 6) Most of the time, passing a {@code ChronoLocalDate} throughout an application * is unnecessary, as discussed in the last section below. * * <h3>False assumptions causing bugs in multi-calendar system code</h3> * As indicated above, there are many issues to consider when try to use and manipulate a * date in an arbitrary calendar system. These are some of the key issues. * <p> * Code that queries the day-of-month and assumes that the value will never be more than * 31 is invalid. Some calendar systems have more than 31 days in some months. * <p> * Code that adds 12 months to a date and assumes that a year has been added is invalid. 
* Some calendar systems have a different number of months, such as 13 in the Coptic or Ethiopic. * <p> * Code that adds one month to a date and assumes that the month-of-year value will increase * by one or wrap to the next year is invalid. Some calendar systems have a variable number * of months in a year, such as the Hebrew. * <p> * Code that adds one month, then adds a second one month and assumes that the day-of-month * will remain close to its original value is invalid. Some calendar systems have a large difference * between the length of the longest month and the length of the shortest month. * For example, the Coptic or Ethiopic have 12 months of 30 days and 1 month of 5 days. * <p> * Code that adds seven days and assumes that a week has been added is invalid. * Some calendar systems have weeks of other than seven days, such as the French Revolutionary. * <p> * Code that assumes that because the year of {@code date1} is greater than the year of {@code date2} * then {@code date1} is after {@code date2} is invalid. This is invalid for all calendar systems * when referring to the year-of-era, and especially untrue of the Japanese calendar system * where the year-of-era restarts with the reign of every new Emperor. * <p> * Code that treats month-of-year one and day-of-month one as the start of the year is invalid. * Not all calendar systems start the year when the month value is one. * <p> * In general, manipulating a date, and even querying a date, is wide open to bugs when the * calendar system is unknown at development time. This is why it is essential that code using * this interface is subjected to additional code reviews. It is also why an architectural * decision to avoid this interface type is usually the correct one. * * <h3>Using LocalDate instead</h3> * The primary alternative to using this interface throughout your application is as follows. * <ul> * <li>Declare all method signatures referring to dates in terms of {@code LocalDate}. 
* <li>Either store the chronology (calendar system) in the user profile or lookup * the chronology from the user locale * <li>Convert the ISO {@code LocalDate} to and from the user's preferred calendar system during * printing and parsing * </ul> * This approach treats the problem of globalized calendar systems as a localization issue * and confines it to the UI layer. This approach is in keeping with other localization * issues in the java platform. * <p> * As discussed above, performing calculations on a date where the rules of the calendar system * are pluggable requires skill and is not recommended. * Fortunately, the need to perform calculations on a date in an arbitrary calendar system * is extremely rare. For example, it is highly unlikely that the business rules of a library * book rental scheme will allow rentals to be for one month, where meaning of the month * is dependent on the user's preferred calendar system. * <p> * A key use case for calculations on a date in an arbitrary calendar system is producing * a month-by-month calendar for display and user interaction. Again, this is a UI issue, * and use of this interface solely within a few methods of the UI layer may be justified. * <p> * In any other part of the system, where a date must be manipulated in a calendar system * other than ISO, the use case will generally specify the calendar system to use. * For example, an application may need to calculate the next Islamic or Hebrew holiday * which may require manipulating the date. * This kind of use case can be handled as follows: * <ul> * <li>start from the ISO {@code LocalDate} being passed to the method * <li>convert the date to the alternate calendar system, which for this use case is known * rather than arbitrary * <li>perform the calculation * <li>convert back to {@code LocalDate} * </ul> * Developers writing low-level frameworks or libraries should also avoid this interface. 
 * Instead, one of the two general purpose access interfaces should be used.
 * Use {@link TemporalAccessor} if read-only access is required, or use {@link Temporal}
 * if read-write access is required.
 *
 * @implSpec
 * This interface must be implemented with care to ensure other classes operate correctly.
 * All implementations that can be instantiated must be final, immutable and thread-safe.
 * Subclasses should be Serializable wherever possible.
 * <p>
 * Additional calendar systems may be added to the system.
 * See {@link Chronology} for more details.
 *
 * @since 1.8
 */
public interface ChronoLocalDate extends Temporal, TemporalAdjuster, Comparable<ChronoLocalDate> {

    /**
     * Gets a comparator that compares {@code ChronoLocalDate} in
     * time-line order ignoring the chronology.
     * <p>
     * Unlike {@link #compareTo}, this comparator looks only at the underlying
     * date (the epoch-day), not the chronology, so dates in different calendar
     * systems compare purely by their position on the local time-line.
     *
     * @return a comparator that compares in time-line order ignoring the chronology
     * @see #isAfter
     * @see #isBefore
     * @see #isEqual
     */
    static Comparator<ChronoLocalDate> timeLineOrder() {
        // Shared stateless comparator instance held by AbstractChronology.
        return AbstractChronology.DATE_ORDER;
    }

    //-----------------------------------------------------------------------
    /**
     * Obtains an instance of {@code ChronoLocalDate} from a temporal object.
     * <p>
     * The conversion extracts and combines the chronology and the date from the
     * temporal object, equivalent to using
     * {@link Chronology#date(TemporalAccessor)} with the extracted chronology.
     * <p>
     * This method matches the signature of the functional interface
     * {@link TemporalQuery} allowing it to be used as a query via method
     * reference, {@code ChronoLocalDate::from}.
     *
     * @param temporal  the temporal object to convert, not null
     * @return the date, not null
     * @throws DateTimeException if unable to convert to a {@code ChronoLocalDate}
     * @see Chronology#date(TemporalAccessor)
     */
    static ChronoLocalDate from(TemporalAccessor temporal) {
        if (temporal instanceof ChronoLocalDate) {
            // Already the target type; return as-is without conversion.
            return (ChronoLocalDate) temporal;
        }
        Objects.requireNonNull(temporal, "temporal");
        // The chronology query identifies which calendar system the temporal uses;
        // without it there is no way to interpret the date fields.
        Chronology chrono = temporal.query(TemporalQueries.chronology());
        if (chrono == null) {
            throw new DateTimeException("Unable to obtain ChronoLocalDate from TemporalAccessor: " + temporal.getClass());
        }
        return chrono.date(temporal);
    }

    //-----------------------------------------------------------------------
    /**
     * Gets the chronology of this date.
     * <p>
     * The {@code Chronology} represents the calendar system in use.
     * The era and other fields in {@link ChronoField} are defined by the chronology.
     *
     * @return the chronology, not null
     */
    Chronology getChronology();

    /**
     * Gets the era, as defined by the chronology.
     * <p>
     * The era is, conceptually, the largest division of the time-line.
     * Most calendar systems have a single epoch dividing the time-line into two
     * eras, but some have multiple eras; the exact meaning is determined by the
     * {@code Chronology}. All correctly implemented {@code Era} classes are
     * singletons, so {@code date.getEra() == SomeChrono.ERA_NAME} is valid.
     * <p>
     * This default implementation uses {@link Chronology#eraOf(int)}.
     *
     * @return the chronology specific era constant applicable at this date, not null
     */
    default Era getEra() {
        return getChronology().eraOf(get(ERA));
    }

    /**
     * Checks if the year is a leap year, as defined by the calendar system.
     * <p>
     * A leap-year is a year of a longer length than normal; the exact meaning is
     * determined by the chronology with the constraint that a leap-year must
     * imply a year-length longer than a non leap-year.
     * <p>
     * This default implementation uses {@link Chronology#isLeapYear(long)}.
     *
     * @return true if this date is in a leap year, false otherwise
     */
    default boolean isLeapYear() {
        return getChronology().isLeapYear(getLong(YEAR));
    }

    /**
     * Returns the length of the month represented by this date, as defined by
     * the calendar system.
     *
     * @return the length of the month in days
     */
    int lengthOfMonth();

    /**
     * Returns the length of the year represented by this date, as defined by
     * the calendar system.
     * <p>
     * The default implementation uses {@link #isLeapYear()} and returns 365 or 366.
     *
     * @return the length of the year in days
     */
    default int lengthOfYear() {
        return (isLeapYear() ? 366 : 365);
    }

    /**
     * Checks if the specified field is supported.
     * <p>
     * If false, calling {@link #range(TemporalField) range},
     * {@link #get(TemporalField) get} and {@link #with(TemporalField, long)}
     * will throw an exception. The set of supported fields is defined by the
     * chronology and normally includes all {@code ChronoField} date fields.
     * For non-{@code ChronoField} fields the decision is delegated to
     * {@code TemporalField.isSupportedBy(TemporalAccessor)}.
     *
     * @param field  the field to check, null returns false
     * @return true if the field can be queried, false if not
     */
    @Override
    default boolean isSupported(TemporalField field) {
        if (field instanceof ChronoField) {
            // All date-based ChronoFields are supported; time-based ones are not.
            return field.isDateBased();
        }
        return field != null && field.isSupportedBy(this);
    }

    /**
     * Checks if the specified unit is supported.
     * <p>
     * If false, calling {@link #plus(long, TemporalUnit)} and
     * {@link #minus(long, TemporalUnit) minus} will throw an exception.
     * The set of supported units is defined by the chronology and normally
     * includes all {@code ChronoUnit} date units except {@code FOREVER}.
     * For non-{@code ChronoUnit} units the decision is delegated to
     * {@code TemporalUnit.isSupportedBy(Temporal)}.
     *
     * @param unit  the unit to check, null returns false
     * @return true if the unit can be added/subtracted, false if not
     */
    @Override
    default boolean isSupported(TemporalUnit unit) {
        if (unit instanceof ChronoUnit) {
            return unit.isDateBased();
        }
        return unit != null && unit.isSupportedBy(this);
    }

    //-----------------------------------------------------------------------
    // override for covariant return type
    /**
     * {@inheritDoc}
     * @throws DateTimeException {@inheritDoc}
     * @throws ArithmeticException {@inheritDoc}
     */
    @Override
    default ChronoLocalDate with(TemporalAdjuster adjuster) {
        // ensureValid re-checks that the adjusted result stayed in this chronology.
        return ChronoLocalDateImpl.ensureValid(getChronology(), Temporal.super.with(adjuster));
    }

    /**
     * {@inheritDoc}
     * @throws DateTimeException {@inheritDoc}
     * @throws UnsupportedTemporalTypeException {@inheritDoc}
     * @throws ArithmeticException {@inheritDoc}
     */
    @Override
    default ChronoLocalDate with(TemporalField field, long newValue) {
        if (field instanceof ChronoField) {
            // ChronoField handling must be provided by the concrete implementation;
            // this default only supports externally-defined fields.
            throw new UnsupportedTemporalTypeException("Unsupported field: " + field);
        }
        return ChronoLocalDateImpl.ensureValid(getChronology(), field.adjustInto(this, newValue));
    }

    /**
     * {@inheritDoc}
     * @throws DateTimeException {@inheritDoc}
     * @throws ArithmeticException {@inheritDoc}
     */
    @Override
    default ChronoLocalDate plus(TemporalAmount amount) {
        return ChronoLocalDateImpl.ensureValid(getChronology(), Temporal.super.plus(amount));
    }

    /**
     * {@inheritDoc}
     * @throws DateTimeException {@inheritDoc}
     * @throws ArithmeticException {@inheritDoc}
     */
    @Override
    default ChronoLocalDate plus(long amountToAdd, TemporalUnit unit) {
        if (unit instanceof ChronoUnit) {
            // ChronoUnit arithmetic must be provided by the concrete implementation.
            throw new UnsupportedTemporalTypeException("Unsupported unit: " + unit);
        }
        return ChronoLocalDateImpl.ensureValid(getChronology(), unit.addTo(this, amountToAdd));
    }

    /**
     * {@inheritDoc}
     * @throws DateTimeException {@inheritDoc}
     * @throws ArithmeticException {@inheritDoc}
     */
    @Override
    default ChronoLocalDate minus(TemporalAmount amount) {
        return ChronoLocalDateImpl.ensureValid(getChronology(), Temporal.super.minus(amount));
    }

    /**
     * {@inheritDoc}
     * @throws DateTimeException {@inheritDoc}
     * @throws UnsupportedTemporalTypeException {@inheritDoc}
     * @throws ArithmeticException {@inheritDoc}
     */
    @Override
    default ChronoLocalDate minus(long amountToSubtract, TemporalUnit unit) {
        return ChronoLocalDateImpl.ensureValid(getChronology(), Temporal.super.minus(amountToSubtract, unit));
    }

    //-----------------------------------------------------------------------
    /**
     * Queries this date using the specified query.
     * <p>
     * The {@code TemporalQuery} object defines the logic used to obtain the
     * result; read the documentation of the query to understand what this
     * method will return. The result is obtained by invoking
     * {@link TemporalQuery#queryFrom(TemporalAccessor)} on the specified query
     * passing {@code this} as the argument.
     *
     * @param <R> the type of the result
     * @param query  the query to invoke, not null
     * @return the query result, null may be returned (defined by the query)
     * @throws DateTimeException if unable to query (defined by the query)
     * @throws ArithmeticException if numeric overflow occurs (defined by the query)
     */
    @SuppressWarnings("unchecked")
    @Override
    default <R> R query(TemporalQuery<R> query) {
        // A date carries no zone, offset or time-of-day information.
        if (query == TemporalQueries.zoneId() || query == TemporalQueries.zone() || query == TemporalQueries.offset()) {
            return null;
        } else if (query == TemporalQueries.localTime()) {
            return null;
        } else if (query == TemporalQueries.chronology()) {
            return (R) getChronology();
        } else if (query == TemporalQueries.precision()) {
            return (R) DAYS;
        }
        // inline TemporalAccessor.super.query(query) as an optimization
        // non-JDK classes are not permitted to make this optimization
        return query.queryFrom(this);
    }

    /**
     * Adjusts the specified temporal object to have the same date as this object.
     * <p>
     * The adjustment is equivalent to using {@link Temporal#with(TemporalField, long)}
     * passing {@link ChronoField#EPOCH_DAY} as the field. In most cases, it is
     * clearer to reverse the calling pattern by using
     * {@link Temporal#with(TemporalAdjuster)}:
     * <pre>
     *  // these two lines are equivalent, but the second approach is recommended
     *  temporal = thisLocalDate.adjustInto(temporal);
     *  temporal = temporal.with(thisLocalDate);
     * </pre>
     * <p>
     * This instance is immutable and unaffected by this method call.
     *
     * @param temporal  the target object to be adjusted, not null
     * @return the adjusted object, not null
     * @throws DateTimeException if unable to make the adjustment
     * @throws ArithmeticException if numeric overflow occurs
     */
    @Override
    default Temporal adjustInto(Temporal temporal) {
        return temporal.with(EPOCH_DAY, toEpochDay());
    }

    /**
     * Calculates the amount of time until another date in terms of the specified unit.
     * <p>
     * The start and end points are {@code this} and the specified date; the
     * result is negative if the end is before the start. The {@code Temporal}
     * passed to this method is converted to a {@code ChronoLocalDate} using
     * {@link Chronology#date(TemporalAccessor)}. The calculation returns a
     * whole number of complete units between the two dates, for example
     * {@code startDate.until(endDate, DAYS)}.
     * <p>
     * This method is equivalent to {@link TemporalUnit#between(Temporal, Temporal)}:
     * <pre>
     *  // these two lines are equivalent
     *  amount = start.until(end, MONTHS);
     *  amount = MONTHS.between(start, end);
     * </pre>
     * The calculation is implemented in this method for {@link ChronoUnit}:
     * {@code DAYS}, {@code WEEKS}, {@code MONTHS}, {@code YEARS},
     * {@code DECADES}, {@code CENTURIES}, {@code MILLENNIA} and {@code ERAS}
     * should be supported by all implementations; other {@code ChronoUnit}
     * values throw an exception. Non-{@code ChronoUnit} units delegate to
     * {@code TemporalUnit.between(Temporal, Temporal)}.
     * <p>
     * This instance is immutable and unaffected by this method call.
     *
     * @param endExclusive  the end date, exclusive, which is converted to a
     *  {@code ChronoLocalDate} in the same chronology, not null
     * @param unit  the unit to measure the amount in, not null
     * @return the amount of time between this date and the end date
     * @throws DateTimeException if the amount cannot be calculated, or the end
     *  temporal cannot be converted to a {@code ChronoLocalDate}
     * @throws UnsupportedTemporalTypeException if the unit is not supported
     * @throws ArithmeticException if numeric overflow occurs
     */
    @Override  // override for Javadoc
    long until(Temporal endExclusive, TemporalUnit unit);

    /**
     * Calculates the period between this date and another date as a {@code ChronoPeriod}.
     * <p>
     * All supplied chronologies calculate the period using years, months and
     * days, however the {@code ChronoPeriod} API allows the period to be
     * represented using other units. The result is negative if the end is
     * before the start, with the same sign in each of year, month and day.
     * The calculation is performed using the chronology of this date; if
     * necessary, the input date will be converted to match.
     * <p>
     * This instance is immutable and unaffected by this method call.
     *
     * @param endDateExclusive  the end date, exclusive, which may be in any chronology, not null
     * @return the period between this date and the end date, not null
     * @throws DateTimeException if the period cannot be calculated
     * @throws ArithmeticException if numeric overflow occurs
     */
    ChronoPeriod until(ChronoLocalDate endDateExclusive);

    /**
     * Formats this date using the specified formatter.
     * <p>
     * The default implementation must behave as follows:
     * <pre>
     *  return formatter.format(this);
     * </pre>
     *
     * @param formatter  the formatter to use, not null
     * @return the formatted date string, not null
     * @throws DateTimeException if an error occurs during printing
     */
    default String format(DateTimeFormatter formatter) {
        Objects.requireNonNull(formatter, "formatter");
        return formatter.format(this);
    }

    //-----------------------------------------------------------------------
    /**
     * Combines this date with a time to create a {@code ChronoLocalDateTime}.
     * <p>
     * All possible combinations of date and time are valid.
     *
     * @param localTime  the local time to use, not null
     * @return the local date-time formed from this date and the specified time, not null
     */
    @SuppressWarnings("unchecked")
    default ChronoLocalDateTime<?> atTime(LocalTime localTime) {
        return ChronoLocalDateTimeImpl.of(this, localTime);
    }

    //-----------------------------------------------------------------------
    /**
     * Converts this date to the Epoch Day.
     * <p>
     * The {@link ChronoField#EPOCH_DAY Epoch Day count} is a simple
     * incrementing count of days where day 0 is 1970-01-01 (ISO).
     * This definition is the same for all chronologies, enabling conversion.
     * <p>
     * This default implementation queries the {@code EPOCH_DAY} field.
     *
     * @return the Epoch Day equivalent to this date
     */
    default long toEpochDay() {
        return getLong(EPOCH_DAY);
    }

    //-----------------------------------------------------------------------
    /**
     * Compares this date to another date, including the chronology.
     * <p>
     * The comparison is based first on the underlying time-line date, then on
     * the chronology. It is "consistent with equals", as defined by
     * {@link Comparable}: when two values represent the same date on the
     * time-line, the chronology ID is compared to distinguish them.
     * To compare the dates of two {@code TemporalAccessor} instances, including
     * dates in two different chronologies, use {@link ChronoField#EPOCH_DAY}
     * as a comparator.
     *
     * @param other  the other date to compare to, not null
     * @return the comparator value, negative if less, positive if greater
     */
    @Override
    default int compareTo(ChronoLocalDate other) {
        int cmp = Long.compare(toEpochDay(), other.toEpochDay());
        if (cmp == 0) {
            // Same time-line position: order by chronology to stay consistent with equals.
            cmp = getChronology().compareTo(other.getChronology());
        }
        return cmp;
    }

    /**
     * Checks if this date is after the specified date ignoring the chronology.
     * <p>
     * Unlike {@link #compareTo}, this compares only the underlying date, not
     * the chronology; it is equivalent to
     * {@code date1.toEpochDay() &gt; date2.toEpochDay()}.
     *
     * @param other  the other date to compare to, not null
     * @return true if this is after the specified date
     */
    default boolean isAfter(ChronoLocalDate other) {
        return this.toEpochDay() > other.toEpochDay();
    }

    /**
     * Checks if this date is before the specified date ignoring the chronology.
     * <p>
     * Unlike {@link #compareTo}, this compares only the underlying date, not
     * the chronology; it is equivalent to
     * {@code date1.toEpochDay() &lt; date2.toEpochDay()}.
     *
     * @param other  the other date to compare to, not null
     * @return true if this is before the specified date
     */
    default boolean isBefore(ChronoLocalDate other) {
        return this.toEpochDay() < other.toEpochDay();
    }

    /**
     * Checks if this date is equal to the specified date ignoring the chronology.
     * <p>
     * Unlike {@link #compareTo}, this compares only the underlying date, not
     * the chronology; it is equivalent to
     * {@code date1.toEpochDay() == date2.toEpochDay()}.
     *
     * @param other  the other date to compare to, not null
     * @return true if the underlying date is equal to the specified date
     */
    default boolean isEqual(ChronoLocalDate other) {
        return this.toEpochDay() == other.toEpochDay();
    }

    //-----------------------------------------------------------------------
    /**
     * Checks if this date is equal to another date, including the chronology.
     * <p>
     * Compares this date with another ensuring that the date and chronology
     * are the same. To compare the dates of two {@code TemporalAccessor}
     * instances, including dates in two different chronologies, use
     * {@link ChronoField#EPOCH_DAY} as a comparator.
     *
     * @param obj  the object to check, null returns false
     * @return true if this is equal to the other date
     */
    @Override
    boolean equals(Object obj);

    /**
     * A hash code for this date.
     *
     * @return a suitable hash code
     */
    @Override
    int hashCode();

    //-----------------------------------------------------------------------
    /**
     * Outputs this date as a {@code String}.
     * <p>
     * The output will include the full local date.
     *
     * @return the formatted date, not null
     */
    @Override
    String toString();

}
apache/eagle
36,643
eagle-core/eagle-alert-parent/eagle-alert/alert-engine/src/test/java/org/apache/eagle/alert/engine/interpreter/PolicyInterpreterTest.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.eagle.alert.engine.interpreter;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import org.apache.eagle.alert.engine.coordinator.PolicyDefinition;
import org.apache.eagle.alert.engine.coordinator.StreamColumn;
import org.apache.eagle.alert.engine.coordinator.StreamDefinition;
import org.apache.eagle.alert.engine.coordinator.StreamPartition;
import org.apache.eagle.alert.engine.evaluator.PolicyStreamHandlers;
import org.junit.Assert;
import org.junit.Test;
import io.siddhi.core.exception.DefinitionNotExistException;
import io.siddhi.query.api.exception.SiddhiAppValidationException;

import java.util.*;

/**
 * Unit tests for {@code PolicyInterpreter}: parsing Siddhi policy queries into
 * {@code PolicyExecutionPlan}s (input/output streams, stream partitions, sort
 * specs) and validating {@code PolicyDefinition}s against stream schemas.
 */
public class PolicyInterpreterTest {
    // -------------------------
    // Single Stream Test Cases
    // -------------------------

    // A single externalTime-windowed query yields one partition whose sort-spec
    // window equals the query's window length (2 min -> 120000 ms).
    @Test
    public void testParseSingleStreamPolicyQuery() throws Exception {
        PolicyExecutionPlan executionPlan = PolicyInterpreter.parseExecutionPlan("from HDFS_AUDIT_LOG_ENRICHED_STREAM_SANDBOX#window.externalTime(timestamp, 2 min) "
            + "select cmd, user, count() as total_count group by cmd,user insert into HDFS_AUDIT_LOG_ENRICHED_STREAM_SANDBOX_OUT");
        Assert.assertEquals("HDFS_AUDIT_LOG_ENRICHED_STREAM_SANDBOX", executionPlan.getInputStreams().keySet().toArray()[0]);
        Assert.assertEquals("HDFS_AUDIT_LOG_ENRICHED_STREAM_SANDBOX_OUT", executionPlan.getOutputStreams().keySet().toArray()[0]);
        Assert.assertEquals(1, executionPlan.getStreamPartitions().size());
        Assert.assertEquals(2 * 60 * 1000, executionPlan.getStreamPartitions().get(0).getSortSpec().getWindowPeriodMillis());
    }

    // A pattern query (e1 -> e2) over two streams: both input streams are
    // detected and each gets a GROUPBY partition on the group-by columns.
    @Test
    public void testParseSingleStreamPolicyWithPattern() throws Exception {
        PolicyExecutionPlan executionPlan = PolicyInterpreter.parseExecutionPlan(
            "from e1=Stream1[price >= 20] -> e2=Stream2[price >= e1.price] \n"
                + "select e1.symbol as symbol, e2.price as price, e1.price+e2.price as total_price \n"
                + "group by symbol, company insert into OutStream");
        Assert.assertTrue(executionPlan.getInputStreams().containsKey("Stream1"));
        Assert.assertTrue(executionPlan.getInputStreams().containsKey("Stream2"));
        Assert.assertTrue(executionPlan.getOutputStreams().containsKey("OutStream"));
        Assert.assertEquals(StreamPartition.Type.GROUPBY, executionPlan.getStreamPartitions().get(0).getType());
        Assert.assertArrayEquals(new String[] {"symbol", "company"}, executionPlan.getStreamPartitions().get(0).getColumns().toArray());
        Assert.assertEquals(StreamPartition.Type.GROUPBY, executionPlan.getStreamPartitions().get(1).getType());
        Assert.assertArrayEquals(new String[] {"symbol", "company"}, executionPlan.getStreamPartitions().get(1).getColumns().toArray());
    }

    // Two compatible queries over the SAME stream with different windows merge
    // into a single partition using the larger window (1 hour).
    @Test
    public void testParseSingleStreamPolicyQueryWithMultiplePartitionUsingLargerWindow() throws Exception {
        PolicyExecutionPlan executionPlan = PolicyInterpreter.parseExecutionPlan(
            "from HDFS_AUDIT_LOG_ENRICHED_STREAM_SANDBOX#window.externalTime(timestamp, 1 min) "
                + "select cmd,user, count() as total_count group by cmd,user insert into HDFS_AUDIT_LOG_ENRICHED_STREAM_SANDBOX_OUT_1;"
                + "from HDFS_AUDIT_LOG_ENRICHED_STREAM_SANDBOX#window.externalTime(timestamp, 1 hour) "
                + "select cmd,user, count() as total_count group by cmd,user insert into HDFS_AUDIT_LOG_ENRICHED_STREAM_SANDBOX_OUT_2;"
        );
        Assert.assertEquals("HDFS_AUDIT_LOG_ENRICHED_STREAM_SANDBOX", executionPlan.getInputStreams().keySet().toArray()[0]);
        Assert.assertTrue(executionPlan.getOutputStreams().containsKey("HDFS_AUDIT_LOG_ENRICHED_STREAM_SANDBOX_OUT_1"));
        Assert.assertTrue(executionPlan.getOutputStreams().containsKey("HDFS_AUDIT_LOG_ENRICHED_STREAM_SANDBOX_OUT_2"));
        Assert.assertEquals(1, executionPlan.getStreamPartitions().size());
        Assert.assertEquals(60 * 60 * 1000, executionPlan.getStreamPartitions().get(0).getSortSpec().getWindowPeriodMillis());
    }

    // Two queries with conflicting select clauses over the same stream are
    // expected to fail validation with SiddhiAppValidationException.
    // NOTE(review): because the exception is expected from parseExecutionPlan,
    // the assertions below are unreachable dead code.
    @Test(expected = SiddhiAppValidationException.class)
    public void testParseSingleStreamPolicyQueryWithConflictPartition() throws Exception {
        PolicyExecutionPlan executionPlan = PolicyInterpreter.parseExecutionPlan(
            "from HDFS_AUDIT_LOG_ENRICHED_STREAM_SANDBOX#window.externalTime(timestamp, 5 min) "
                + "select cmd, count() as total_count group by cmd,user insert into HDFS_AUDIT_LOG_ENRICHED_STREAM_SANDBOX_OUT_1;"
                + "from HDFS_AUDIT_LOG_ENRICHED_STREAM_SANDBOX#window.externalTime(timestamp, 2 min) "
                + "select user, count() as total_count group by cmd,user insert into HDFS_AUDIT_LOG_ENRICHED_STREAM_SANDBOX_OUT_2;"
        );
        Assert.assertEquals("HDFS_AUDIT_LOG_ENRICHED_STREAM_SANDBOX", executionPlan.getInputStreams().keySet().toArray()[0]);
        Assert.assertTrue(executionPlan.getOutputStreams().containsKey("HDFS_AUDIT_LOG_ENRICHED_STREAM_SANDBOX_OUT_1"));
        Assert.assertTrue(executionPlan.getOutputStreams().containsKey("HDFS_AUDIT_LOG_ENRICHED_STREAM_SANDBOX_OUT_2"));
        Assert.assertEquals(2, executionPlan.getStreamPartitions().size());
        Assert.assertEquals(5 * 60 * 1000, executionPlan.getStreamPartitions().get(0).getSortSpec().getWindowPeriodMillis());
    }

    // Valid policy over an externalTime window: validation succeeds and the
    // single partition carries a non-null sort spec (event-time ordering needed).
    @Test
    public void testValidPolicyWithExternalTimeWindow() {
        PolicyDefinition policyDefinition = new PolicyDefinition();
        policyDefinition.setName("test_policy");
        policyDefinition.setInputStreams(Collections.singletonList("INPUT_STREAM_1"));
        policyDefinition.setOutputStreams(Collections.singletonList("OUTPUT_STREAM_1"));
        PolicyDefinition.Definition definition = new PolicyDefinition.Definition();
        definition.setType("siddhi");
        definition.setValue("from INPUT_STREAM_1#window.externalTime(timestamp, 2 min) select name, sum(value) as total group by name insert into OUTPUT_STREAM_1 ;");
        definition.setInputStreams(policyDefinition.getInputStreams());
        definition.setOutputStreams(policyDefinition.getOutputStreams());
        policyDefinition.setDefinition(definition);
        // Extra stream definitions beyond what the policy uses are allowed.
        PolicyValidationResult validation = PolicyInterpreter.validate(policyDefinition, new HashMap<String, StreamDefinition>() {
            {
                put("INPUT_STREAM_1", mockStreamDefinition("INPUT_STREAM_1"));
                put("INPUT_STREAM_2", mockStreamDefinition("INPUT_STREAM_2"));
                put("INPUT_STREAM_3", mockStreamDefinition("INPUT_STREAM_3"));
                put("INPUT_STREAM_4", mockStreamDefinition("INPUT_STREAM_4"));
            }
        });
        Assert.assertTrue(validation.isSuccess());
        Assert.assertEquals(1, validation.getPolicyExecutionPlan().getInputStreams().size());
        Assert.assertEquals(1, validation.getPolicyExecutionPlan().getOutputStreams().size());
        Assert.assertEquals(1, validation.getPolicyExecutionPlan().getStreamPartitions().size());
        Assert.assertNotNull(validation.getPolicyExecutionPlan().getStreamPartitions().get(0).getSortSpec());
    }

    // Valid policy over a processing-time window (window.time): validation
    // succeeds but no sort spec is produced (no event-time ordering required).
    @Test
    public void testValidPolicyWithTimeWindow() {
        PolicyDefinition policyDefinition = new PolicyDefinition();
        policyDefinition.setName("test_policy");
        policyDefinition.setInputStreams(Collections.singletonList("INPUT_STREAM_1"));
        policyDefinition.setOutputStreams(Collections.singletonList("OUTPUT_STREAM_1"));
        PolicyDefinition.Definition definition = new PolicyDefinition.Definition();
        definition.setType("siddhi");
        definition.setValue("from INPUT_STREAM_1#window.time(2 min) select name, sum(value) as total group by name insert into OUTPUT_STREAM_1 ;");
        definition.setInputStreams(policyDefinition.getInputStreams());
        definition.setOutputStreams(policyDefinition.getOutputStreams());
        policyDefinition.setDefinition(definition);
        PolicyValidationResult validation = PolicyInterpreter.validate(policyDefinition, new HashMap<String, StreamDefinition>() {
            {
                put("INPUT_STREAM_1", mockStreamDefinition("INPUT_STREAM_1"));
            }
        });
        Assert.assertTrue(validation.isSuccess());
        Assert.assertEquals(1, validation.getPolicyExecutionPlan().getInputStreams().size());
        Assert.assertEquals(1, validation.getPolicyExecutionPlan().getOutputStreams().size());
        Assert.assertEquals(1, validation.getPolicyExecutionPlan().getStreamPartitions().size());
        Assert.assertNull(validation.getPolicyExecutionPlan().getStreamPartitions().get(0).getSortSpec());
    }

    // Declaring more input streams on the policy than the query actually reads
    // is still valid; the plan reflects only the streams the query uses.
    @Test
    public void testValidPolicyWithTooManyInputStreams() {
        PolicyDefinition policyDefinition = new PolicyDefinition();
        policyDefinition.setName("test_policy");
        policyDefinition.setInputStreams(Arrays.asList("INPUT_STREAM_1", "INPUT_STREAM_2"));
        policyDefinition.setOutputStreams(Collections.singletonList("OUTPUT_STREAM_1"));
        PolicyDefinition.Definition definition = new PolicyDefinition.Definition();
        definition.setType("siddhi");
        definition.setValue("from INPUT_STREAM_1[value > 90.0] select * group by name insert into OUTPUT_STREAM_1;");
        definition.setInputStreams(policyDefinition.getInputStreams());
        definition.setOutputStreams(policyDefinition.getOutputStreams());
        policyDefinition.setDefinition(definition);
        PolicyValidationResult validation = PolicyInterpreter.validate(policyDefinition, new HashMap<String, StreamDefinition>() {
            {
                put("INPUT_STREAM_1", mockStreamDefinition("INPUT_STREAM_1"));
                put("INPUT_STREAM_2", mockStreamDefinition("INPUT_STREAM_2"));
            }
        });
        Assert.assertTrue(validation.isSuccess());
        Assert.assertEquals(1, validation.getPolicyExecutionPlan().getInputStreams().size());
        Assert.assertEquals(1, validation.getPolicyExecutionPlan().getOutputStreams().size());
    }

    // Declaring fewer output streams than the query produces is still valid;
    // the plan reports both outputs derived from the query text.
    @Test
    public void testValidPolicyWithTooFewOutputStreams() {
        PolicyDefinition policyDefinition = new PolicyDefinition();
        policyDefinition.setName("test_policy");
        policyDefinition.setInputStreams(Arrays.asList("INPUT_STREAM_1", "INPUT_STREAM_2"));
        policyDefinition.setOutputStreams(Collections.singletonList("OUTPUT_STREAM_1"));
        PolicyDefinition.Definition definition = new PolicyDefinition.Definition();
        definition.setType("siddhi");
        definition.setValue(
            "from INPUT_STREAM_1[value > 90.0] select * group by name insert into OUTPUT_STREAM_1;"
                + "from INPUT_STREAM_1[value < 90.0] select * group by name insert into OUTPUT_STREAM_2;"
        );
        definition.setInputStreams(policyDefinition.getInputStreams());
        definition.setOutputStreams(policyDefinition.getOutputStreams());
        policyDefinition.setDefinition(definition);
        PolicyValidationResult validation = PolicyInterpreter.validate(policyDefinition, new HashMap<String, StreamDefinition>() {
            {
                put("INPUT_STREAM_1", mockStreamDefinition("INPUT_STREAM_1"));
                put("INPUT_STREAM_2", mockStreamDefinition("INPUT_STREAM_2"));
            }
        });
        Assert.assertTrue(validation.isSuccess());
        Assert.assertEquals(1, validation.getPolicyExecutionPlan().getInputStreams().size());
        Assert.assertEquals(2, validation.getPolicyExecutionPlan().getOutputStreams().size());
    }

    // Malformed Siddhi syntax (parentheses instead of a filter bracket) must
    // fail validation rather than throw out of validate().
    @Test
    public void testInvalidPolicyForSyntaxError() {
        PolicyDefinition policyDefinition = new PolicyDefinition();
        policyDefinition.setName("test_policy");
        policyDefinition.setInputStreams(Collections.singletonList("INPUT_STREAM"));
        policyDefinition.setOutputStreams(Collections.singletonList("OUTPUT_STREAM"));
        PolicyDefinition.Definition definition = new PolicyDefinition.Definition();
        definition.setType("siddhi");
        definition.setValue("from INPUT_STREAM (value > 90.0) select * group by name insert into OUTPUT_STREAM;");
        definition.setInputStreams(policyDefinition.getInputStreams());
        definition.setOutputStreams(policyDefinition.getOutputStreams());
        policyDefinition.setDefinition(definition);
        PolicyValidationResult validation = PolicyInterpreter.validate(policyDefinition, new HashMap<String, StreamDefinition>() {
            {
                put("INPUT_STREAM", mockStreamDefinition("INPUT_STREAM"));
            }
        });
        Assert.assertFalse(validation.isSuccess());
    }

    // The query reads INPUT_STREAM_1 but only INPUT_STREAM_2 has a schema:
    // validation must fail.
    @Test
    public void testInvalidPolicyForNotDefinedInputStream() {
        PolicyDefinition policyDefinition = new PolicyDefinition();
        policyDefinition.setName("test_policy");
        policyDefinition.setInputStreams(Collections.singletonList("INPUT_STREAM_1"));
        policyDefinition.setOutputStreams(Collections.singletonList("OUTPUT_STREAM_1"));
        PolicyDefinition.Definition definition = new PolicyDefinition.Definition();
        definition.setType("siddhi");
        definition.setValue("from INPUT_STREAM_1[value > 90.0] select * group by name insert into OUTPUT_STREAM_1;");
        definition.setInputStreams(policyDefinition.getInputStreams());
        definition.setOutputStreams(policyDefinition.getOutputStreams());
        policyDefinition.setDefinition(definition);
        PolicyValidationResult validation = PolicyInterpreter.validate(policyDefinition, new HashMap<String, StreamDefinition>() {
            {
                put("INPUT_STREAM_2", mockStreamDefinition("INPUT_STREAM_2"));
            }
        });
        Assert.assertFalse(validation.isSuccess());
    }

    // The query inserts into OUTPUT_STREAM_1 but the policy declares
    // OUTPUT_STREAM_2 as its output: validation must fail.
    @Test
    public void testInvalidPolicyForNotDefinedOutputStream() {
        PolicyDefinition policyDefinition = new PolicyDefinition();
        policyDefinition.setName("test_policy");
        policyDefinition.setInputStreams(Collections.singletonList("INPUT_STREAM_1"));
        policyDefinition.setOutputStreams(Collections.singletonList("OUTPUT_STREAM_2"));
        PolicyDefinition.Definition definition = new PolicyDefinition.Definition();
        definition.setType("siddhi");
        definition.setValue("from INPUT_STREAM_1[value > 90.0] select * group by name insert into OUTPUT_STREAM_1;");
        definition.setInputStreams(policyDefinition.getInputStreams());
        definition.setOutputStreams(policyDefinition.getOutputStreams());
        policyDefinition.setDefinition(definition);
        PolicyValidationResult validation = PolicyInterpreter.validate(policyDefinition, new HashMap<String, StreamDefinition>() {
            {
                put("INPUT_STREAM_1", mockStreamDefinition("INPUT_STREAM_1"));
            }
        });
        Assert.assertFalse(validation.isSuccess());
    }

    // ---------------------
    // Two Stream Test Cases
    // ---------------------

    // Two distinct input streams with different windows produce two partitions,
    // each keeping its own sort-spec window period.
    @Test
    public void testParseTwoStreamPolicyQueryWithMultiplePartition() throws Exception {
        PolicyExecutionPlan executionPlan = PolicyInterpreter.parseExecutionPlan(
            "from HDFS_AUDIT_LOG_ENRICHED_STREAM_SANDBOX_1#window.externalTime(timestamp, 1 min) "
                + "select cmd,user, count() as total_count group by cmd,user insert into HDFS_AUDIT_LOG_ENRICHED_STREAM_SANDBOX_OUT_1;"
                + "from HDFS_AUDIT_LOG_ENRICHED_STREAM_SANDBOX_2#window.externalTime(timestamp, 1 hour) "
                + "select cmd,user, count() as total_count group by cmd,user insert into HDFS_AUDIT_LOG_ENRICHED_STREAM_SANDBOX_OUT_2;"
        );
        Assert.assertTrue(executionPlan.getInputStreams().containsKey("HDFS_AUDIT_LOG_ENRICHED_STREAM_SANDBOX_1"));
        Assert.assertTrue(executionPlan.getInputStreams().containsKey("HDFS_AUDIT_LOG_ENRICHED_STREAM_SANDBOX_2"));
        Assert.assertTrue(executionPlan.getOutputStreams().containsKey("HDFS_AUDIT_LOG_ENRICHED_STREAM_SANDBOX_OUT_1"));
        Assert.assertTrue(executionPlan.getOutputStreams().containsKey("HDFS_AUDIT_LOG_ENRICHED_STREAM_SANDBOX_OUT_2"));
        Assert.assertEquals(2, executionPlan.getStreamPartitions().size());
        Assert.assertEquals(60 * 1000, executionPlan.getStreamPartitions().get(0).getSortSpec().getWindowPeriodMillis());
        Assert.assertEquals(60 * 60 * 1000, executionPlan.getStreamPartitions().get(1).getSortSpec().getWindowPeriodMillis());
    }

    // A windowed query plus an un-windowed pass-through query: the second
    // stream falls back to a SHUFFLE partition with no sort spec.
    @Test
    public void testParseTwoStreamPolicyQueryWithSinglePartition() throws Exception {
        PolicyExecutionPlan executionPlan = PolicyInterpreter.parseExecutionPlan(
            "from HDFS_AUDIT_LOG_ENRICHED_STREAM_SANDBOX_1#window.externalTime(timestamp, 1 min) "
                + "select cmd,user, count() as total_count group by cmd,user insert into HDFS_AUDIT_LOG_ENRICHED_STREAM_SANDBOX_OUT_1;"
                + "from HDFS_AUDIT_LOG_ENRICHED_STREAM_SANDBOX_2 select * insert into HDFS_AUDIT_LOG_ENRICHED_STREAM_SANDBOX_OUT_2;"
        );
        Assert.assertTrue(executionPlan.getInputStreams().containsKey("HDFS_AUDIT_LOG_ENRICHED_STREAM_SANDBOX_1"));
        Assert.assertTrue(executionPlan.getInputStreams().containsKey("HDFS_AUDIT_LOG_ENRICHED_STREAM_SANDBOX_2"));
        Assert.assertTrue(executionPlan.getOutputStreams().containsKey("HDFS_AUDIT_LOG_ENRICHED_STREAM_SANDBOX_OUT_1"));
        Assert.assertTrue(executionPlan.getOutputStreams().containsKey("HDFS_AUDIT_LOG_ENRICHED_STREAM_SANDBOX_OUT_2"));
        Assert.assertEquals(2, executionPlan.getStreamPartitions().size());
        Assert.assertEquals(60 * 1000, executionPlan.getStreamPartitions().get(0).getSortSpec().getWindowPeriodMillis());
        Assert.assertEquals(StreamPartition.Type.SHUFFLE, executionPlan.getStreamPartitions().get(1).getType());
    }

    // An inner join without a group-by gives both sides SHUFFLE partitions;
    // only the externalTime side (1000 sec -> 1000000 ms) carries a sort spec.
    @Test
    public void testParseTwoStreamPolicyQueryInnerJoin() throws Exception {
        PolicyExecutionPlan executionPlan = PolicyInterpreter.parseExecutionPlan(
            "from TickEvent[symbol=='EBAY']#window.length(2000) "
                + "join NewsEvent#window.externalTime(timestamp, 1000 sec) \n"
                + "select * insert into JoinStream"
        );
        Assert.assertTrue(executionPlan.getInputStreams().containsKey("TickEvent"));
        Assert.assertTrue(executionPlan.getInputStreams().containsKey("NewsEvent"));
        Assert.assertTrue(executionPlan.getOutputStreams().containsKey("JoinStream"));
        Assert.assertEquals(StreamPartition.Type.SHUFFLE, executionPlan.getStreamPartitions().get(0).getType());
        Assert.assertNotNull(executionPlan.getStreamPartitions().get(0).getSortSpec());
        Assert.assertEquals(1000 * 1000, executionPlan.getStreamPartitions().get(0).getSortSpec().getWindowPeriodMillis());
        Assert.assertEquals(StreamPartition.Type.SHUFFLE, executionPlan.getStreamPartitions().get(1).getType());
        Assert.assertNull(executionPlan.getStreamPartitions().get(1).getSortSpec());
    }

    // Inner join with an explicit join condition and a unidirectional source.
    // NOTE: this method continues beyond the visible chunk boundary.
    @Test
    public void testParseTwoStreamPolicyQueryInnerJoinWithCondition() throws Exception {
        PolicyExecutionPlan executionPlan = PolicyInterpreter.parseExecutionPlan(
            "from TickEvent[symbol=='EBAY']#window.length(2000) as t unidirectional \n" +
"join NewsEvent#window.externalTime(timestamp, 1000 sec) as n \n" + "on TickEvent.symbol == NewsEvent.company \n" + "insert into JoinStream " ); Assert.assertTrue(executionPlan.getInputStreams().containsKey("TickEvent")); Assert.assertTrue(executionPlan.getInputStreams().containsKey("NewsEvent")); Assert.assertTrue(executionPlan.getOutputStreams().containsKey("JoinStream")); Assert.assertEquals(StreamPartition.Type.SHUFFLE, executionPlan.getStreamPartitions().get(0).getType()); Assert.assertNotNull(executionPlan.getStreamPartitions().get(0).getSortSpec()); Assert.assertEquals(1000*1000, executionPlan.getStreamPartitions().get(0).getSortSpec().getWindowPeriodMillis()); Assert.assertEquals(StreamPartition.Type.GROUPBY, executionPlan.getStreamPartitions().get(1).getType()); Assert.assertNull(executionPlan.getStreamPartitions().get(1).getSortSpec()); } @Test public void testParseTwoStreamPolicyQueryInnerJoinWithConditionHavingAlias() throws Exception { PolicyExecutionPlan executionPlan = PolicyInterpreter.parseExecutionPlan( "from TickEvent[symbol=='EBAY']#window.length(2000) as t unidirectional \n" + "join NewsEvent#window.externalTime(timestamp, 1000 sec) as n \n" + "on t.symbol == n.company \n" + "insert into JoinStream " ); Assert.assertTrue(executionPlan.getInputStreams().containsKey("TickEvent")); Assert.assertTrue(executionPlan.getInputStreams().containsKey("NewsEvent")); Assert.assertTrue(executionPlan.getOutputStreams().containsKey("JoinStream")); Assert.assertEquals(StreamPartition.Type.SHUFFLE, executionPlan.getStreamPartitions().get(0).getType()); Assert.assertNotNull(executionPlan.getStreamPartitions().get(0).getSortSpec()); Assert.assertEquals(1000*1000, executionPlan.getStreamPartitions().get(0).getSortSpec().getWindowPeriodMillis()); Assert.assertEquals(StreamPartition.Type.GROUPBY, executionPlan.getStreamPartitions().get(1).getType()); Assert.assertNull(executionPlan.getStreamPartitions().get(1).getSortSpec()); } @Test(expected = 
DefinitionNotExistException.class) public void testParseTwoStreamPolicyQueryInnerJoinWithConditionHavingNotFoundAlias() throws Exception { PolicyInterpreter.parseExecutionPlan( "from TickEvent[symbol=='EBAY']#window.length(2000) as t unidirectional \n" + "join NewsEvent#window.externalTime(timestamp, 1000 sec) as n \n" + "on t.symbol == NOT_EXIST_ALIAS.company \n" + "insert into JoinStream " ); } private static final ObjectMapper mapper = new ObjectMapper(); @Test public void testLeftJoin() throws Exception { PolicyDefinition def = mapper.readValue(PolicyInterpreterTest.class.getResourceAsStream("/interpreter/policy.json"), PolicyDefinition.class); ArrayNode array = (ArrayNode)mapper.readTree(PolicyInterpreterTest.class.getResourceAsStream("/interpreter/streams.json")); Map<String, StreamDefinition> allDefinitions = new HashMap<>(); for(JsonNode node : array) { StreamDefinition streamDef = mapper.readValue(node.toString(), StreamDefinition.class); allDefinitions.put(streamDef.getStreamId(), streamDef); } PolicyValidationResult result = PolicyInterpreter.validate(def, allDefinitions); Assert.assertTrue(result.isSuccess()); } @Test public void testExtendPolicy() throws Exception { PolicyDefinition policyDefinition = new PolicyDefinition(); policyDefinition.setName("test-extend-policy"); policyDefinition.setInputStreams(Collections.singletonList("INPUT_STREAM_1")); policyDefinition.setOutputStreams(Collections.singletonList("OUTPUT_STREAM_1")); PolicyDefinition.Definition definition = new PolicyDefinition.Definition(); definition.setType(PolicyStreamHandlers.CUSTOMIZED_ENGINE); policyDefinition.setDefinition(definition); Map<String, StreamDefinition> allDefinitions = new HashMap<>(); allDefinitions.put("INPUT_STREAM_1", mockStreamDefinition("INPUT_STREAM_1")); PolicyValidationResult result = PolicyInterpreter.validate(policyDefinition, allDefinitions); Assert.assertTrue(result.isSuccess()); } // -------------- // Helper Methods // -------------- private static 
StreamDefinition mockStreamDefinition(String streamId) { StreamDefinition streamDefinition = new StreamDefinition(); streamDefinition.setStreamId(streamId); List<StreamColumn> columns = new ArrayList<>(); columns.add(new StreamColumn.Builder().name("name").type(StreamColumn.Type.STRING).build()); columns.add(new StreamColumn.Builder().name("value").type(StreamColumn.Type.DOUBLE).build()); columns.add(new StreamColumn.Builder().name("timestamp").type(StreamColumn.Type.LONG).build()); streamDefinition.setColumns(columns); return streamDefinition; } @Test public void testValidPolicyWithPattern() { PolicyDefinition policyDefinition = new PolicyDefinition(); policyDefinition.setName("test_policy"); policyDefinition.setInputStreams(Collections.singletonList("HADOOP_JMX_METRIC_STREAM_1")); policyDefinition.setOutputStreams(Collections.singletonList("HADOOP_JMX_METRIC_STREAM_1_MISS_BLOCKS_OUT")); PolicyDefinition.Definition definition = new PolicyDefinition.Definition(); definition.setType("siddhi"); String policy = "from every a = HADOOP_JMX_METRIC_STREAM_1[component==\"namenode\" and metric == \"hadoop.namenode.dfs.missingblocks\"] " + "-> b = HADOOP_JMX_METRIC_STREAM_1[b.component==a.component and b.metric==a.metric and b.host==a.host and convert(b.value, \"long\") > convert(a.value, \"long\") ] " + "select b.metric, b.host as host, convert(b.value, \"long\") as newNumOfMissingBlocks, convert(a.value, \"long\") as oldNumOfMissingBlocks, b.timestamp as timestamp, b.component as component, b.site as site " + "group by b.metric insert into HADOOP_JMX_METRIC_STREAM_1_MISS_BLOCKS_OUT;"; definition.setValue(policy); definition.setInputStreams(policyDefinition.getInputStreams()); definition.setOutputStreams(policyDefinition.getOutputStreams()); policyDefinition.setDefinition(definition); PolicyValidationResult validation = PolicyInterpreter.validate(policyDefinition, new HashMap<String, StreamDefinition>() { { put("HADOOP_JMX_METRIC_STREAM_1", 
mockStreamDefinition("HADOOP_JMX_METRIC_STREAM_1")); } }); Assert.assertTrue(validation.isSuccess()); Assert.assertEquals(1, validation.getPolicyExecutionPlan().getInputStreams().size()); Assert.assertEquals(1, validation.getPolicyExecutionPlan().getOutputStreams().size()); Assert.assertEquals(1, validation.getPolicyExecutionPlan().getStreamPartitions().size()); Assert.assertNull(validation.getPolicyExecutionPlan().getStreamPartitions().get(0).getSortSpec()); Assert.assertEquals(StreamPartition.Type.GROUPBY, validation.getPolicyExecutionPlan().getStreamPartitions().get(0).getType()); Assert.assertArrayEquals(new String[]{"metric"}, validation.getPolicyExecutionPlan().getStreamPartitions().get(0).getColumns().toArray()); Assert.assertEquals("HADOOP_JMX_METRIC_STREAM_1", validation.getPolicyExecutionPlan().getStreamPartitions().get(0).getStreamId()); } @Test public void testValidPolicyWithPatternSort() { PolicyDefinition policyDefinition = new PolicyDefinition(); policyDefinition.setName("test_policy"); policyDefinition.setInputStreams(Collections.singletonList("HADOOP_JMX_METRIC_STREAM_1")); policyDefinition.setOutputStreams(Collections.singletonList("HADOOP_JMX_METRIC_STREAM_1_MISS_BLOCKS_OUT")); PolicyDefinition.Definition definition = new PolicyDefinition.Definition(); definition.setType("siddhi"); String policy = "from HADOOP_JMX_METRIC_STREAM_1[metric == \"hadoop.namenode.dfs.missingblocks\"]#window.externalTime(timestamp, 1 min) " + "select * group by site, host, component, metric insert into temp;\n" + "\n" + "from every a = HADOOP_JMX_METRIC_STREAM_1[metric == \"hadoop.namenode.dfs.missingblocks\"] -> b = HADOOP_JMX_METRIC_STREAM_1[b.component == a.component and b.metric == a.metric and b.host == a.host and convert(b.value, \"long\") > convert(a.value, \"long\") ] " + "select b.site, b.host, b.component, b.metric, convert(b.value, \"long\") as newNumOfMissingBlocks, convert(a.value, \"long\") as oldNumOfMissingBlocks, max(b.timestamp) as timestamp " + "group 
by b.site, b.host, b.component, b.metric insert into HADOOP_JMX_METRIC_STREAM_1_MISS_BLOCKS_OUT;"; definition.setValue(policy); definition.setInputStreams(policyDefinition.getInputStreams()); definition.setOutputStreams(policyDefinition.getOutputStreams()); policyDefinition.setDefinition(definition); PolicyValidationResult validation = PolicyInterpreter.validate(policyDefinition, new HashMap<String, StreamDefinition>() { { put("HADOOP_JMX_METRIC_STREAM_1", mockStreamDefinition("HADOOP_JMX_METRIC_STREAM_1")); } }); Assert.assertTrue(validation.isSuccess()); Assert.assertEquals(1, validation.getPolicyExecutionPlan().getInputStreams().size()); Assert.assertEquals(2, validation.getPolicyExecutionPlan().getOutputStreams().size()); Assert.assertEquals(1, validation.getPolicyExecutionPlan().getStreamPartitions().size()); Assert.assertNotNull(validation.getPolicyExecutionPlan().getStreamPartitions().get(0).getSortSpec()); Assert.assertEquals(60000, validation.getPolicyExecutionPlan().getStreamPartitions().get(0).getSortSpec().getWindowPeriodMillis()); Assert.assertEquals(12000, validation.getPolicyExecutionPlan().getStreamPartitions().get(0).getSortSpec().getWindowMargin()); Assert.assertEquals(StreamPartition.Type.GROUPBY, validation.getPolicyExecutionPlan().getStreamPartitions().get(0).getType()); Assert.assertArrayEquals(new String[]{"site", "host", "component", "metric"}, validation.getPolicyExecutionPlan().getStreamPartitions().get(0).getColumns().toArray()); Assert.assertEquals("HADOOP_JMX_METRIC_STREAM_1", validation.getPolicyExecutionPlan().getStreamPartitions().get(0).getStreamId()); } @Test public void testValidPolicyWithSequence() { PolicyDefinition policyDefinition = new PolicyDefinition(); policyDefinition.setName("test_policy"); policyDefinition.setInputStreams(Collections.singletonList("HADOOP_JMX_METRIC_STREAM_1")); policyDefinition.setOutputStreams(Collections.singletonList("HADOOP_JMX_METRIC_STREAM_1_MISS_BLOCKS_OUT")); PolicyDefinition.Definition 
definition = new PolicyDefinition.Definition(); definition.setType("siddhi"); String policy = "from every a = HADOOP_JMX_METRIC_STREAM_1[component==\"namenode\" and metric == \"hadoop.namenode.dfs.missingblocks\"] " + ", b = HADOOP_JMX_METRIC_STREAM_1[b.component==a.component and b.metric==a.metric and b.host==a.host and convert(b.value, \"long\") > convert(a.value, \"long\") ] " + "select b.metric, b.host as host, convert(b.value, \"long\") as newNumOfMissingBlocks, convert(a.value, \"long\") as oldNumOfMissingBlocks, b.timestamp as timestamp, b.component as component, b.site as site " + "group by b.metric insert into HADOOP_JMX_METRIC_STREAM_1_MISS_BLOCKS_OUT;"; definition.setValue(policy); definition.setInputStreams(policyDefinition.getInputStreams()); definition.setOutputStreams(policyDefinition.getOutputStreams()); policyDefinition.setDefinition(definition); PolicyValidationResult validation = PolicyInterpreter.validate(policyDefinition, new HashMap<String, StreamDefinition>() { { put("HADOOP_JMX_METRIC_STREAM_1", mockStreamDefinition("HADOOP_JMX_METRIC_STREAM_1")); } }); Assert.assertTrue(validation.isSuccess()); Assert.assertEquals(1, validation.getPolicyExecutionPlan().getInputStreams().size()); Assert.assertEquals(1, validation.getPolicyExecutionPlan().getOutputStreams().size()); Assert.assertEquals(1, validation.getPolicyExecutionPlan().getStreamPartitions().size()); Assert.assertNull(validation.getPolicyExecutionPlan().getStreamPartitions().get(0).getSortSpec()); Assert.assertEquals(StreamPartition.Type.GROUPBY, validation.getPolicyExecutionPlan().getStreamPartitions().get(0).getType()); Assert.assertArrayEquals(new String[]{"metric"}, validation.getPolicyExecutionPlan().getStreamPartitions().get(0).getColumns().toArray()); Assert.assertEquals("HADOOP_JMX_METRIC_STREAM_1", validation.getPolicyExecutionPlan().getStreamPartitions().get(0).getStreamId()); } @Test public void testValidPolicyWithSequenceSort() { PolicyDefinition policyDefinition = new 
PolicyDefinition(); policyDefinition.setName("test_policy"); policyDefinition.setInputStreams(Collections.singletonList("HADOOP_JMX_METRIC_STREAM_1")); policyDefinition.setOutputStreams(Collections.singletonList("HADOOP_JMX_METRIC_STREAM_1_MISS_BLOCKS_OUT")); PolicyDefinition.Definition definition = new PolicyDefinition.Definition(); definition.setType("siddhi"); String policy = "from HADOOP_JMX_METRIC_STREAM_1[metric == \"hadoop.namenode.dfs.missingblocks\"]#window.externalTime(timestamp, 1 min) " + "select * group by site, host, component, metric insert into temp;\n" + "\n" + "from every a = HADOOP_JMX_METRIC_STREAM_1[metric == \"hadoop.namenode.dfs.missingblocks\"], b = HADOOP_JMX_METRIC_STREAM_1[b.component == a.component and b.metric == a.metric and b.host == a.host and convert(b.value, \"long\") > convert(a.value, \"long\") ] " + "select b.site, b.host, b.component, b.metric, convert(b.value, \"long\") as newNumOfMissingBlocks, convert(a.value, \"long\") as oldNumOfMissingBlocks, max(b.timestamp) as timestamp " + "group by b.site, b.host, b.component, b.metric insert into HADOOP_JMX_METRIC_STREAM_1_MISS_BLOCKS_OUT;"; definition.setValue(policy); definition.setInputStreams(policyDefinition.getInputStreams()); definition.setOutputStreams(policyDefinition.getOutputStreams()); policyDefinition.setDefinition(definition); PolicyValidationResult validation = PolicyInterpreter.validate(policyDefinition, new HashMap<String, StreamDefinition>() { { put("HADOOP_JMX_METRIC_STREAM_1", mockStreamDefinition("HADOOP_JMX_METRIC_STREAM_1")); } }); Assert.assertTrue(validation.isSuccess()); Assert.assertEquals(1, validation.getPolicyExecutionPlan().getInputStreams().size()); Assert.assertEquals(2, validation.getPolicyExecutionPlan().getOutputStreams().size()); Assert.assertEquals(1, validation.getPolicyExecutionPlan().getStreamPartitions().size()); Assert.assertNotNull(validation.getPolicyExecutionPlan().getStreamPartitions().get(0).getSortSpec()); Assert.assertEquals(60000, 
validation.getPolicyExecutionPlan().getStreamPartitions().get(0).getSortSpec().getWindowPeriodMillis()); Assert.assertEquals(12000, validation.getPolicyExecutionPlan().getStreamPartitions().get(0).getSortSpec().getWindowMargin()); Assert.assertEquals(StreamPartition.Type.GROUPBY, validation.getPolicyExecutionPlan().getStreamPartitions().get(0).getType()); Assert.assertArrayEquals(new String[]{"site", "host", "component", "metric"}, validation.getPolicyExecutionPlan().getStreamPartitions().get(0).getColumns().toArray()); Assert.assertEquals("HADOOP_JMX_METRIC_STREAM_1", validation.getPolicyExecutionPlan().getStreamPartitions().get(0).getStreamId()); } }
googleapis/google-cloud-java
36,551
java-kms/proto-google-cloud-kms-v1/src/main/java/com/google/cloud/kms/v1/ListKeyHandlesResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/kms/v1/autokey.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.kms.v1; /** * * * <pre> * Response message for * [Autokey.ListKeyHandles][google.cloud.kms.v1.Autokey.ListKeyHandles]. * </pre> * * Protobuf type {@code google.cloud.kms.v1.ListKeyHandlesResponse} */ public final class ListKeyHandlesResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.kms.v1.ListKeyHandlesResponse) ListKeyHandlesResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListKeyHandlesResponse.newBuilder() to construct. 
private ListKeyHandlesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListKeyHandlesResponse() { keyHandles_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListKeyHandlesResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.kms.v1.AutokeyProto .internal_static_google_cloud_kms_v1_ListKeyHandlesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.kms.v1.AutokeyProto .internal_static_google_cloud_kms_v1_ListKeyHandlesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.kms.v1.ListKeyHandlesResponse.class, com.google.cloud.kms.v1.ListKeyHandlesResponse.Builder.class); } public static final int KEY_HANDLES_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.kms.v1.KeyHandle> keyHandles_; /** * * * <pre> * Resulting [KeyHandles][google.cloud.kms.v1.KeyHandle]. * </pre> * * <code>repeated .google.cloud.kms.v1.KeyHandle key_handles = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.kms.v1.KeyHandle> getKeyHandlesList() { return keyHandles_; } /** * * * <pre> * Resulting [KeyHandles][google.cloud.kms.v1.KeyHandle]. * </pre> * * <code>repeated .google.cloud.kms.v1.KeyHandle key_handles = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.kms.v1.KeyHandleOrBuilder> getKeyHandlesOrBuilderList() { return keyHandles_; } /** * * * <pre> * Resulting [KeyHandles][google.cloud.kms.v1.KeyHandle]. 
* </pre> * * <code>repeated .google.cloud.kms.v1.KeyHandle key_handles = 1;</code> */ @java.lang.Override public int getKeyHandlesCount() { return keyHandles_.size(); } /** * * * <pre> * Resulting [KeyHandles][google.cloud.kms.v1.KeyHandle]. * </pre> * * <code>repeated .google.cloud.kms.v1.KeyHandle key_handles = 1;</code> */ @java.lang.Override public com.google.cloud.kms.v1.KeyHandle getKeyHandles(int index) { return keyHandles_.get(index); } /** * * * <pre> * Resulting [KeyHandles][google.cloud.kms.v1.KeyHandle]. * </pre> * * <code>repeated .google.cloud.kms.v1.KeyHandle key_handles = 1;</code> */ @java.lang.Override public com.google.cloud.kms.v1.KeyHandleOrBuilder getKeyHandlesOrBuilder(int index) { return keyHandles_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token to retrieve next page of results. Pass this value in * [ListKeyHandlesRequest.page_token][google.cloud.kms.v1.ListKeyHandlesRequest.page_token] * to retrieve the next page of results. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * A token to retrieve next page of results. Pass this value in * [ListKeyHandlesRequest.page_token][google.cloud.kms.v1.ListKeyHandlesRequest.page_token] * to retrieve the next page of results. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < keyHandles_.size(); i++) { output.writeMessage(1, keyHandles_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < keyHandles_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, keyHandles_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.kms.v1.ListKeyHandlesResponse)) { return super.equals(obj); } com.google.cloud.kms.v1.ListKeyHandlesResponse other = (com.google.cloud.kms.v1.ListKeyHandlesResponse) obj; if (!getKeyHandlesList().equals(other.getKeyHandlesList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getKeyHandlesCount() > 0) { hash = (37 * hash) + KEY_HANDLES_FIELD_NUMBER; hash = (53 * hash) + getKeyHandlesList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.kms.v1.ListKeyHandlesResponse parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.kms.v1.ListKeyHandlesResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.kms.v1.ListKeyHandlesResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.kms.v1.ListKeyHandlesResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.kms.v1.ListKeyHandlesResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.kms.v1.ListKeyHandlesResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.cloud.kms.v1.ListKeyHandlesResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.kms.v1.ListKeyHandlesResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.kms.v1.ListKeyHandlesResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.kms.v1.ListKeyHandlesResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.kms.v1.ListKeyHandlesResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.kms.v1.ListKeyHandlesResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.kms.v1.ListKeyHandlesResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response message for * [Autokey.ListKeyHandles][google.cloud.kms.v1.Autokey.ListKeyHandles]. * </pre> * * Protobuf type {@code google.cloud.kms.v1.ListKeyHandlesResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.kms.v1.ListKeyHandlesResponse) com.google.cloud.kms.v1.ListKeyHandlesResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.kms.v1.AutokeyProto .internal_static_google_cloud_kms_v1_ListKeyHandlesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.kms.v1.AutokeyProto .internal_static_google_cloud_kms_v1_ListKeyHandlesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.kms.v1.ListKeyHandlesResponse.class, com.google.cloud.kms.v1.ListKeyHandlesResponse.Builder.class); } // Construct using com.google.cloud.kms.v1.ListKeyHandlesResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (keyHandlesBuilder_ == null) { keyHandles_ = java.util.Collections.emptyList(); } else { keyHandles_ = null; keyHandlesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.kms.v1.AutokeyProto .internal_static_google_cloud_kms_v1_ListKeyHandlesResponse_descriptor; } @java.lang.Override 
public com.google.cloud.kms.v1.ListKeyHandlesResponse getDefaultInstanceForType() { return com.google.cloud.kms.v1.ListKeyHandlesResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.kms.v1.ListKeyHandlesResponse build() { com.google.cloud.kms.v1.ListKeyHandlesResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.kms.v1.ListKeyHandlesResponse buildPartial() { com.google.cloud.kms.v1.ListKeyHandlesResponse result = new com.google.cloud.kms.v1.ListKeyHandlesResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields(com.google.cloud.kms.v1.ListKeyHandlesResponse result) { if (keyHandlesBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { keyHandles_ = java.util.Collections.unmodifiableList(keyHandles_); bitField0_ = (bitField0_ & ~0x00000001); } result.keyHandles_ = keyHandles_; } else { result.keyHandles_ = keyHandlesBuilder_.build(); } } private void buildPartial0(com.google.cloud.kms.v1.ListKeyHandlesResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return 
super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.kms.v1.ListKeyHandlesResponse) { return mergeFrom((com.google.cloud.kms.v1.ListKeyHandlesResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.kms.v1.ListKeyHandlesResponse other) { if (other == com.google.cloud.kms.v1.ListKeyHandlesResponse.getDefaultInstance()) return this; if (keyHandlesBuilder_ == null) { if (!other.keyHandles_.isEmpty()) { if (keyHandles_.isEmpty()) { keyHandles_ = other.keyHandles_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureKeyHandlesIsMutable(); keyHandles_.addAll(other.keyHandles_); } onChanged(); } } else { if (!other.keyHandles_.isEmpty()) { if (keyHandlesBuilder_.isEmpty()) { keyHandlesBuilder_.dispose(); keyHandlesBuilder_ = null; keyHandles_ = other.keyHandles_; bitField0_ = (bitField0_ & ~0x00000001); keyHandlesBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getKeyHandlesFieldBuilder() : null; } else { keyHandlesBuilder_.addAllMessages(other.keyHandles_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.kms.v1.KeyHandle m = input.readMessage( com.google.cloud.kms.v1.KeyHandle.parser(), extensionRegistry); if (keyHandlesBuilder_ == null) { ensureKeyHandlesIsMutable(); keyHandles_.add(m); } else { keyHandlesBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.kms.v1.KeyHandle> keyHandles_ = java.util.Collections.emptyList(); private void ensureKeyHandlesIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { keyHandles_ = new java.util.ArrayList<com.google.cloud.kms.v1.KeyHandle>(keyHandles_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.kms.v1.KeyHandle, com.google.cloud.kms.v1.KeyHandle.Builder, com.google.cloud.kms.v1.KeyHandleOrBuilder> keyHandlesBuilder_; /** * * * <pre> * 
Resulting [KeyHandles][google.cloud.kms.v1.KeyHandle]. * </pre> * * <code>repeated .google.cloud.kms.v1.KeyHandle key_handles = 1;</code> */ public java.util.List<com.google.cloud.kms.v1.KeyHandle> getKeyHandlesList() { if (keyHandlesBuilder_ == null) { return java.util.Collections.unmodifiableList(keyHandles_); } else { return keyHandlesBuilder_.getMessageList(); } } /** * * * <pre> * Resulting [KeyHandles][google.cloud.kms.v1.KeyHandle]. * </pre> * * <code>repeated .google.cloud.kms.v1.KeyHandle key_handles = 1;</code> */ public int getKeyHandlesCount() { if (keyHandlesBuilder_ == null) { return keyHandles_.size(); } else { return keyHandlesBuilder_.getCount(); } } /** * * * <pre> * Resulting [KeyHandles][google.cloud.kms.v1.KeyHandle]. * </pre> * * <code>repeated .google.cloud.kms.v1.KeyHandle key_handles = 1;</code> */ public com.google.cloud.kms.v1.KeyHandle getKeyHandles(int index) { if (keyHandlesBuilder_ == null) { return keyHandles_.get(index); } else { return keyHandlesBuilder_.getMessage(index); } } /** * * * <pre> * Resulting [KeyHandles][google.cloud.kms.v1.KeyHandle]. * </pre> * * <code>repeated .google.cloud.kms.v1.KeyHandle key_handles = 1;</code> */ public Builder setKeyHandles(int index, com.google.cloud.kms.v1.KeyHandle value) { if (keyHandlesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureKeyHandlesIsMutable(); keyHandles_.set(index, value); onChanged(); } else { keyHandlesBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * Resulting [KeyHandles][google.cloud.kms.v1.KeyHandle]. 
* </pre> * * <code>repeated .google.cloud.kms.v1.KeyHandle key_handles = 1;</code> */ public Builder setKeyHandles( int index, com.google.cloud.kms.v1.KeyHandle.Builder builderForValue) { if (keyHandlesBuilder_ == null) { ensureKeyHandlesIsMutable(); keyHandles_.set(index, builderForValue.build()); onChanged(); } else { keyHandlesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Resulting [KeyHandles][google.cloud.kms.v1.KeyHandle]. * </pre> * * <code>repeated .google.cloud.kms.v1.KeyHandle key_handles = 1;</code> */ public Builder addKeyHandles(com.google.cloud.kms.v1.KeyHandle value) { if (keyHandlesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureKeyHandlesIsMutable(); keyHandles_.add(value); onChanged(); } else { keyHandlesBuilder_.addMessage(value); } return this; } /** * * * <pre> * Resulting [KeyHandles][google.cloud.kms.v1.KeyHandle]. * </pre> * * <code>repeated .google.cloud.kms.v1.KeyHandle key_handles = 1;</code> */ public Builder addKeyHandles(int index, com.google.cloud.kms.v1.KeyHandle value) { if (keyHandlesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureKeyHandlesIsMutable(); keyHandles_.add(index, value); onChanged(); } else { keyHandlesBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * Resulting [KeyHandles][google.cloud.kms.v1.KeyHandle]. * </pre> * * <code>repeated .google.cloud.kms.v1.KeyHandle key_handles = 1;</code> */ public Builder addKeyHandles(com.google.cloud.kms.v1.KeyHandle.Builder builderForValue) { if (keyHandlesBuilder_ == null) { ensureKeyHandlesIsMutable(); keyHandles_.add(builderForValue.build()); onChanged(); } else { keyHandlesBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * Resulting [KeyHandles][google.cloud.kms.v1.KeyHandle]. 
* </pre> * * <code>repeated .google.cloud.kms.v1.KeyHandle key_handles = 1;</code> */ public Builder addKeyHandles( int index, com.google.cloud.kms.v1.KeyHandle.Builder builderForValue) { if (keyHandlesBuilder_ == null) { ensureKeyHandlesIsMutable(); keyHandles_.add(index, builderForValue.build()); onChanged(); } else { keyHandlesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Resulting [KeyHandles][google.cloud.kms.v1.KeyHandle]. * </pre> * * <code>repeated .google.cloud.kms.v1.KeyHandle key_handles = 1;</code> */ public Builder addAllKeyHandles( java.lang.Iterable<? extends com.google.cloud.kms.v1.KeyHandle> values) { if (keyHandlesBuilder_ == null) { ensureKeyHandlesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, keyHandles_); onChanged(); } else { keyHandlesBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * Resulting [KeyHandles][google.cloud.kms.v1.KeyHandle]. * </pre> * * <code>repeated .google.cloud.kms.v1.KeyHandle key_handles = 1;</code> */ public Builder clearKeyHandles() { if (keyHandlesBuilder_ == null) { keyHandles_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { keyHandlesBuilder_.clear(); } return this; } /** * * * <pre> * Resulting [KeyHandles][google.cloud.kms.v1.KeyHandle]. * </pre> * * <code>repeated .google.cloud.kms.v1.KeyHandle key_handles = 1;</code> */ public Builder removeKeyHandles(int index) { if (keyHandlesBuilder_ == null) { ensureKeyHandlesIsMutable(); keyHandles_.remove(index); onChanged(); } else { keyHandlesBuilder_.remove(index); } return this; } /** * * * <pre> * Resulting [KeyHandles][google.cloud.kms.v1.KeyHandle]. 
* </pre> * * <code>repeated .google.cloud.kms.v1.KeyHandle key_handles = 1;</code> */ public com.google.cloud.kms.v1.KeyHandle.Builder getKeyHandlesBuilder(int index) { return getKeyHandlesFieldBuilder().getBuilder(index); } /** * * * <pre> * Resulting [KeyHandles][google.cloud.kms.v1.KeyHandle]. * </pre> * * <code>repeated .google.cloud.kms.v1.KeyHandle key_handles = 1;</code> */ public com.google.cloud.kms.v1.KeyHandleOrBuilder getKeyHandlesOrBuilder(int index) { if (keyHandlesBuilder_ == null) { return keyHandles_.get(index); } else { return keyHandlesBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * Resulting [KeyHandles][google.cloud.kms.v1.KeyHandle]. * </pre> * * <code>repeated .google.cloud.kms.v1.KeyHandle key_handles = 1;</code> */ public java.util.List<? extends com.google.cloud.kms.v1.KeyHandleOrBuilder> getKeyHandlesOrBuilderList() { if (keyHandlesBuilder_ != null) { return keyHandlesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(keyHandles_); } } /** * * * <pre> * Resulting [KeyHandles][google.cloud.kms.v1.KeyHandle]. * </pre> * * <code>repeated .google.cloud.kms.v1.KeyHandle key_handles = 1;</code> */ public com.google.cloud.kms.v1.KeyHandle.Builder addKeyHandlesBuilder() { return getKeyHandlesFieldBuilder() .addBuilder(com.google.cloud.kms.v1.KeyHandle.getDefaultInstance()); } /** * * * <pre> * Resulting [KeyHandles][google.cloud.kms.v1.KeyHandle]. * </pre> * * <code>repeated .google.cloud.kms.v1.KeyHandle key_handles = 1;</code> */ public com.google.cloud.kms.v1.KeyHandle.Builder addKeyHandlesBuilder(int index) { return getKeyHandlesFieldBuilder() .addBuilder(index, com.google.cloud.kms.v1.KeyHandle.getDefaultInstance()); } /** * * * <pre> * Resulting [KeyHandles][google.cloud.kms.v1.KeyHandle]. 
* </pre> * * <code>repeated .google.cloud.kms.v1.KeyHandle key_handles = 1;</code> */ public java.util.List<com.google.cloud.kms.v1.KeyHandle.Builder> getKeyHandlesBuilderList() { return getKeyHandlesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.kms.v1.KeyHandle, com.google.cloud.kms.v1.KeyHandle.Builder, com.google.cloud.kms.v1.KeyHandleOrBuilder> getKeyHandlesFieldBuilder() { if (keyHandlesBuilder_ == null) { keyHandlesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.kms.v1.KeyHandle, com.google.cloud.kms.v1.KeyHandle.Builder, com.google.cloud.kms.v1.KeyHandleOrBuilder>( keyHandles_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); keyHandles_ = null; } return keyHandlesBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token to retrieve next page of results. Pass this value in * [ListKeyHandlesRequest.page_token][google.cloud.kms.v1.ListKeyHandlesRequest.page_token] * to retrieve the next page of results. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A token to retrieve next page of results. Pass this value in * [ListKeyHandlesRequest.page_token][google.cloud.kms.v1.ListKeyHandlesRequest.page_token] * to retrieve the next page of results. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A token to retrieve next page of results. Pass this value in * [ListKeyHandlesRequest.page_token][google.cloud.kms.v1.ListKeyHandlesRequest.page_token] * to retrieve the next page of results. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A token to retrieve next page of results. Pass this value in * [ListKeyHandlesRequest.page_token][google.cloud.kms.v1.ListKeyHandlesRequest.page_token] * to retrieve the next page of results. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * A token to retrieve next page of results. Pass this value in * [ListKeyHandlesRequest.page_token][google.cloud.kms.v1.ListKeyHandlesRequest.page_token] * to retrieve the next page of results. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.kms.v1.ListKeyHandlesResponse) } // @@protoc_insertion_point(class_scope:google.cloud.kms.v1.ListKeyHandlesResponse) private static final com.google.cloud.kms.v1.ListKeyHandlesResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.kms.v1.ListKeyHandlesResponse(); } public static com.google.cloud.kms.v1.ListKeyHandlesResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListKeyHandlesResponse> PARSER = new com.google.protobuf.AbstractParser<ListKeyHandlesResponse>() { @java.lang.Override public ListKeyHandlesResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static 
com.google.protobuf.Parser<ListKeyHandlesResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListKeyHandlesResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.kms.v1.ListKeyHandlesResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
36,515
java-servicedirectory/proto-google-cloud-servicedirectory-v1/src/main/java/com/google/cloud/servicedirectory/v1/ListEndpointsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/servicedirectory/v1/registration_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.servicedirectory.v1; /** * * * <pre> * The response message for * [RegistrationService.ListEndpoints][google.cloud.servicedirectory.v1.RegistrationService.ListEndpoints]. * </pre> * * Protobuf type {@code google.cloud.servicedirectory.v1.ListEndpointsResponse} */ public final class ListEndpointsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.servicedirectory.v1.ListEndpointsResponse) ListEndpointsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListEndpointsResponse.newBuilder() to construct. 
private ListEndpointsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListEndpointsResponse() { endpoints_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListEndpointsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.servicedirectory.v1.RegistrationServiceProto .internal_static_google_cloud_servicedirectory_v1_ListEndpointsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.servicedirectory.v1.RegistrationServiceProto .internal_static_google_cloud_servicedirectory_v1_ListEndpointsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.servicedirectory.v1.ListEndpointsResponse.class, com.google.cloud.servicedirectory.v1.ListEndpointsResponse.Builder.class); } public static final int ENDPOINTS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.servicedirectory.v1.Endpoint> endpoints_; /** * * * <pre> * The list of endpoints. * </pre> * * <code>repeated .google.cloud.servicedirectory.v1.Endpoint endpoints = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.servicedirectory.v1.Endpoint> getEndpointsList() { return endpoints_; } /** * * * <pre> * The list of endpoints. * </pre> * * <code>repeated .google.cloud.servicedirectory.v1.Endpoint endpoints = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.servicedirectory.v1.EndpointOrBuilder> getEndpointsOrBuilderList() { return endpoints_; } /** * * * <pre> * The list of endpoints. 
* </pre> * * <code>repeated .google.cloud.servicedirectory.v1.Endpoint endpoints = 1;</code> */ @java.lang.Override public int getEndpointsCount() { return endpoints_.size(); } /** * * * <pre> * The list of endpoints. * </pre> * * <code>repeated .google.cloud.servicedirectory.v1.Endpoint endpoints = 1;</code> */ @java.lang.Override public com.google.cloud.servicedirectory.v1.Endpoint getEndpoints(int index) { return endpoints_.get(index); } /** * * * <pre> * The list of endpoints. * </pre> * * <code>repeated .google.cloud.servicedirectory.v1.Endpoint endpoints = 1;</code> */ @java.lang.Override public com.google.cloud.servicedirectory.v1.EndpointOrBuilder getEndpointsOrBuilder(int index) { return endpoints_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no * more results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no * more results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < endpoints_.size(); i++) { output.writeMessage(1, endpoints_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < endpoints_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, endpoints_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.servicedirectory.v1.ListEndpointsResponse)) { return super.equals(obj); } com.google.cloud.servicedirectory.v1.ListEndpointsResponse other = (com.google.cloud.servicedirectory.v1.ListEndpointsResponse) obj; if (!getEndpointsList().equals(other.getEndpointsList())) return false; if 
(!getNextPageToken().equals(other.getNextPageToken())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getEndpointsCount() > 0) { hash = (37 * hash) + ENDPOINTS_FIELD_NUMBER; hash = (53 * hash) + getEndpointsList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.servicedirectory.v1.ListEndpointsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.servicedirectory.v1.ListEndpointsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.servicedirectory.v1.ListEndpointsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.servicedirectory.v1.ListEndpointsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.servicedirectory.v1.ListEndpointsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.servicedirectory.v1.ListEndpointsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.servicedirectory.v1.ListEndpointsResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.servicedirectory.v1.ListEndpointsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.servicedirectory.v1.ListEndpointsResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.servicedirectory.v1.ListEndpointsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.servicedirectory.v1.ListEndpointsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.servicedirectory.v1.ListEndpointsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( 
com.google.cloud.servicedirectory.v1.ListEndpointsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The response message for * [RegistrationService.ListEndpoints][google.cloud.servicedirectory.v1.RegistrationService.ListEndpoints]. * </pre> * * Protobuf type {@code google.cloud.servicedirectory.v1.ListEndpointsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.servicedirectory.v1.ListEndpointsResponse) com.google.cloud.servicedirectory.v1.ListEndpointsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.servicedirectory.v1.RegistrationServiceProto .internal_static_google_cloud_servicedirectory_v1_ListEndpointsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.servicedirectory.v1.RegistrationServiceProto .internal_static_google_cloud_servicedirectory_v1_ListEndpointsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.servicedirectory.v1.ListEndpointsResponse.class, com.google.cloud.servicedirectory.v1.ListEndpointsResponse.Builder.class); } // Construct using com.google.cloud.servicedirectory.v1.ListEndpointsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (endpointsBuilder_ == null) { endpoints_ = 
java.util.Collections.emptyList(); } else { endpoints_ = null; endpointsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.servicedirectory.v1.RegistrationServiceProto .internal_static_google_cloud_servicedirectory_v1_ListEndpointsResponse_descriptor; } @java.lang.Override public com.google.cloud.servicedirectory.v1.ListEndpointsResponse getDefaultInstanceForType() { return com.google.cloud.servicedirectory.v1.ListEndpointsResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.servicedirectory.v1.ListEndpointsResponse build() { com.google.cloud.servicedirectory.v1.ListEndpointsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.servicedirectory.v1.ListEndpointsResponse buildPartial() { com.google.cloud.servicedirectory.v1.ListEndpointsResponse result = new com.google.cloud.servicedirectory.v1.ListEndpointsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.servicedirectory.v1.ListEndpointsResponse result) { if (endpointsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { endpoints_ = java.util.Collections.unmodifiableList(endpoints_); bitField0_ = (bitField0_ & ~0x00000001); } result.endpoints_ = endpoints_; } else { result.endpoints_ = endpointsBuilder_.build(); } } private void buildPartial0(com.google.cloud.servicedirectory.v1.ListEndpointsResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( 
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.servicedirectory.v1.ListEndpointsResponse) { return mergeFrom((com.google.cloud.servicedirectory.v1.ListEndpointsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.servicedirectory.v1.ListEndpointsResponse other) { if (other == com.google.cloud.servicedirectory.v1.ListEndpointsResponse.getDefaultInstance()) return this; if (endpointsBuilder_ == null) { if (!other.endpoints_.isEmpty()) { if (endpoints_.isEmpty()) { endpoints_ = other.endpoints_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureEndpointsIsMutable(); endpoints_.addAll(other.endpoints_); } onChanged(); } } else { if (!other.endpoints_.isEmpty()) { if (endpointsBuilder_.isEmpty()) { endpointsBuilder_.dispose(); endpointsBuilder_ = null; endpoints_ = other.endpoints_; bitField0_ = (bitField0_ & ~0x00000001); endpointsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getEndpointsFieldBuilder() : null; } else { endpointsBuilder_.addAllMessages(other.endpoints_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.servicedirectory.v1.Endpoint m = input.readMessage( com.google.cloud.servicedirectory.v1.Endpoint.parser(), extensionRegistry); if (endpointsBuilder_ == null) { ensureEndpointsIsMutable(); endpoints_.add(m); } else { endpointsBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.servicedirectory.v1.Endpoint> endpoints_ = java.util.Collections.emptyList(); private void ensureEndpointsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { endpoints_ = new java.util.ArrayList<com.google.cloud.servicedirectory.v1.Endpoint>(endpoints_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.servicedirectory.v1.Endpoint, com.google.cloud.servicedirectory.v1.Endpoint.Builder, 
com.google.cloud.servicedirectory.v1.EndpointOrBuilder> endpointsBuilder_; /** * * * <pre> * The list of endpoints. * </pre> * * <code>repeated .google.cloud.servicedirectory.v1.Endpoint endpoints = 1;</code> */ public java.util.List<com.google.cloud.servicedirectory.v1.Endpoint> getEndpointsList() { if (endpointsBuilder_ == null) { return java.util.Collections.unmodifiableList(endpoints_); } else { return endpointsBuilder_.getMessageList(); } } /** * * * <pre> * The list of endpoints. * </pre> * * <code>repeated .google.cloud.servicedirectory.v1.Endpoint endpoints = 1;</code> */ public int getEndpointsCount() { if (endpointsBuilder_ == null) { return endpoints_.size(); } else { return endpointsBuilder_.getCount(); } } /** * * * <pre> * The list of endpoints. * </pre> * * <code>repeated .google.cloud.servicedirectory.v1.Endpoint endpoints = 1;</code> */ public com.google.cloud.servicedirectory.v1.Endpoint getEndpoints(int index) { if (endpointsBuilder_ == null) { return endpoints_.get(index); } else { return endpointsBuilder_.getMessage(index); } } /** * * * <pre> * The list of endpoints. * </pre> * * <code>repeated .google.cloud.servicedirectory.v1.Endpoint endpoints = 1;</code> */ public Builder setEndpoints(int index, com.google.cloud.servicedirectory.v1.Endpoint value) { if (endpointsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureEndpointsIsMutable(); endpoints_.set(index, value); onChanged(); } else { endpointsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The list of endpoints. 
* </pre> * * <code>repeated .google.cloud.servicedirectory.v1.Endpoint endpoints = 1;</code> */ public Builder setEndpoints( int index, com.google.cloud.servicedirectory.v1.Endpoint.Builder builderForValue) { if (endpointsBuilder_ == null) { ensureEndpointsIsMutable(); endpoints_.set(index, builderForValue.build()); onChanged(); } else { endpointsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of endpoints. * </pre> * * <code>repeated .google.cloud.servicedirectory.v1.Endpoint endpoints = 1;</code> */ public Builder addEndpoints(com.google.cloud.servicedirectory.v1.Endpoint value) { if (endpointsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureEndpointsIsMutable(); endpoints_.add(value); onChanged(); } else { endpointsBuilder_.addMessage(value); } return this; } /** * * * <pre> * The list of endpoints. * </pre> * * <code>repeated .google.cloud.servicedirectory.v1.Endpoint endpoints = 1;</code> */ public Builder addEndpoints(int index, com.google.cloud.servicedirectory.v1.Endpoint value) { if (endpointsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureEndpointsIsMutable(); endpoints_.add(index, value); onChanged(); } else { endpointsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The list of endpoints. * </pre> * * <code>repeated .google.cloud.servicedirectory.v1.Endpoint endpoints = 1;</code> */ public Builder addEndpoints( com.google.cloud.servicedirectory.v1.Endpoint.Builder builderForValue) { if (endpointsBuilder_ == null) { ensureEndpointsIsMutable(); endpoints_.add(builderForValue.build()); onChanged(); } else { endpointsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The list of endpoints. 
* </pre> * * <code>repeated .google.cloud.servicedirectory.v1.Endpoint endpoints = 1;</code> */ public Builder addEndpoints( int index, com.google.cloud.servicedirectory.v1.Endpoint.Builder builderForValue) { if (endpointsBuilder_ == null) { ensureEndpointsIsMutable(); endpoints_.add(index, builderForValue.build()); onChanged(); } else { endpointsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of endpoints. * </pre> * * <code>repeated .google.cloud.servicedirectory.v1.Endpoint endpoints = 1;</code> */ public Builder addAllEndpoints( java.lang.Iterable<? extends com.google.cloud.servicedirectory.v1.Endpoint> values) { if (endpointsBuilder_ == null) { ensureEndpointsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, endpoints_); onChanged(); } else { endpointsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The list of endpoints. * </pre> * * <code>repeated .google.cloud.servicedirectory.v1.Endpoint endpoints = 1;</code> */ public Builder clearEndpoints() { if (endpointsBuilder_ == null) { endpoints_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { endpointsBuilder_.clear(); } return this; } /** * * * <pre> * The list of endpoints. * </pre> * * <code>repeated .google.cloud.servicedirectory.v1.Endpoint endpoints = 1;</code> */ public Builder removeEndpoints(int index) { if (endpointsBuilder_ == null) { ensureEndpointsIsMutable(); endpoints_.remove(index); onChanged(); } else { endpointsBuilder_.remove(index); } return this; } /** * * * <pre> * The list of endpoints. * </pre> * * <code>repeated .google.cloud.servicedirectory.v1.Endpoint endpoints = 1;</code> */ public com.google.cloud.servicedirectory.v1.Endpoint.Builder getEndpointsBuilder(int index) { return getEndpointsFieldBuilder().getBuilder(index); } /** * * * <pre> * The list of endpoints. 
* </pre> * * <code>repeated .google.cloud.servicedirectory.v1.Endpoint endpoints = 1;</code> */ public com.google.cloud.servicedirectory.v1.EndpointOrBuilder getEndpointsOrBuilder(int index) { if (endpointsBuilder_ == null) { return endpoints_.get(index); } else { return endpointsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The list of endpoints. * </pre> * * <code>repeated .google.cloud.servicedirectory.v1.Endpoint endpoints = 1;</code> */ public java.util.List<? extends com.google.cloud.servicedirectory.v1.EndpointOrBuilder> getEndpointsOrBuilderList() { if (endpointsBuilder_ != null) { return endpointsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(endpoints_); } } /** * * * <pre> * The list of endpoints. * </pre> * * <code>repeated .google.cloud.servicedirectory.v1.Endpoint endpoints = 1;</code> */ public com.google.cloud.servicedirectory.v1.Endpoint.Builder addEndpointsBuilder() { return getEndpointsFieldBuilder() .addBuilder(com.google.cloud.servicedirectory.v1.Endpoint.getDefaultInstance()); } /** * * * <pre> * The list of endpoints. * </pre> * * <code>repeated .google.cloud.servicedirectory.v1.Endpoint endpoints = 1;</code> */ public com.google.cloud.servicedirectory.v1.Endpoint.Builder addEndpointsBuilder(int index) { return getEndpointsFieldBuilder() .addBuilder(index, com.google.cloud.servicedirectory.v1.Endpoint.getDefaultInstance()); } /** * * * <pre> * The list of endpoints. 
* </pre> * * <code>repeated .google.cloud.servicedirectory.v1.Endpoint endpoints = 1;</code> */ public java.util.List<com.google.cloud.servicedirectory.v1.Endpoint.Builder> getEndpointsBuilderList() { return getEndpointsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.servicedirectory.v1.Endpoint, com.google.cloud.servicedirectory.v1.Endpoint.Builder, com.google.cloud.servicedirectory.v1.EndpointOrBuilder> getEndpointsFieldBuilder() { if (endpointsBuilder_ == null) { endpointsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.servicedirectory.v1.Endpoint, com.google.cloud.servicedirectory.v1.Endpoint.Builder, com.google.cloud.servicedirectory.v1.EndpointOrBuilder>( endpoints_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); endpoints_ = null; } return endpointsBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no * more results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no * more results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no * more results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no * more results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no * more results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.servicedirectory.v1.ListEndpointsResponse) } // @@protoc_insertion_point(class_scope:google.cloud.servicedirectory.v1.ListEndpointsResponse) private static final com.google.cloud.servicedirectory.v1.ListEndpointsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.servicedirectory.v1.ListEndpointsResponse(); } public static com.google.cloud.servicedirectory.v1.ListEndpointsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListEndpointsResponse> PARSER = new com.google.protobuf.AbstractParser<ListEndpointsResponse>() { @java.lang.Override public ListEndpointsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return 
builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListEndpointsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListEndpointsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.servicedirectory.v1.ListEndpointsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/kafka
36,637
streams/integration-tests/src/test/java/org/apache/kafka/streams/integration/KStreamRepartitionIntegrationTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.kafka.streams.integration; import org.apache.kafka.clients.admin.Admin; import org.apache.kafka.clients.admin.AdminClientConfig; import org.apache.kafka.clients.admin.TopicDescription; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.common.serialization.Deserializer; import org.apache.kafka.common.serialization.IntegerDeserializer; import org.apache.kafka.common.serialization.IntegerSerializer; import org.apache.kafka.common.serialization.LongDeserializer; import org.apache.kafka.common.serialization.Serdes; import org.apache.kafka.common.serialization.StringDeserializer; import org.apache.kafka.common.serialization.StringSerializer; import org.apache.kafka.common.utils.Utils; import org.apache.kafka.streams.GroupProtocol; import org.apache.kafka.streams.KafkaStreams; import org.apache.kafka.streams.KeyValue; import org.apache.kafka.streams.StreamsBuilder; import org.apache.kafka.streams.StreamsConfig; import org.apache.kafka.streams.integration.utils.EmbeddedKafkaCluster; import org.apache.kafka.streams.integration.utils.IntegrationTestUtils; import org.apache.kafka.streams.kstream.Consumed; import org.apache.kafka.streams.kstream.JoinWindows; 
import org.apache.kafka.streams.kstream.KStream; import org.apache.kafka.streams.kstream.Named; import org.apache.kafka.streams.kstream.Repartitioned; import org.apache.kafka.streams.processor.StreamPartitioner; import org.apache.kafka.test.TestUtils; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.TestInfo; import org.junit.jupiter.api.Timeout; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; import java.io.File; import java.io.IOException; import java.time.Duration; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Locale; import java.util.Objects; import java.util.Optional; import java.util.Properties; import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; import java.util.stream.IntStream; import java.util.stream.Stream; import static org.apache.kafka.streams.KafkaStreams.State.ERROR; import static org.apache.kafka.streams.KafkaStreams.State.REBALANCING; import static org.apache.kafka.streams.KafkaStreams.State.RUNNING; import static org.apache.kafka.streams.errors.StreamsUncaughtExceptionHandler.StreamThreadExceptionResponse.SHUTDOWN_CLIENT; import static org.apache.kafka.streams.utils.TestUtils.safeUniqueTestName; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertTrue; @SuppressWarnings("deprecation") @Tag("integration") @Timeout(600) 
public class KStreamRepartitionIntegrationTest {
    // Single-broker embedded cluster shared by all tests in this class.
    private static final int NUM_BROKERS = 1;

    public static final EmbeddedKafkaCluster CLUSTER = new EmbeddedKafkaCluster(NUM_BROKERS);

    @BeforeAll
    public static void startCluster() throws IOException {
        CLUSTER.start();
    }

    @AfterAll
    public static void closeCluster() {
        CLUSTER.stop();
    }

    // Per-test topic/application names, suffixed with a unique test name in before().
    private String topicB;
    private String inputTopic;
    private String outputTopic;
    private String applicationId;
    private String safeTestName;

    // Streams instances to be closed in whenShuttingDown(); presumably populated by
    // startStreams(...) defined later in this class — TODO confirm (not visible here).
    private List<KafkaStreams> kafkaStreamsInstances;

    // Scratch state directory, deleted in whenShuttingDown().
    private final File testFolder = TestUtils.tempDirectory();

    // Creates fresh, uniquely-named topics for each test: the input topic with
    // 4 partitions and the output topic with 1 partition.
    @BeforeEach
    public void before(final TestInfo testInfo) throws InterruptedException {
        kafkaStreamsInstances = new ArrayList<>();
        safeTestName = safeUniqueTestName(testInfo);
        topicB = "topic-b-" + safeTestName;
        inputTopic = "input-topic-" + safeTestName;
        outputTopic = "output-topic-" + safeTestName;
        applicationId = "app-" + safeTestName;
        CLUSTER.createTopic(inputTopic, 4, 1);
        CLUSTER.createTopic(outputTopic, 1, 1);
    }

    // Builds the Streams configuration for one parameterized run. Caching is
    // disabled and the commit interval shortened so records flow promptly;
    // when useNewProtocol is set, the new STREAMS group protocol is selected.
    private Properties createStreamsConfig(final String topologyOptimization, final boolean useNewProtocol) {
        final Properties streamsConfiguration = new Properties();
        streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG, applicationId);
        streamsConfiguration.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
        streamsConfiguration.put(StreamsConfig.STATE_DIR_CONFIG, testFolder.getPath());
        streamsConfiguration.put(StreamsConfig.STATESTORE_CACHE_MAX_BYTES_CONFIG, 0);
        streamsConfiguration.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, 100L);
        streamsConfiguration.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.Integer().getClass());
        streamsConfiguration.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());
        streamsConfiguration.put(StreamsConfig.TOPOLOGY_OPTIMIZATION_CONFIG, topologyOptimization);
        if (useNewProtocol) {
            streamsConfiguration.put(StreamsConfig.GROUP_PROTOCOL_CONFIG,
                GroupProtocol.STREAMS.name().toLowerCase(Locale.getDefault()));
        }
        return streamsConfiguration;
    }

    // Parameter matrix: each test runs under both topology-optimization settings
    // crossed with both group protocols (CLASSIC and STREAMS).
    private static Stream<Arguments> protocolAndOptimizationParameters() {
        return Stream.of(
            Arguments.of(StreamsConfig.OPTIMIZE, false),         // OPTIMIZE with CLASSIC protocol
            Arguments.of(StreamsConfig.OPTIMIZE, true),          // OPTIMIZE with STREAMS protocol
            Arguments.of(StreamsConfig.NO_OPTIMIZATION, false),  // NO_OPTIMIZATION with CLASSIC protocol
            Arguments.of(StreamsConfig.NO_OPTIMIZATION, true)    // NO_OPTIMIZATION with STREAMS protocol
        );
    }

    // Closes every started Streams instance (60s grace each) and removes the
    // state directory.
    @AfterEach
    public void whenShuttingDown() throws IOException {
        kafkaStreamsInstances.stream()
            .filter(Objects::nonNull)
            .forEach(ks -> ks.close(Duration.ofSeconds(60)));
        Utils.delete(testFolder);
    }

    // A repartition before a join must have as many partitions as the other join
    // side's source topic (6 here vs. 2 requested). Expects the instance to go to
    // ERROR with a partition-mismatch message.
    @ParameterizedTest
    @MethodSource("protocolAndOptimizationParameters")
    public void shouldThrowAnExceptionWhenNumberOfPartitionsOfRepartitionOperationDoNotMatchSourceTopicWhenJoining(final String topologyOptimization, final boolean useNewProtocol) throws InterruptedException {
        final int topicBNumberOfPartitions = 6;
        final String inputTopicRepartitionName = "join-repartition-test";
        final AtomicReference<Throwable> expectedThrowable = new AtomicReference<>();
        final int inputTopicRepartitionedNumOfPartitions = 2;

        CLUSTER.createTopic(topicB, topicBNumberOfPartitions, 1);

        final StreamsBuilder builder = new StreamsBuilder();

        final Repartitioned<Integer, String> inputTopicRepartitioned = Repartitioned
            .<Integer, String>as(inputTopicRepartitionName)
            .withNumberOfPartitions(inputTopicRepartitionedNumOfPartitions);

        final KStream<Integer, String> topicBStream = builder
            .stream(topicB, Consumed.with(Serdes.Integer(), Serdes.String()));

        builder.stream(inputTopic, Consumed.with(Serdes.Integer(), Serdes.String()))
            .repartition(inputTopicRepartitioned)
            .join(topicBStream, (value1, value2) -> value2, JoinWindows.ofTimeDifferenceWithNoGrace(Duration.ofSeconds(10)))
            .to(outputTopic);

        final Properties streamsConfiguration = createStreamsConfig(topologyOptimization,
            useNewProtocol);

        // This instance is managed locally (not via startStreams) because the test
        // expects it to fail; the handler captures the exception and shuts the client down.
        try (final KafkaStreams ks = new KafkaStreams(builder.build(streamsConfiguration), streamsConfiguration)) {
            ks.setUncaughtExceptionHandler(exception -> {
                expectedThrowable.set(exception);
                System.out.println(String.format("[%s Protocol] Exception caught: %s",
                    useNewProtocol ? "STREAMS" : "CLASSIC", exception.getMessage()));
                return SHUTDOWN_CLIENT;
            });
            ks.start();
            TestUtils.waitForCondition(() -> ks.state() == ERROR, 30_000, "Kafka Streams never went into error state");

            final String expectedMsg = String.format("Number of partitions [%s] of repartition topic [%s] " +
                    "doesn't match number of partitions [%s] of the source topic.",
                inputTopicRepartitionedNumOfPartitions,
                toRepartitionTopicName(inputTopicRepartitionName),
                topicBNumberOfPartitions);
            assertNotNull(expectedThrowable.get());
            assertTrue(expectedThrowable.get().getMessage().contains(expectedMsg));
        }
    }

    // When one join side declares 3 partitions via Repartitioned, the other side's
    // (unnamed) repartition topic should be co-partitioned to the same count.
    @ParameterizedTest
    @MethodSource("protocolAndOptimizationParameters")
    public void shouldDeductNumberOfPartitionsFromRepartitionOperation(final String topologyOptimization, final boolean useNewProtocol) throws Exception {
        final String topicBMapperName = "topic-b-mapper";
        final int topicBNumberOfPartitions = 6;
        final String inputTopicRepartitionName = "join-repartition-test";
        final int inputTopicRepartitionedNumOfPartitions = 3;
        final long timestamp = System.currentTimeMillis();

        CLUSTER.createTopic(topicB, topicBNumberOfPartitions, 1);

        final List<KeyValue<Integer, String>> expectedRecords = Arrays.asList(
            new KeyValue<>(1, "A"),
            new KeyValue<>(2, "B")
        );

        // sendEvents(...) helpers are defined later in this class (not shown here).
        sendEvents(timestamp, expectedRecords);
        sendEvents(topicB, timestamp, expectedRecords);

        final StreamsBuilder builder = new StreamsBuilder();

        final Repartitioned<Integer, String> inputTopicRepartitioned = Repartitioned
            .<Integer, String>as(inputTopicRepartitionName)
            .withNumberOfPartitions(inputTopicRepartitionedNumOfPartitions);

        // map() is key-changing, so topic B is repartitioned through a topic named
        // after the mapper.
        final KStream<Integer, String> topicBStream = builder
            .stream(topicB, Consumed.with(Serdes.Integer(), Serdes.String()))
            .map(KeyValue::new, Named.as(topicBMapperName));

        builder.stream(inputTopic, Consumed.with(Serdes.Integer(), Serdes.String()))
            .repartition(inputTopicRepartitioned)
            .join(topicBStream, (value1, value2) -> value2, JoinWindows.of(Duration.ofSeconds(10)))
            .to(outputTopic);

        final Properties streamsConfiguration = createStreamsConfig(topologyOptimization,
            useNewProtocol);
        // NOTE(review): build(config) is invoked here before startStreams(...) as
        // well — presumably to resolve the (possibly optimized) topology up front;
        // confirm intent.
        builder.build(streamsConfiguration);

        startStreams(builder, streamsConfiguration);

        // Both repartition topics must end up with the explicitly requested count.
        assertEquals(inputTopicRepartitionedNumOfPartitions, getNumberOfPartitionsForTopic(toRepartitionTopicName(inputTopicRepartitionName)));
        assertEquals(inputTopicRepartitionedNumOfPartitions, getNumberOfPartitionsForTopic(toRepartitionTopicName(topicBMapperName)));

        validateReceivedMessages(
            new IntegerDeserializer(),
            new StringDeserializer(),
            expectedRecords
        );
    }

    // Both join sides are explicitly repartitioned to 4 partitions; the join must
    // succeed and null-keyed records must be dropped (only "A" and "B" survive).
    @ParameterizedTest
    @MethodSource("protocolAndOptimizationParameters")
    public void shouldDoProperJoiningWhenNumberOfPartitionsAreValidWhenUsingRepartitionOperation(final String topologyOptimization, final boolean useNewProtocol) throws Exception {
        final String topicBRepartitionedName = "topic-b-scale-up";
        final String inputTopicRepartitionedName = "input-topic-scale-up";
        final long timestamp = System.currentTimeMillis();

        CLUSTER.createTopic(topicB, 1, 1);

        final List<KeyValue<Integer, String>> expectedRecords = Arrays.asList(
            new KeyValue<>(1, "A"),
            new KeyValue<>(2, "B")
        );

        // A null-keyed record is sent too; it is not expected in the output.
        final List<KeyValue<Integer, String>> recordsToSend = new ArrayList<>(expectedRecords);
        recordsToSend.add(new KeyValue<>(null, "C"));

        sendEvents(timestamp, recordsToSend);
        sendEvents(topicB, timestamp, recordsToSend);

        final StreamsBuilder builder = new StreamsBuilder();

        final Repartitioned<Integer, String> inputTopicRepartitioned = Repartitioned
            .<Integer, String>as(inputTopicRepartitionedName)
            .withNumberOfPartitions(4);

        final Repartitioned<Integer, String> topicBRepartitioned = Repartitioned
            .<Integer, String>as(topicBRepartitionedName)
            .withNumberOfPartitions(4);

        final KStream<Integer, String> topicBStream = builder
            .stream(topicB, Consumed.with(Serdes.Integer(), Serdes.String()))
            .repartition(topicBRepartitioned);

        builder.stream(inputTopic, Consumed.with(Serdes.Integer(), Serdes.String()))
            .repartition(inputTopicRepartitioned)
            .join(topicBStream, (value1, value2) -> value2, JoinWindows.of(Duration.ofSeconds(10)))
            .to(outputTopic);

        startStreams(builder, createStreamsConfig(topologyOptimization, useNewProtocol));

        assertEquals(4, getNumberOfPartitionsForTopic(toRepartitionTopicName(topicBRepartitionedName)));
        assertEquals(4, getNumberOfPartitionsForTopic(toRepartitionTopicName(inputTopicRepartitionedName)));

        validateReceivedMessages(
            new IntegerDeserializer(),
            new StringDeserializer(),
            expectedRecords
        );
    }

    // A custom StreamPartitioner that returns every partition should broadcast
    // each record to all 4 partitions of the repartition and output topics.
    @ParameterizedTest
    @MethodSource("protocolAndOptimizationParameters")
    public void shouldRepartitionToMultiplePartitions(final String topologyOptimization, final boolean useNewProtocol) throws Exception {
        final String repartitionName = "broadcasting-partitioner-test";
        final long timestamp = System.currentTimeMillis();
        final AtomicInteger partitionerInvocation = new AtomicInteger(0);

        // This test needs to write to an output topic with 4 partitions. Hence, creating a new one
        final String broadcastingOutputTopic = "broadcast-output-topic-" + safeTestName;
        CLUSTER.createTopic(broadcastingOutputTopic, 4, 1);

        // Each of the two sent records appears once per partition, i.e. 4 times.
        final List<KeyValue<Integer, String>> expectedRecordsOnRepartition = Arrays.asList(
            new KeyValue<>(1, "A"), new KeyValue<>(1, "A"), new KeyValue<>(1, "A"), new KeyValue<>(1, "A"),
            new KeyValue<>(2, "B"), new KeyValue<>(2, "B"), new KeyValue<>(2, "B"), new KeyValue<>(2, "B")
        );
        // Indexes 3..4 of the list above: exactly one (1, "A") and one (2, "B").
        final List<KeyValue<Integer, String>> expectedRecords = expectedRecordsOnRepartition.subList(3, 5);

        // Counts invocations and targets every available partition.
        class BroadcastingPartitioner implements StreamPartitioner<Integer, String> {
            @Override
            public Optional<Set<Integer>> partitions(final String topic, final Integer key, final String value, final int numPartitions) {
                partitionerInvocation.incrementAndGet();
                return Optional.of(IntStream.range(0, numPartitions).boxed().collect(Collectors.toSet()));
            }
        }

        sendEvents(timestamp, expectedRecords);

        final StreamsBuilder builder = new StreamsBuilder();

        final Repartitioned<Integer, String> repartitioned = Repartitioned
            .<Integer, String>as(repartitionName)
            .withStreamPartitioner(new BroadcastingPartitioner());

        builder.stream(inputTopic, Consumed.with(Serdes.Integer(), Serdes.String()))
            .repartition(repartitioned)
            .to(broadcastingOutputTopic);

        startStreams(builder, createStreamsConfig(topologyOptimization, useNewProtocol));

        final String topic = toRepartitionTopicName(repartitionName);

        // Both records should be there on all 4 partitions of repartition and output topic
        validateReceivedMessages(
            new IntegerDeserializer(),
            new StringDeserializer(),
            expectedRecordsOnRepartition,
            topic
        );

        validateReceivedMessages(
            new IntegerDeserializer(),
            new StringDeserializer(),
            expectedRecordsOnRepartition,
            broadcastingOutputTopic
        );

        assertTrue(topicExists(topic));
        // The partitioner is applied only when writing to the repartition topic,
        // so it is invoked once per sent record.
        assertEquals(expectedRecords.size(), partitionerInvocation.get());
    }

    // A custom partitioner that pins every record to partition 1 must be invoked
    // once per record written to the repartition topic.
    @ParameterizedTest
    @MethodSource("protocolAndOptimizationParameters")
    public void shouldUseStreamPartitionerForRepartitionOperation(final String topologyOptimization, final boolean useNewProtocol) throws Exception {
        final int partition = 1;
        final String repartitionName = "partitioner-test";
        final long timestamp = System.currentTimeMillis();
        final AtomicInteger partitionerInvocation = new AtomicInteger(0);

        final List<KeyValue<Integer, String>> expectedRecords = Arrays.asList(
            new KeyValue<>(1, "A"),
            new KeyValue<>(2, "B")
        );

        sendEvents(timestamp, expectedRecords);

        final StreamsBuilder builder = new StreamsBuilder();

        final Repartitioned<Integer, String> repartitioned = Repartitioned
            .<Integer, String>as(repartitionName)
            .withStreamPartitioner((topic, key, value, numPartitions) -> {
                partitionerInvocation.incrementAndGet();
                return Optional.of(Collections.singleton(partition));
            });

        builder.stream(inputTopic, Consumed.with(Serdes.Integer(), Serdes.String()))
            .repartition(repartitioned)
            .to(outputTopic);

        startStreams(builder, createStreamsConfig(topologyOptimization, useNewProtocol));

        final String topic = toRepartitionTopicName(repartitionName);

        validateReceivedMessages(
            new IntegerDeserializer(),
            new StringDeserializer(),
            expectedRecords
        );

        assertTrue(topicExists(topic));
        assertEquals(expectedRecords.size(), partitionerInvocation.get());
    }

    // selectKey followed by a parameterless repartition(): keys become the parsed
    // values, and exactly one repartition sink appears in the topology.
    @ParameterizedTest
    @MethodSource("protocolAndOptimizationParameters")
    public void shouldPerformSelectKeyWithRepartitionOperation(final String topologyOptimization, final boolean useNewProtocol) throws Exception {
        final long timestamp = System.currentTimeMillis();

        sendEvents(
            timestamp,
            Arrays.asList(
                new KeyValue<>(1, "10"),
                new KeyValue<>(2, "20")
            )
        );

        final StreamsBuilder builder = new StreamsBuilder();

        builder.stream(inputTopic, Consumed.with(Serdes.Integer(), Serdes.String()))
            .selectKey((key, value) -> Integer.valueOf(value))
            .repartition()
            .to(outputTopic);

        startStreams(builder, createStreamsConfig(topologyOptimization, useNewProtocol));

        validateReceivedMessages(
            new IntegerDeserializer(),
            new StringDeserializer(),
            Arrays.asList(
                new KeyValue<>(10, "10"),
                new KeyValue<>(20, "20")
            )
        );

        final String topology = builder.build().describe().toString();

        assertEquals(1, countOccurrencesInTopology(topology, "Sink: .*-repartition.*"));
    }

    // repartition() creates its topic even when no key-changing operation
    // preceded it; records pass through unchanged.
    @ParameterizedTest
    @MethodSource("protocolAndOptimizationParameters")
    public void shouldCreateRepartitionTopicIfKeyChangingOperationWasNotPerformed(final String topologyOptimization, final boolean useNewProtocol) throws Exception {
        final String repartitionName = "dummy";
        final long timestamp = System.currentTimeMillis();

        sendEvents(
            timestamp,
            Arrays.asList(
                new KeyValue<>(1, "A"),
                new KeyValue<>(2, "B")
            )
        );

        final StreamsBuilder builder = new StreamsBuilder();

        builder.stream(inputTopic, Consumed.with(Serdes.Integer(), Serdes.String()))
            .repartition(Repartitioned.as(repartitionName))
            .to(outputTopic);

        startStreams(builder, createStreamsConfig(topologyOptimization, useNewProtocol));

        validateReceivedMessages(
            new IntegerDeserializer(),
            new StringDeserializer(),
            Arrays.asList(
                new KeyValue<>(1, "A"),
                new KeyValue<>(2, "B")
            )
        );

        final String topology = builder.build().describe().toString();

        assertTrue(topicExists(toRepartitionTopicName(repartitionName)));
        assertEquals(1, countOccurrencesInTopology(topology, "Sink: .*dummy-repartition.*"));
    }

    // selectKey + repartition with a String key serde, then a count: verifies the
    // key-select actually ran (stringified keys, count of 1 each) and that the
    // topology contains exactly one repartition sink wired after the selectKey node.
    @ParameterizedTest
    @MethodSource("protocolAndOptimizationParameters")
    public void shouldPerformKeySelectOperationWhenRepartitionOperationIsUsedWithKeySelector(final String topologyOptimization, final boolean useNewProtocol) throws Exception {
        final String repartitionedName = "new-key";
        final long timestamp = System.currentTimeMillis();

        sendEvents(
            timestamp,
            Arrays.asList(
                new KeyValue<>(1, "A"),
                new KeyValue<>(2, "B")
            )
        );

        final StreamsBuilder builder = new StreamsBuilder();

        final Repartitioned<String, String> repartitioned = Repartitioned.<String, String>as(repartitionedName)
            .withKeySerde(Serdes.String());

        builder.stream(inputTopic, Consumed.with(Serdes.Integer(), Serdes.String()))
            .selectKey((key, value) -> key.toString(), Named.as(repartitionedName))
            .repartition(repartitioned)
            .groupByKey()
            .count()
            .toStream()
            .to(outputTopic);

        startStreams(builder, createStreamsConfig(topologyOptimization, useNewProtocol));

        validateReceivedMessages(
            new StringDeserializer(),
            new LongDeserializer(),
            Arrays.asList(
                new KeyValue<>("1", 1L),
                new KeyValue<>("2", 1L)
            )
        );

        final String topology = builder.build().describe().toString();
        final String repartitionTopicName = toRepartitionTopicName(repartitionedName);

        assertTrue(topicExists(repartitionTopicName));
        assertEquals(1, countOccurrencesInTopology(topology, "Sink: .*" + repartitionedName + "-repartition.*"));
        assertEquals(1, countOccurrencesInTopology(topology, "<-- " + repartitionedName + "\n"));
    }

    // Repartitioned.withNumberOfPartitions(2) must be honored when creating the
    // repartition topic, even though the input topic has 4 partitions.
    @ParameterizedTest
    @MethodSource("protocolAndOptimizationParameters")
    public void shouldCreateRepartitionTopicWithSpecifiedNumberOfPartitions(final String topologyOptimization, final boolean useNewProtocol) throws Exception {
        final String repartitionName = "new-partitions";
        final long timestamp = System.currentTimeMillis();

        sendEvents(
            timestamp,
            Arrays.asList(
                new KeyValue<>(1, "A"),
                new KeyValue<>(2, "B")
            )
        );

        final StreamsBuilder builder = new StreamsBuilder();

        builder.stream(inputTopic, Consumed.with(Serdes.Integer(), Serdes.String()))
            .repartition(Repartitioned.<Integer, String>as(repartitionName).withNumberOfPartitions(2))
            .groupByKey()
            .count()
            .toStream()
            .to(outputTopic);

        startStreams(builder, createStreamsConfig(topologyOptimization, useNewProtocol));

        validateReceivedMessages(
            new IntegerDeserializer(),
            new LongDeserializer(),
            Arrays.asList(
                new KeyValue<>(1, 1L),
                new KeyValue<>(2, 1L)
            )
        );

        final String repartitionTopicName = toRepartitionTopicName(repartitionName);

        assertTrue(topicExists(repartitionTopicName));
        assertEquals(2, getNumberOfPartitionsForTopic(repartitionTopicName));
    }

    @ParameterizedTest
    @MethodSource("protocolAndOptimizationParameters")
    public void shouldInheritRepartitionTopicPartitionNumberFromUpstreamTopicWhenNumberOfPartitionsIsNotSpecified(final String
topologyOptimization, final boolean useNewProtocol) throws Exception { final String repartitionName = "new-topic"; final long timestamp = System.currentTimeMillis(); sendEvents( timestamp, Arrays.asList( new KeyValue<>(1, "A"), new KeyValue<>(2, "B") ) ); final StreamsBuilder builder = new StreamsBuilder(); builder.stream(inputTopic, Consumed.with(Serdes.Integer(), Serdes.String())) .repartition(Repartitioned.as(repartitionName)) .groupByKey() .count() .toStream() .to(outputTopic); startStreams(builder, createStreamsConfig(topologyOptimization, useNewProtocol)); validateReceivedMessages( new IntegerDeserializer(), new LongDeserializer(), Arrays.asList( new KeyValue<>(1, 1L), new KeyValue<>(2, 1L) ) ); final String repartitionTopicName = toRepartitionTopicName(repartitionName); assertTrue(topicExists(repartitionTopicName)); assertEquals(4, getNumberOfPartitionsForTopic(repartitionTopicName)); } @ParameterizedTest @MethodSource("protocolAndOptimizationParameters") public void shouldCreateOnlyOneRepartitionTopicWhenRepartitionIsFollowedByGroupByKey(final String topologyOptimization, final boolean useNewProtocol) throws Exception { final String repartitionName = "new-partitions"; final long timestamp = System.currentTimeMillis(); sendEvents( timestamp, Arrays.asList( new KeyValue<>(1, "A"), new KeyValue<>(2, "B") ) ); final StreamsBuilder builder = new StreamsBuilder(); final Repartitioned<String, String> repartitioned = Repartitioned.<String, String>as(repartitionName) .withKeySerde(Serdes.String()) .withValueSerde(Serdes.String()) .withNumberOfPartitions(1); builder.stream(inputTopic, Consumed.with(Serdes.Integer(), Serdes.String())) .selectKey((key, value) -> key.toString()) .repartition(repartitioned) .groupByKey() .count() .toStream() .to(outputTopic); startStreams(builder, createStreamsConfig(topologyOptimization, useNewProtocol)); final String topology = builder.build().describe().toString(); validateReceivedMessages( new StringDeserializer(), new 
LongDeserializer(), Arrays.asList( new KeyValue<>("1", 1L), new KeyValue<>("2", 1L) ) ); assertTrue(topicExists(toRepartitionTopicName(repartitionName))); assertEquals(1, countOccurrencesInTopology(topology, "Sink: .*-repartition")); } @ParameterizedTest @MethodSource("protocolAndOptimizationParameters") public void shouldGenerateRepartitionTopicWhenNameIsNotSpecified(final String topologyOptimization, final boolean useNewProtocol) throws Exception { final long timestamp = System.currentTimeMillis(); sendEvents( timestamp, Arrays.asList( new KeyValue<>(1, "A"), new KeyValue<>(2, "B") ) ); final StreamsBuilder builder = new StreamsBuilder(); builder.stream(inputTopic, Consumed.with(Serdes.Integer(), Serdes.String())) .selectKey((key, value) -> key.toString()) .repartition(Repartitioned.with(Serdes.String(), Serdes.String())) .to(outputTopic); startStreams(builder, createStreamsConfig(topologyOptimization, useNewProtocol)); validateReceivedMessages( new StringDeserializer(), new StringDeserializer(), Arrays.asList( new KeyValue<>("1", "A"), new KeyValue<>("2", "B") ) ); final String topology = builder.build().describe().toString(); assertEquals(1, countOccurrencesInTopology(topology, "Sink: .*-repartition")); } @ParameterizedTest @MethodSource("protocolAndOptimizationParameters") public void shouldGoThroughRebalancingCorrectly(final String topologyOptimization, final boolean useNewProtocol) throws Exception { final String repartitionName = "rebalancing-test"; final long timestamp = System.currentTimeMillis(); sendEvents( timestamp, Arrays.asList( new KeyValue<>(1, "A"), new KeyValue<>(2, "B") ) ); final StreamsBuilder builder = new StreamsBuilder(); final Repartitioned<String, String> repartitioned = Repartitioned.<String, String>as(repartitionName) .withKeySerde(Serdes.String()) .withValueSerde(Serdes.String()) .withNumberOfPartitions(2); builder.stream(inputTopic, Consumed.with(Serdes.Integer(), Serdes.String())) .selectKey((key, value) -> key.toString()) 
.repartition(repartitioned) .groupByKey() .count() .toStream() .to(outputTopic); final Properties streamsConfiguration = createStreamsConfig(topologyOptimization, useNewProtocol); startStreams(builder, streamsConfiguration); final Properties streamsToCloseConfigs = new Properties(); streamsToCloseConfigs.putAll(streamsConfiguration); streamsToCloseConfigs.put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory().getPath() + "-2"); final KafkaStreams kafkaStreamsToClose = startStreams(builder, streamsToCloseConfigs); validateReceivedMessages( new StringDeserializer(), new LongDeserializer(), Arrays.asList( new KeyValue<>("1", 1L), new KeyValue<>("2", 1L) ) ); kafkaStreamsToClose.close(Duration.ofSeconds(5)); sendEvents( timestamp, Arrays.asList( new KeyValue<>(1, "C"), new KeyValue<>(2, "D") ) ); validateReceivedMessages( new StringDeserializer(), new LongDeserializer(), Arrays.asList( new KeyValue<>("1", 2L), new KeyValue<>("2", 2L) ) ); final String repartitionTopicName = toRepartitionTopicName(repartitionName); assertTrue(topicExists(repartitionTopicName)); assertEquals(2, getNumberOfPartitionsForTopic(repartitionTopicName)); } private int getNumberOfPartitionsForTopic(final String topic) throws Exception { try (final Admin adminClient = createAdminClient()) { final TopicDescription topicDescription = adminClient.describeTopics(Collections.singleton(topic)) .topicNameValues() .get(topic) .get(); return topicDescription.partitions().size(); } } private boolean topicExists(final String topic) throws Exception { try (final Admin adminClient = createAdminClient()) { final Set<String> topics = adminClient.listTopics() .names() .get(); return topics.contains(topic); } } private String toRepartitionTopicName(final String input) { return applicationId + "-" + input + "-repartition"; } private static Admin createAdminClient() { final Properties properties = new Properties(); properties.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers()); return 
Admin.create(properties); } private static int countOccurrencesInTopology(final String topologyString, final String searchPattern) { final Matcher matcher = Pattern.compile(searchPattern).matcher(topologyString); final List<String> repartitionTopicsFound = new ArrayList<>(); while (matcher.find()) { repartitionTopicsFound.add(matcher.group()); } return repartitionTopicsFound.size(); } private void sendEvents(final long timestamp, final List<KeyValue<Integer, String>> events) { sendEvents(inputTopic, timestamp, events); } private void sendEvents(final String topic, final long timestamp, final List<KeyValue<Integer, String>> events) { IntegrationTestUtils.produceKeyValuesSynchronouslyWithTimestamp( topic, events, TestUtils.producerConfig( CLUSTER.bootstrapServers(), IntegerSerializer.class, StringSerializer.class, new Properties() ), timestamp ); } private KafkaStreams startStreams(final StreamsBuilder builder, final Properties streamsConfiguration) throws InterruptedException { final CountDownLatch latch; final KafkaStreams kafkaStreams = new KafkaStreams(builder.build(streamsConfiguration), streamsConfiguration); latch = new CountDownLatch(1); kafkaStreams.setStateListener((newState, oldState) -> { if (REBALANCING == oldState && RUNNING == newState) { latch.countDown(); } }); kafkaStreams.start(); latch.await(IntegrationTestUtils.DEFAULT_TIMEOUT, TimeUnit.MILLISECONDS); kafkaStreamsInstances.add(kafkaStreams); return kafkaStreams; } private <K, V> void validateReceivedMessages(final Deserializer<K> keySerializer, final Deserializer<V> valueSerializer, final List<KeyValue<K, V>> expectedRecords) throws Exception { validateReceivedMessages(keySerializer, valueSerializer, expectedRecords, outputTopic); } private <K, V> void validateReceivedMessages(final Deserializer<K> keySerializer, final Deserializer<V> valueSerializer, final List<KeyValue<K, V>> expectedRecords, final String outputTopic) throws Exception { final Properties consumerProperties = new Properties(); 
consumerProperties.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers()); consumerProperties.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "group-" + safeTestName); consumerProperties.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); consumerProperties.setProperty( ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, keySerializer.getClass().getName() ); consumerProperties.setProperty( ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, valueSerializer.getClass().getName() ); IntegrationTestUtils.waitUntilFinalKeyValueRecordsReceived( consumerProperties, outputTopic, expectedRecords ); } }
googleapis/google-api-java-client-services
36,702
clients/google-api-services-mybusinessbusinesscalls/v1/1.31.0/com/google/api/services/mybusinessbusinesscalls/v1/MyBusinessBusinessCalls.java
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * This code was generated by https://github.com/googleapis/google-api-java-client-services/ * Modify at your own risk. */ package com.google.api.services.mybusinessbusinesscalls.v1; /** * Service definition for MyBusinessBusinessCalls (v1). * * <p> * The My Business Business Calls API manages business calls information of a location on Google and collect insights like the number of missed calls to their location. Additional information about Business calls can be found at https://support.google.com/business/answer/9688285?p=call_history. If the Google Business Profile links to a Google Ads account and call history is turned on, calls that last longer than a specific time, and that can be attributed to an ad interaction, will show in the linked Google Ads account under the "Calls from Ads" conversion. If smart bidding and call conversions are used in the optimization strategy, there could be a change in ad spend. Learn more about smart bidding. To view and perform actions on a location's calls, you need to be a `OWNER`, `CO_OWNER` or `MANAGER` of the location. * </p> * * <p> * For more information about this service, see the * <a href="https://developers.google.com/my-business/" target="_blank">API Documentation</a> * </p> * * <p> * This service uses {@link MyBusinessBusinessCallsRequestInitializer} to initialize global parameters via its * {@link Builder}. * </p> * * @since 1.3 * @author Google, Inc. 
*/ @SuppressWarnings("javadoc") public class MyBusinessBusinessCalls extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient { // Note: Leave this static initializer at the top of the file. static { com.google.api.client.util.Preconditions.checkState( com.google.api.client.googleapis.GoogleUtils.MAJOR_VERSION == 1 && (com.google.api.client.googleapis.GoogleUtils.MINOR_VERSION >= 32 || (com.google.api.client.googleapis.GoogleUtils.MINOR_VERSION == 31 && com.google.api.client.googleapis.GoogleUtils.BUGFIX_VERSION >= 1)), "You are currently running with version %s of google-api-client. " + "You need at least version 1.31.1 of google-api-client to run version " + "1.32.1 of the My Business Business Calls API library.", com.google.api.client.googleapis.GoogleUtils.VERSION); } /** * The default encoded root URL of the service. This is determined when the library is generated * and normally should not be changed. * * @since 1.7 */ public static final String DEFAULT_ROOT_URL = "https://mybusinessbusinesscalls.googleapis.com/"; /** * The default encoded mTLS root URL of the service. This is determined when the library is generated * and normally should not be changed. * * @since 1.31 */ public static final String DEFAULT_MTLS_ROOT_URL = "https://mybusinessbusinesscalls.mtls.googleapis.com/"; /** * The default encoded service path of the service. This is determined when the library is * generated and normally should not be changed. * * @since 1.7 */ public static final String DEFAULT_SERVICE_PATH = ""; /** * The default encoded batch path of the service. This is determined when the library is * generated and normally should not be changed. * * @since 1.23 */ public static final String DEFAULT_BATCH_PATH = "batch"; /** * The default encoded base URL of the service. This is determined when the library is generated * and normally should not be changed. 
*/ public static final String DEFAULT_BASE_URL = DEFAULT_ROOT_URL + DEFAULT_SERVICE_PATH; /** * Constructor. * * <p> * Use {@link Builder} if you need to specify any of the optional parameters. * </p> * * @param transport HTTP transport, which should normally be: * <ul> * <li>Google App Engine: * {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li> * <li>Android: {@code newCompatibleTransport} from * {@code com.google.api.client.extensions.android.http.AndroidHttp}</li> * <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()} * </li> * </ul> * @param jsonFactory JSON factory, which may be: * <ul> * <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li> * <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li> * <li>Android Honeycomb or higher: * {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li> * </ul> * @param httpRequestInitializer HTTP request initializer or {@code null} for none * @since 1.7 */ public MyBusinessBusinessCalls(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory, com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) { this(new Builder(transport, jsonFactory, httpRequestInitializer)); } /** * @param builder builder */ MyBusinessBusinessCalls(Builder builder) { super(builder); } @Override protected void initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest<?> httpClientRequest) throws java.io.IOException { super.initialize(httpClientRequest); } /** * An accessor for creating requests from the Locations collection. 
* * <p>The typical use is:</p> * <pre> * {@code MyBusinessBusinessCalls mybusinessbusinesscalls = new MyBusinessBusinessCalls(...);} * {@code MyBusinessBusinessCalls.Locations.List request = mybusinessbusinesscalls.locations().list(parameters ...)} * </pre> * * @return the resource collection */ public Locations locations() { return new Locations(); } /** * The "locations" collection of methods. */ public class Locations { /** * Returns the Business calls settings resource for the given location. * * Create a request for the method "locations.getBusinesscallssettings". * * This request holds the parameters needed by the mybusinessbusinesscalls server. After setting * any optional parameters, call the {@link GetBusinesscallssettings#execute()} method to invoke the * remote operation. * * @param name Required. The BusinessCallsSettings to get. The `name` field is used to identify the business call * settings to get. Format: locations/{location_id}/businesscallssettings. * @return the request */ public GetBusinesscallssettings getBusinesscallssettings(java.lang.String name) throws java.io.IOException { GetBusinesscallssettings result = new GetBusinesscallssettings(name); initialize(result); return result; } public class GetBusinesscallssettings extends MyBusinessBusinessCallsRequest<com.google.api.services.mybusinessbusinesscalls.v1.model.BusinessCallsSettings> { private static final String REST_PATH = "v1/{+name}"; private final java.util.regex.Pattern NAME_PATTERN = java.util.regex.Pattern.compile("^locations/[^/]+/businesscallssettings$"); /** * Returns the Business calls settings resource for the given location. * * Create a request for the method "locations.getBusinesscallssettings". * * This request holds the parameters needed by the the mybusinessbusinesscalls server. After * setting any optional parameters, call the {@link GetBusinesscallssettings#execute()} method to * invoke the remote operation. 
<p> {@link GetBusinesscallssettings#initialize(com.google.api.clie * nt.googleapis.services.AbstractGoogleClientRequest)} must be called to initialize this instance * immediately after invoking the constructor. </p> * * @param name Required. The BusinessCallsSettings to get. The `name` field is used to identify the business call * settings to get. Format: locations/{location_id}/businesscallssettings. * @since 1.13 */ protected GetBusinesscallssettings(java.lang.String name) { super(MyBusinessBusinessCalls.this, "GET", REST_PATH, null, com.google.api.services.mybusinessbusinesscalls.v1.model.BusinessCallsSettings.class); this.name = com.google.api.client.util.Preconditions.checkNotNull(name, "Required parameter name must be specified."); if (!getSuppressPatternChecks()) { com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(), "Parameter name must conform to the pattern " + "^locations/[^/]+/businesscallssettings$"); } } @Override public com.google.api.client.http.HttpResponse executeUsingHead() throws java.io.IOException { return super.executeUsingHead(); } @Override public com.google.api.client.http.HttpRequest buildHttpRequestUsingHead() throws java.io.IOException { return super.buildHttpRequestUsingHead(); } @Override public GetBusinesscallssettings set$Xgafv(java.lang.String $Xgafv) { return (GetBusinesscallssettings) super.set$Xgafv($Xgafv); } @Override public GetBusinesscallssettings setAccessToken(java.lang.String accessToken) { return (GetBusinesscallssettings) super.setAccessToken(accessToken); } @Override public GetBusinesscallssettings setAlt(java.lang.String alt) { return (GetBusinesscallssettings) super.setAlt(alt); } @Override public GetBusinesscallssettings setCallback(java.lang.String callback) { return (GetBusinesscallssettings) super.setCallback(callback); } @Override public GetBusinesscallssettings setFields(java.lang.String fields) { return (GetBusinesscallssettings) super.setFields(fields); } @Override 
public GetBusinesscallssettings setKey(java.lang.String key) { return (GetBusinesscallssettings) super.setKey(key); } @Override public GetBusinesscallssettings setOauthToken(java.lang.String oauthToken) { return (GetBusinesscallssettings) super.setOauthToken(oauthToken); } @Override public GetBusinesscallssettings setPrettyPrint(java.lang.Boolean prettyPrint) { return (GetBusinesscallssettings) super.setPrettyPrint(prettyPrint); } @Override public GetBusinesscallssettings setQuotaUser(java.lang.String quotaUser) { return (GetBusinesscallssettings) super.setQuotaUser(quotaUser); } @Override public GetBusinesscallssettings setUploadType(java.lang.String uploadType) { return (GetBusinesscallssettings) super.setUploadType(uploadType); } @Override public GetBusinesscallssettings setUploadProtocol(java.lang.String uploadProtocol) { return (GetBusinesscallssettings) super.setUploadProtocol(uploadProtocol); } /** * Required. The BusinessCallsSettings to get. The `name` field is used to identify the * business call settings to get. Format: locations/{location_id}/businesscallssettings. */ @com.google.api.client.util.Key private java.lang.String name; /** Required. The BusinessCallsSettings to get. The `name` field is used to identify the business call settings to get. Format: locations/{location_id}/businesscallssettings. */ public java.lang.String getName() { return name; } /** * Required. The BusinessCallsSettings to get. The `name` field is used to identify the * business call settings to get. Format: locations/{location_id}/businesscallssettings. 
*/ public GetBusinesscallssettings setName(java.lang.String name) { if (!getSuppressPatternChecks()) { com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(), "Parameter name must conform to the pattern " + "^locations/[^/]+/businesscallssettings$"); } this.name = name; return this; } @Override public GetBusinesscallssettings set(String parameterName, Object value) { return (GetBusinesscallssettings) super.set(parameterName, value); } } /** * Updates the Business call settings for the specified location. * * Create a request for the method "locations.updateBusinesscallssettings". * * This request holds the parameters needed by the mybusinessbusinesscalls server. After setting * any optional parameters, call the {@link UpdateBusinesscallssettings#execute()} method to invoke * the remote operation. * * @param name Required. The resource name of the calls settings. Format: * locations/{location}/businesscallssettings * @param content the {@link com.google.api.services.mybusinessbusinesscalls.v1.model.BusinessCallsSettings} * @return the request */ public UpdateBusinesscallssettings updateBusinesscallssettings(java.lang.String name, com.google.api.services.mybusinessbusinesscalls.v1.model.BusinessCallsSettings content) throws java.io.IOException { UpdateBusinesscallssettings result = new UpdateBusinesscallssettings(name, content); initialize(result); return result; } public class UpdateBusinesscallssettings extends MyBusinessBusinessCallsRequest<com.google.api.services.mybusinessbusinesscalls.v1.model.BusinessCallsSettings> { private static final String REST_PATH = "v1/{+name}"; private final java.util.regex.Pattern NAME_PATTERN = java.util.regex.Pattern.compile("^locations/[^/]+/businesscallssettings$"); /** * Updates the Business call settings for the specified location. * * Create a request for the method "locations.updateBusinesscallssettings". 
* * This request holds the parameters needed by the the mybusinessbusinesscalls server. After * setting any optional parameters, call the {@link UpdateBusinesscallssettings#execute()} method * to invoke the remote operation. <p> {@link UpdateBusinesscallssettings#initialize(com.google.ap * i.client.googleapis.services.AbstractGoogleClientRequest)} must be called to initialize this * instance immediately after invoking the constructor. </p> * * @param name Required. The resource name of the calls settings. Format: * locations/{location}/businesscallssettings * @param content the {@link com.google.api.services.mybusinessbusinesscalls.v1.model.BusinessCallsSettings} * @since 1.13 */ protected UpdateBusinesscallssettings(java.lang.String name, com.google.api.services.mybusinessbusinesscalls.v1.model.BusinessCallsSettings content) { super(MyBusinessBusinessCalls.this, "PATCH", REST_PATH, content, com.google.api.services.mybusinessbusinesscalls.v1.model.BusinessCallsSettings.class); this.name = com.google.api.client.util.Preconditions.checkNotNull(name, "Required parameter name must be specified."); if (!getSuppressPatternChecks()) { com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(), "Parameter name must conform to the pattern " + "^locations/[^/]+/businesscallssettings$"); } } @Override public UpdateBusinesscallssettings set$Xgafv(java.lang.String $Xgafv) { return (UpdateBusinesscallssettings) super.set$Xgafv($Xgafv); } @Override public UpdateBusinesscallssettings setAccessToken(java.lang.String accessToken) { return (UpdateBusinesscallssettings) super.setAccessToken(accessToken); } @Override public UpdateBusinesscallssettings setAlt(java.lang.String alt) { return (UpdateBusinesscallssettings) super.setAlt(alt); } @Override public UpdateBusinesscallssettings setCallback(java.lang.String callback) { return (UpdateBusinesscallssettings) super.setCallback(callback); } @Override public UpdateBusinesscallssettings 
setFields(java.lang.String fields) { return (UpdateBusinesscallssettings) super.setFields(fields); } @Override public UpdateBusinesscallssettings setKey(java.lang.String key) { return (UpdateBusinesscallssettings) super.setKey(key); } @Override public UpdateBusinesscallssettings setOauthToken(java.lang.String oauthToken) { return (UpdateBusinesscallssettings) super.setOauthToken(oauthToken); } @Override public UpdateBusinesscallssettings setPrettyPrint(java.lang.Boolean prettyPrint) { return (UpdateBusinesscallssettings) super.setPrettyPrint(prettyPrint); } @Override public UpdateBusinesscallssettings setQuotaUser(java.lang.String quotaUser) { return (UpdateBusinesscallssettings) super.setQuotaUser(quotaUser); } @Override public UpdateBusinesscallssettings setUploadType(java.lang.String uploadType) { return (UpdateBusinesscallssettings) super.setUploadType(uploadType); } @Override public UpdateBusinesscallssettings setUploadProtocol(java.lang.String uploadProtocol) { return (UpdateBusinesscallssettings) super.setUploadProtocol(uploadProtocol); } /** * Required. The resource name of the calls settings. Format: * locations/{location}/businesscallssettings */ @com.google.api.client.util.Key private java.lang.String name; /** Required. The resource name of the calls settings. Format: locations/{location}/businesscallssettings */ public java.lang.String getName() { return name; } /** * Required. The resource name of the calls settings. Format: * locations/{location}/businesscallssettings */ public UpdateBusinesscallssettings setName(java.lang.String name) { if (!getSuppressPatternChecks()) { com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(), "Parameter name must conform to the pattern " + "^locations/[^/]+/businesscallssettings$"); } this.name = name; return this; } /** Required. The list of fields to update. */ @com.google.api.client.util.Key private String updateMask; /** Required. The list of fields to update. 
*/ public String getUpdateMask() { return updateMask; } /** Required. The list of fields to update. */ public UpdateBusinesscallssettings setUpdateMask(String updateMask) { this.updateMask = updateMask; return this; } @Override public UpdateBusinesscallssettings set(String parameterName, Object value) { return (UpdateBusinesscallssettings) super.set(parameterName, value); } } /** * An accessor for creating requests from the Businesscallsinsights collection. * * <p>The typical use is:</p> * <pre> * {@code MyBusinessBusinessCalls mybusinessbusinesscalls = new MyBusinessBusinessCalls(...);} * {@code MyBusinessBusinessCalls.Businesscallsinsights.List request = mybusinessbusinesscalls.businesscallsinsights().list(parameters ...)} * </pre> * * @return the resource collection */ public Businesscallsinsights businesscallsinsights() { return new Businesscallsinsights(); } /** * The "businesscallsinsights" collection of methods. */ public class Businesscallsinsights { /** * Returns insights for Business calls for a location. * * Create a request for the method "businesscallsinsights.list". * * This request holds the parameters needed by the mybusinessbusinesscalls server. After setting * any optional parameters, call the {@link List#execute()} method to invoke the remote operation. * * @param parent Required. The parent location to fetch calls insights for. Format: locations/{location_id} * @return the request */ public List list(java.lang.String parent) throws java.io.IOException { List result = new List(parent); initialize(result); return result; } public class List extends MyBusinessBusinessCallsRequest<com.google.api.services.mybusinessbusinesscalls.v1.model.ListBusinessCallsInsightsResponse> { private static final String REST_PATH = "v1/{+parent}/businesscallsinsights"; private final java.util.regex.Pattern PARENT_PATTERN = java.util.regex.Pattern.compile("^locations/[^/]+$"); /** * Returns insights for Business calls for a location. 
* * Create a request for the method "businesscallsinsights.list". * * This request holds the parameters needed by the the mybusinessbusinesscalls server. After * setting any optional parameters, call the {@link List#execute()} method to invoke the remote * operation. <p> {@link * List#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} must be * called to initialize this instance immediately after invoking the constructor. </p> * * @param parent Required. The parent location to fetch calls insights for. Format: locations/{location_id} * @since 1.13 */ protected List(java.lang.String parent) { super(MyBusinessBusinessCalls.this, "GET", REST_PATH, null, com.google.api.services.mybusinessbusinesscalls.v1.model.ListBusinessCallsInsightsResponse.class); this.parent = com.google.api.client.util.Preconditions.checkNotNull(parent, "Required parameter parent must be specified."); if (!getSuppressPatternChecks()) { com.google.api.client.util.Preconditions.checkArgument(PARENT_PATTERN.matcher(parent).matches(), "Parameter parent must conform to the pattern " + "^locations/[^/]+$"); } } @Override public com.google.api.client.http.HttpResponse executeUsingHead() throws java.io.IOException { return super.executeUsingHead(); } @Override public com.google.api.client.http.HttpRequest buildHttpRequestUsingHead() throws java.io.IOException { return super.buildHttpRequestUsingHead(); } @Override public List set$Xgafv(java.lang.String $Xgafv) { return (List) super.set$Xgafv($Xgafv); } @Override public List setAccessToken(java.lang.String accessToken) { return (List) super.setAccessToken(accessToken); } @Override public List setAlt(java.lang.String alt) { return (List) super.setAlt(alt); } @Override public List setCallback(java.lang.String callback) { return (List) super.setCallback(callback); } @Override public List setFields(java.lang.String fields) { return (List) super.setFields(fields); } @Override public List setKey(java.lang.String key) { return 
(List) super.setKey(key); } @Override public List setOauthToken(java.lang.String oauthToken) { return (List) super.setOauthToken(oauthToken); } @Override public List setPrettyPrint(java.lang.Boolean prettyPrint) { return (List) super.setPrettyPrint(prettyPrint); } @Override public List setQuotaUser(java.lang.String quotaUser) { return (List) super.setQuotaUser(quotaUser); } @Override public List setUploadType(java.lang.String uploadType) { return (List) super.setUploadType(uploadType); } @Override public List setUploadProtocol(java.lang.String uploadProtocol) { return (List) super.setUploadProtocol(uploadProtocol); } /** * Required. The parent location to fetch calls insights for. Format: * locations/{location_id} */ @com.google.api.client.util.Key private java.lang.String parent; /** Required. The parent location to fetch calls insights for. Format: locations/{location_id} */ public java.lang.String getParent() { return parent; } /** * Required. The parent location to fetch calls insights for. Format: * locations/{location_id} */ public List setParent(java.lang.String parent) { if (!getSuppressPatternChecks()) { com.google.api.client.util.Preconditions.checkArgument(PARENT_PATTERN.matcher(parent).matches(), "Parameter parent must conform to the pattern " + "^locations/[^/]+$"); } this.parent = parent; return this; } /** * Optional. A filter constraining the calls insights to return. The response includes only * entries that match the filter. If the MetricType is not provided, AGGREGATE_COUNT is * returned. If no end_date is provided, the last date for which data is available is used. * If no start_date is provided, we will default to the first date for which data is * available, which is currently 6 months. If start_date is before the date when data is * available, data is returned starting from the date when it is available. At this time we * support following filters. 1. start_date="DATE" where date is in YYYY-MM-DD format. 2. 
* end_date="DATE" where date is in YYYY-MM-DD format. 3. metric_type=XYZ where XYZ is a * valid MetricType. 4. Conjunctions(AND) of all of the above. e.g., "start_date=2021-08-01 * AND end_date=2021-08-10 AND metric_type=AGGREGATE_COUNT" The AGGREGATE_COUNT metric_type * ignores the DD part of the date. */ @com.google.api.client.util.Key private java.lang.String filter; /** Optional. A filter constraining the calls insights to return. The response includes only entries that match the filter. If the MetricType is not provided, AGGREGATE_COUNT is returned. If no end_date is provided, the last date for which data is available is used. If no start_date is provided, we will default to the first date for which data is available, which is currently 6 months. If start_date is before the date when data is available, data is returned starting from the date when it is available. At this time we support following filters. 1. start_date="DATE" where date is in YYYY-MM-DD format. 2. end_date="DATE" where date is in YYYY-MM-DD format. 3. metric_type=XYZ where XYZ is a valid MetricType. 4. Conjunctions(AND) of all of the above. e.g., "start_date=2021-08-01 AND end_date=2021-08-10 AND metric_type=AGGREGATE_COUNT" The AGGREGATE_COUNT metric_type ignores the DD part of the date. */ public java.lang.String getFilter() { return filter; } /** * Optional. A filter constraining the calls insights to return. The response includes only * entries that match the filter. If the MetricType is not provided, AGGREGATE_COUNT is * returned. If no end_date is provided, the last date for which data is available is used. * If no start_date is provided, we will default to the first date for which data is * available, which is currently 6 months. If start_date is before the date when data is * available, data is returned starting from the date when it is available. At this time we * support following filters. 1. start_date="DATE" where date is in YYYY-MM-DD format. 2. 
* end_date="DATE" where date is in YYYY-MM-DD format. 3. metric_type=XYZ where XYZ is a * valid MetricType. 4. Conjunctions(AND) of all of the above. e.g., "start_date=2021-08-01 * AND end_date=2021-08-10 AND metric_type=AGGREGATE_COUNT" The AGGREGATE_COUNT metric_type * ignores the DD part of the date. */ public List setFilter(java.lang.String filter) { this.filter = filter; return this; } /** * Optional. The maximum number of BusinessCallsInsights to return. If unspecified, at most * 20 will be returned. Some of the metric_types(e.g, AGGREGATE_COUNT) returns a single * page. For these metrics, the page_size is ignored. */ @com.google.api.client.util.Key private java.lang.Integer pageSize; /** Optional. The maximum number of BusinessCallsInsights to return. If unspecified, at most 20 will be returned. Some of the metric_types(e.g, AGGREGATE_COUNT) returns a single page. For these metrics, the page_size is ignored. */ public java.lang.Integer getPageSize() { return pageSize; } /** * Optional. The maximum number of BusinessCallsInsights to return. If unspecified, at most * 20 will be returned. Some of the metric_types(e.g, AGGREGATE_COUNT) returns a single * page. For these metrics, the page_size is ignored. */ public List setPageSize(java.lang.Integer pageSize) { this.pageSize = pageSize; return this; } /** * Optional. A page token, received from a previous `ListBusinessCallsInsights` call. * Provide this to retrieve the subsequent page. When paginating, all other parameters * provided to `ListBusinessCallsInsights` must match the call that provided the page token. * Some of the metric_types (e.g, AGGREGATE_COUNT) returns a single page. For these metrics, * the pake_token is ignored. */ @com.google.api.client.util.Key private java.lang.String pageToken; /** Optional. A page token, received from a previous `ListBusinessCallsInsights` call. Provide this to retrieve the subsequent page. 
When paginating, all other parameters provided to `ListBusinessCallsInsights` must match the call that provided the page token. Some of the metric_types (e.g, AGGREGATE_COUNT) returns a single page. For these metrics, the pake_token is ignored. */ public java.lang.String getPageToken() { return pageToken; } /** * Optional. A page token, received from a previous `ListBusinessCallsInsights` call. * Provide this to retrieve the subsequent page. When paginating, all other parameters * provided to `ListBusinessCallsInsights` must match the call that provided the page token. * Some of the metric_types (e.g, AGGREGATE_COUNT) returns a single page. For these metrics, * the pake_token is ignored. */ public List setPageToken(java.lang.String pageToken) { this.pageToken = pageToken; return this; } @Override public List set(String parameterName, Object value) { return (List) super.set(parameterName, value); } } } } /** * Builder for {@link MyBusinessBusinessCalls}. * * <p> * Implementation is not thread-safe. * </p> * * @since 1.3.0 */ public static final class Builder extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient.Builder { private static String chooseEndpoint(com.google.api.client.http.HttpTransport transport) { // If the GOOGLE_API_USE_MTLS_ENDPOINT environment variable value is "always", use mTLS endpoint. // If the env variable is "auto", use mTLS endpoint if and only if the transport is mTLS. // Use the regular endpoint for all other cases. String useMtlsEndpoint = System.getenv("GOOGLE_API_USE_MTLS_ENDPOINT"); useMtlsEndpoint = useMtlsEndpoint == null ? "auto" : useMtlsEndpoint; if ("always".equals(useMtlsEndpoint) || ("auto".equals(useMtlsEndpoint) && transport != null && transport.isMtls())) { return DEFAULT_MTLS_ROOT_URL; } return DEFAULT_ROOT_URL; } /** * Returns an instance of a new builder. 
* * @param transport HTTP transport, which should normally be: * <ul> * <li>Google App Engine: * {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li> * <li>Android: {@code newCompatibleTransport} from * {@code com.google.api.client.extensions.android.http.AndroidHttp}</li> * <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()} * </li> * </ul> * @param jsonFactory JSON factory, which may be: * <ul> * <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li> * <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li> * <li>Android Honeycomb or higher: * {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li> * </ul> * @param httpRequestInitializer HTTP request initializer or {@code null} for none * @since 1.7 */ public Builder(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory, com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) { super( transport, jsonFactory, Builder.chooseEndpoint(transport), DEFAULT_SERVICE_PATH, httpRequestInitializer, false); setBatchPath(DEFAULT_BATCH_PATH); } /** Builds a new instance of {@link MyBusinessBusinessCalls}. 
*/ @Override public MyBusinessBusinessCalls build() { return new MyBusinessBusinessCalls(this); } @Override public Builder setRootUrl(String rootUrl) { return (Builder) super.setRootUrl(rootUrl); } @Override public Builder setServicePath(String servicePath) { return (Builder) super.setServicePath(servicePath); } @Override public Builder setBatchPath(String batchPath) { return (Builder) super.setBatchPath(batchPath); } @Override public Builder setHttpRequestInitializer(com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) { return (Builder) super.setHttpRequestInitializer(httpRequestInitializer); } @Override public Builder setApplicationName(String applicationName) { return (Builder) super.setApplicationName(applicationName); } @Override public Builder setSuppressPatternChecks(boolean suppressPatternChecks) { return (Builder) super.setSuppressPatternChecks(suppressPatternChecks); } @Override public Builder setSuppressRequiredParameterChecks(boolean suppressRequiredParameterChecks) { return (Builder) super.setSuppressRequiredParameterChecks(suppressRequiredParameterChecks); } @Override public Builder setSuppressAllChecks(boolean suppressAllChecks) { return (Builder) super.setSuppressAllChecks(suppressAllChecks); } /** * Set the {@link MyBusinessBusinessCallsRequestInitializer}. * * @since 1.12 */ public Builder setMyBusinessBusinessCallsRequestInitializer( MyBusinessBusinessCallsRequestInitializer mybusinessbusinesscallsRequestInitializer) { return (Builder) super.setGoogleClientRequestInitializer(mybusinessbusinesscallsRequestInitializer); } @Override public Builder setGoogleClientRequestInitializer( com.google.api.client.googleapis.services.GoogleClientRequestInitializer googleClientRequestInitializer) { return (Builder) super.setGoogleClientRequestInitializer(googleClientRequestInitializer); } } }
googleapis/google-cloud-java
36,357
java-alloydb/proto-google-cloud-alloydb-v1/src/main/java/com/google/cloud/alloydb/v1/InstanceOrBuilder.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/alloydb/v1/resources.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.alloydb.v1; public interface InstanceOrBuilder extends // @@protoc_insertion_point(interface_extends:google.cloud.alloydb.v1.Instance) com.google.protobuf.MessageOrBuilder { /** * * * <pre> * Output only. The name of the instance resource with the format: * * projects/{project}/locations/{region}/clusters/{cluster_id}/instances/{instance_id} * where the cluster and instance ID segments should satisfy the regex * expression `[a-z]([a-z0-9-]{0,61}[a-z0-9])?`, e.g. 1-63 characters of * lowercase letters, numbers, and dashes, starting with a letter, and ending * with a letter or number. For more details see https://google.aip.dev/122. * The prefix of the instance resource name is the name of the parent * resource: * * projects/{project}/locations/{region}/clusters/{cluster_id} * </pre> * * <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The name. */ java.lang.String getName(); /** * * * <pre> * Output only. The name of the instance resource with the format: * * projects/{project}/locations/{region}/clusters/{cluster_id}/instances/{instance_id} * where the cluster and instance ID segments should satisfy the regex * expression `[a-z]([a-z0-9-]{0,61}[a-z0-9])?`, e.g. 
1-63 characters of * lowercase letters, numbers, and dashes, starting with a letter, and ending * with a letter or number. For more details see https://google.aip.dev/122. * The prefix of the instance resource name is the name of the parent * resource: * * projects/{project}/locations/{region}/clusters/{cluster_id} * </pre> * * <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The bytes for name. */ com.google.protobuf.ByteString getNameBytes(); /** * * * <pre> * User-settable and human-readable display name for the Instance. * </pre> * * <code>string display_name = 2;</code> * * @return The displayName. */ java.lang.String getDisplayName(); /** * * * <pre> * User-settable and human-readable display name for the Instance. * </pre> * * <code>string display_name = 2;</code> * * @return The bytes for displayName. */ com.google.protobuf.ByteString getDisplayNameBytes(); /** * * * <pre> * Output only. The system-generated UID of the resource. The UID is assigned * when the resource is created, and it is retained until it is deleted. * </pre> * * <code>string uid = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The uid. */ java.lang.String getUid(); /** * * * <pre> * Output only. The system-generated UID of the resource. The UID is assigned * when the resource is created, and it is retained until it is deleted. * </pre> * * <code>string uid = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The bytes for uid. */ com.google.protobuf.ByteString getUidBytes(); /** * * * <pre> * Output only. Create time stamp * </pre> * * <code>.google.protobuf.Timestamp create_time = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return Whether the createTime field is set. */ boolean hasCreateTime(); /** * * * <pre> * Output only. Create time stamp * </pre> * * <code>.google.protobuf.Timestamp create_time = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The createTime. 
*/ com.google.protobuf.Timestamp getCreateTime(); /** * * * <pre> * Output only. Create time stamp * </pre> * * <code>.google.protobuf.Timestamp create_time = 4 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder(); /** * * * <pre> * Output only. Update time stamp * </pre> * * <code>.google.protobuf.Timestamp update_time = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return Whether the updateTime field is set. */ boolean hasUpdateTime(); /** * * * <pre> * Output only. Update time stamp * </pre> * * <code>.google.protobuf.Timestamp update_time = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The updateTime. */ com.google.protobuf.Timestamp getUpdateTime(); /** * * * <pre> * Output only. Update time stamp * </pre> * * <code>.google.protobuf.Timestamp update_time = 5 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ com.google.protobuf.TimestampOrBuilder getUpdateTimeOrBuilder(); /** * * * <pre> * Output only. Delete time stamp * </pre> * * <code>.google.protobuf.Timestamp delete_time = 6 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return Whether the deleteTime field is set. */ boolean hasDeleteTime(); /** * * * <pre> * Output only. Delete time stamp * </pre> * * <code>.google.protobuf.Timestamp delete_time = 6 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The deleteTime. */ com.google.protobuf.Timestamp getDeleteTime(); /** * * * <pre> * Output only. 
Delete time stamp * </pre> * * <code>.google.protobuf.Timestamp delete_time = 6 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ com.google.protobuf.TimestampOrBuilder getDeleteTimeOrBuilder(); /** * * * <pre> * Labels as key value pairs * </pre> * * <code>map&lt;string, string&gt; labels = 7;</code> */ int getLabelsCount(); /** * * * <pre> * Labels as key value pairs * </pre> * * <code>map&lt;string, string&gt; labels = 7;</code> */ boolean containsLabels(java.lang.String key); /** Use {@link #getLabelsMap()} instead. */ @java.lang.Deprecated java.util.Map<java.lang.String, java.lang.String> getLabels(); /** * * * <pre> * Labels as key value pairs * </pre> * * <code>map&lt;string, string&gt; labels = 7;</code> */ java.util.Map<java.lang.String, java.lang.String> getLabelsMap(); /** * * * <pre> * Labels as key value pairs * </pre> * * <code>map&lt;string, string&gt; labels = 7;</code> */ /* nullable */ java.lang.String getLabelsOrDefault( java.lang.String key, /* nullable */ java.lang.String defaultValue); /** * * * <pre> * Labels as key value pairs * </pre> * * <code>map&lt;string, string&gt; labels = 7;</code> */ java.lang.String getLabelsOrThrow(java.lang.String key); /** * * * <pre> * Output only. The current serving state of the instance. * </pre> * * <code> * .google.cloud.alloydb.v1.Instance.State state = 8 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The enum numeric value on the wire for state. */ int getStateValue(); /** * * * <pre> * Output only. The current serving state of the instance. * </pre> * * <code> * .google.cloud.alloydb.v1.Instance.State state = 8 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The state. */ com.google.cloud.alloydb.v1.Instance.State getState(); /** * * * <pre> * Required. The type of the instance. Specified at creation time. 
* </pre> * * <code> * .google.cloud.alloydb.v1.Instance.InstanceType instance_type = 9 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The enum numeric value on the wire for instanceType. */ int getInstanceTypeValue(); /** * * * <pre> * Required. The type of the instance. Specified at creation time. * </pre> * * <code> * .google.cloud.alloydb.v1.Instance.InstanceType instance_type = 9 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The instanceType. */ com.google.cloud.alloydb.v1.Instance.InstanceType getInstanceType(); /** * * * <pre> * Configurations for the machines that host the underlying * database engine. * </pre> * * <code>.google.cloud.alloydb.v1.Instance.MachineConfig machine_config = 10;</code> * * @return Whether the machineConfig field is set. */ boolean hasMachineConfig(); /** * * * <pre> * Configurations for the machines that host the underlying * database engine. * </pre> * * <code>.google.cloud.alloydb.v1.Instance.MachineConfig machine_config = 10;</code> * * @return The machineConfig. */ com.google.cloud.alloydb.v1.Instance.MachineConfig getMachineConfig(); /** * * * <pre> * Configurations for the machines that host the underlying * database engine. * </pre> * * <code>.google.cloud.alloydb.v1.Instance.MachineConfig machine_config = 10;</code> */ com.google.cloud.alloydb.v1.Instance.MachineConfigOrBuilder getMachineConfigOrBuilder(); /** * * * <pre> * Availability type of an Instance. * If empty, defaults to REGIONAL for primary instances. * For read pools, availability_type is always UNSPECIFIED. Instances in the * read pools are evenly distributed across available zones within the region * (i.e. read pools with more than one node will have a node in at * least two zones). * </pre> * * <code>.google.cloud.alloydb.v1.Instance.AvailabilityType availability_type = 11;</code> * * @return The enum numeric value on the wire for availabilityType. 
*/ int getAvailabilityTypeValue(); /** * * * <pre> * Availability type of an Instance. * If empty, defaults to REGIONAL for primary instances. * For read pools, availability_type is always UNSPECIFIED. Instances in the * read pools are evenly distributed across available zones within the region * (i.e. read pools with more than one node will have a node in at * least two zones). * </pre> * * <code>.google.cloud.alloydb.v1.Instance.AvailabilityType availability_type = 11;</code> * * @return The availabilityType. */ com.google.cloud.alloydb.v1.Instance.AvailabilityType getAvailabilityType(); /** * * * <pre> * The Compute Engine zone that the instance should serve from, per * https://cloud.google.com/compute/docs/regions-zones * This can ONLY be specified for ZONAL instances. * If present for a REGIONAL instance, an error will be thrown. * If this is absent for a ZONAL instance, instance is created in a random * zone with available capacity. * </pre> * * <code>string gce_zone = 12;</code> * * @return The gceZone. */ java.lang.String getGceZone(); /** * * * <pre> * The Compute Engine zone that the instance should serve from, per * https://cloud.google.com/compute/docs/regions-zones * This can ONLY be specified for ZONAL instances. * If present for a REGIONAL instance, an error will be thrown. * If this is absent for a ZONAL instance, instance is created in a random * zone with available capacity. * </pre> * * <code>string gce_zone = 12;</code> * * @return The bytes for gceZone. */ com.google.protobuf.ByteString getGceZoneBytes(); /** * * * <pre> * Database flags. Set at the instance level. * They are copied from the primary instance on secondary instance creation. * Flags that have restrictions default to the value at primary * instance on read instances during creation. Read instances can set new * flags or override existing flags that are relevant for reads, for example, * for enabling columnar cache on a read instance. 
Flags set on read instance * might or might not be present on the primary instance. * * * This is a list of "key": "value" pairs. * "key": The name of the flag. These flags are passed at instance setup time, * so include both server options and system variables for Postgres. Flags are * specified with underscores, not hyphens. * "value": The value of the flag. Booleans are set to **on** for true * and **off** for false. This field must be omitted if the flag * doesn't take a value. * </pre> * * <code>map&lt;string, string&gt; database_flags = 13;</code> */ int getDatabaseFlagsCount(); /** * * * <pre> * Database flags. Set at the instance level. * They are copied from the primary instance on secondary instance creation. * Flags that have restrictions default to the value at primary * instance on read instances during creation. Read instances can set new * flags or override existing flags that are relevant for reads, for example, * for enabling columnar cache on a read instance. Flags set on read instance * might or might not be present on the primary instance. * * * This is a list of "key": "value" pairs. * "key": The name of the flag. These flags are passed at instance setup time, * so include both server options and system variables for Postgres. Flags are * specified with underscores, not hyphens. * "value": The value of the flag. Booleans are set to **on** for true * and **off** for false. This field must be omitted if the flag * doesn't take a value. * </pre> * * <code>map&lt;string, string&gt; database_flags = 13;</code> */ boolean containsDatabaseFlags(java.lang.String key); /** Use {@link #getDatabaseFlagsMap()} instead. */ @java.lang.Deprecated java.util.Map<java.lang.String, java.lang.String> getDatabaseFlags(); /** * * * <pre> * Database flags. Set at the instance level. * They are copied from the primary instance on secondary instance creation. * Flags that have restrictions default to the value at primary * instance on read instances during creation. 
Read instances can set new * flags or override existing flags that are relevant for reads, for example, * for enabling columnar cache on a read instance. Flags set on read instance * might or might not be present on the primary instance. * * * This is a list of "key": "value" pairs. * "key": The name of the flag. These flags are passed at instance setup time, * so include both server options and system variables for Postgres. Flags are * specified with underscores, not hyphens. * "value": The value of the flag. Booleans are set to **on** for true * and **off** for false. This field must be omitted if the flag * doesn't take a value. * </pre> * * <code>map&lt;string, string&gt; database_flags = 13;</code> */ java.util.Map<java.lang.String, java.lang.String> getDatabaseFlagsMap(); /** * * * <pre> * Database flags. Set at the instance level. * They are copied from the primary instance on secondary instance creation. * Flags that have restrictions default to the value at primary * instance on read instances during creation. Read instances can set new * flags or override existing flags that are relevant for reads, for example, * for enabling columnar cache on a read instance. Flags set on read instance * might or might not be present on the primary instance. * * * This is a list of "key": "value" pairs. * "key": The name of the flag. These flags are passed at instance setup time, * so include both server options and system variables for Postgres. Flags are * specified with underscores, not hyphens. * "value": The value of the flag. Booleans are set to **on** for true * and **off** for false. This field must be omitted if the flag * doesn't take a value. * </pre> * * <code>map&lt;string, string&gt; database_flags = 13;</code> */ /* nullable */ java.lang.String getDatabaseFlagsOrDefault( java.lang.String key, /* nullable */ java.lang.String defaultValue); /** * * * <pre> * Database flags. Set at the instance level. 
* They are copied from the primary instance on secondary instance creation. * Flags that have restrictions default to the value at primary * instance on read instances during creation. Read instances can set new * flags or override existing flags that are relevant for reads, for example, * for enabling columnar cache on a read instance. Flags set on read instance * might or might not be present on the primary instance. * * * This is a list of "key": "value" pairs. * "key": The name of the flag. These flags are passed at instance setup time, * so include both server options and system variables for Postgres. Flags are * specified with underscores, not hyphens. * "value": The value of the flag. Booleans are set to **on** for true * and **off** for false. This field must be omitted if the flag * doesn't take a value. * </pre> * * <code>map&lt;string, string&gt; database_flags = 13;</code> */ java.lang.String getDatabaseFlagsOrThrow(java.lang.String key); /** * * * <pre> * Output only. This is set for the read-write VM of the PRIMARY instance * only. * </pre> * * <code> * .google.cloud.alloydb.v1.Instance.Node writable_node = 19 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return Whether the writableNode field is set. */ boolean hasWritableNode(); /** * * * <pre> * Output only. This is set for the read-write VM of the PRIMARY instance * only. * </pre> * * <code> * .google.cloud.alloydb.v1.Instance.Node writable_node = 19 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The writableNode. */ com.google.cloud.alloydb.v1.Instance.Node getWritableNode(); /** * * * <pre> * Output only. This is set for the read-write VM of the PRIMARY instance * only. * </pre> * * <code> * .google.cloud.alloydb.v1.Instance.Node writable_node = 19 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ com.google.cloud.alloydb.v1.Instance.NodeOrBuilder getWritableNodeOrBuilder(); /** * * * <pre> * Output only. 
List of available read-only VMs in this instance, including * the standby for a PRIMARY instance. * </pre> * * <code> * repeated .google.cloud.alloydb.v1.Instance.Node nodes = 20 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ java.util.List<com.google.cloud.alloydb.v1.Instance.Node> getNodesList(); /** * * * <pre> * Output only. List of available read-only VMs in this instance, including * the standby for a PRIMARY instance. * </pre> * * <code> * repeated .google.cloud.alloydb.v1.Instance.Node nodes = 20 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ com.google.cloud.alloydb.v1.Instance.Node getNodes(int index); /** * * * <pre> * Output only. List of available read-only VMs in this instance, including * the standby for a PRIMARY instance. * </pre> * * <code> * repeated .google.cloud.alloydb.v1.Instance.Node nodes = 20 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ int getNodesCount(); /** * * * <pre> * Output only. List of available read-only VMs in this instance, including * the standby for a PRIMARY instance. * </pre> * * <code> * repeated .google.cloud.alloydb.v1.Instance.Node nodes = 20 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ java.util.List<? extends com.google.cloud.alloydb.v1.Instance.NodeOrBuilder> getNodesOrBuilderList(); /** * * * <pre> * Output only. List of available read-only VMs in this instance, including * the standby for a PRIMARY instance. * </pre> * * <code> * repeated .google.cloud.alloydb.v1.Instance.Node nodes = 20 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ com.google.cloud.alloydb.v1.Instance.NodeOrBuilder getNodesOrBuilder(int index); /** * * * <pre> * Configuration for query insights. * </pre> * * <code>.google.cloud.alloydb.v1.Instance.QueryInsightsInstanceConfig query_insights_config = 21; * </code> * * @return Whether the queryInsightsConfig field is set. */ boolean hasQueryInsightsConfig(); /** * * * <pre> * Configuration for query insights. 
* </pre> * * <code>.google.cloud.alloydb.v1.Instance.QueryInsightsInstanceConfig query_insights_config = 21; * </code> * * @return The queryInsightsConfig. */ com.google.cloud.alloydb.v1.Instance.QueryInsightsInstanceConfig getQueryInsightsConfig(); /** * * * <pre> * Configuration for query insights. * </pre> * * <code>.google.cloud.alloydb.v1.Instance.QueryInsightsInstanceConfig query_insights_config = 21; * </code> */ com.google.cloud.alloydb.v1.Instance.QueryInsightsInstanceConfigOrBuilder getQueryInsightsConfigOrBuilder(); /** * * * <pre> * Configuration for observability. * </pre> * * <code>.google.cloud.alloydb.v1.Instance.ObservabilityInstanceConfig observability_config = 26; * </code> * * @return Whether the observabilityConfig field is set. */ boolean hasObservabilityConfig(); /** * * * <pre> * Configuration for observability. * </pre> * * <code>.google.cloud.alloydb.v1.Instance.ObservabilityInstanceConfig observability_config = 26; * </code> * * @return The observabilityConfig. */ com.google.cloud.alloydb.v1.Instance.ObservabilityInstanceConfig getObservabilityConfig(); /** * * * <pre> * Configuration for observability. * </pre> * * <code>.google.cloud.alloydb.v1.Instance.ObservabilityInstanceConfig observability_config = 26; * </code> */ com.google.cloud.alloydb.v1.Instance.ObservabilityInstanceConfigOrBuilder getObservabilityConfigOrBuilder(); /** * * * <pre> * Read pool instance configuration. * This is required if the value of instanceType is READ_POOL. * </pre> * * <code>.google.cloud.alloydb.v1.Instance.ReadPoolConfig read_pool_config = 14;</code> * * @return Whether the readPoolConfig field is set. */ boolean hasReadPoolConfig(); /** * * * <pre> * Read pool instance configuration. * This is required if the value of instanceType is READ_POOL. * </pre> * * <code>.google.cloud.alloydb.v1.Instance.ReadPoolConfig read_pool_config = 14;</code> * * @return The readPoolConfig. 
*/ com.google.cloud.alloydb.v1.Instance.ReadPoolConfig getReadPoolConfig(); /** * * * <pre> * Read pool instance configuration. * This is required if the value of instanceType is READ_POOL. * </pre> * * <code>.google.cloud.alloydb.v1.Instance.ReadPoolConfig read_pool_config = 14;</code> */ com.google.cloud.alloydb.v1.Instance.ReadPoolConfigOrBuilder getReadPoolConfigOrBuilder(); /** * * * <pre> * Output only. The IP address for the Instance. * This is the connection endpoint for an end-user application. * </pre> * * <code>string ip_address = 15 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The ipAddress. */ java.lang.String getIpAddress(); /** * * * <pre> * Output only. The IP address for the Instance. * This is the connection endpoint for an end-user application. * </pre> * * <code>string ip_address = 15 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The bytes for ipAddress. */ com.google.protobuf.ByteString getIpAddressBytes(); /** * * * <pre> * Output only. The public IP addresses for the Instance. This is available * ONLY when enable_public_ip is set. This is the connection endpoint for an * end-user application. * </pre> * * <code> * string public_ip_address = 27 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.field_info) = { ... } * </code> * * @return The publicIpAddress. */ java.lang.String getPublicIpAddress(); /** * * * <pre> * Output only. The public IP addresses for the Instance. This is available * ONLY when enable_public_ip is set. This is the connection endpoint for an * end-user application. * </pre> * * <code> * string public_ip_address = 27 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.field_info) = { ... } * </code> * * @return The bytes for publicIpAddress. */ com.google.protobuf.ByteString getPublicIpAddressBytes(); /** * * * <pre> * Output only. Reconciling (https://google.aip.dev/128#reconciliation). 
* Set to true if the current state of Instance does not match the user's * intended state, and the service is actively updating the resource to * reconcile them. This can happen due to user-triggered updates or * system actions like failover or maintenance. * </pre> * * <code>bool reconciling = 16 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The reconciling. */ boolean getReconciling(); /** * * * <pre> * For Resource freshness validation (https://google.aip.dev/154) * </pre> * * <code>string etag = 17;</code> * * @return The etag. */ java.lang.String getEtag(); /** * * * <pre> * For Resource freshness validation (https://google.aip.dev/154) * </pre> * * <code>string etag = 17;</code> * * @return The bytes for etag. */ com.google.protobuf.ByteString getEtagBytes(); /** * * * <pre> * Annotations to allow client tools to store small amount of arbitrary data. * This is distinct from labels. * https://google.aip.dev/128 * </pre> * * <code>map&lt;string, string&gt; annotations = 18;</code> */ int getAnnotationsCount(); /** * * * <pre> * Annotations to allow client tools to store small amount of arbitrary data. * This is distinct from labels. * https://google.aip.dev/128 * </pre> * * <code>map&lt;string, string&gt; annotations = 18;</code> */ boolean containsAnnotations(java.lang.String key); /** Use {@link #getAnnotationsMap()} instead. */ @java.lang.Deprecated java.util.Map<java.lang.String, java.lang.String> getAnnotations(); /** * * * <pre> * Annotations to allow client tools to store small amount of arbitrary data. * This is distinct from labels. * https://google.aip.dev/128 * </pre> * * <code>map&lt;string, string&gt; annotations = 18;</code> */ java.util.Map<java.lang.String, java.lang.String> getAnnotationsMap(); /** * * * <pre> * Annotations to allow client tools to store small amount of arbitrary data. * This is distinct from labels. 
* https://google.aip.dev/128 * </pre> * * <code>map&lt;string, string&gt; annotations = 18;</code> */ /* nullable */ java.lang.String getAnnotationsOrDefault( java.lang.String key, /* nullable */ java.lang.String defaultValue); /** * * * <pre> * Annotations to allow client tools to store small amount of arbitrary data. * This is distinct from labels. * https://google.aip.dev/128 * </pre> * * <code>map&lt;string, string&gt; annotations = 18;</code> */ java.lang.String getAnnotationsOrThrow(java.lang.String key); /** * * * <pre> * Optional. Client connection specific configurations * </pre> * * <code> * .google.cloud.alloydb.v1.Instance.ClientConnectionConfig client_connection_config = 23 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the clientConnectionConfig field is set. */ boolean hasClientConnectionConfig(); /** * * * <pre> * Optional. Client connection specific configurations * </pre> * * <code> * .google.cloud.alloydb.v1.Instance.ClientConnectionConfig client_connection_config = 23 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The clientConnectionConfig. */ com.google.cloud.alloydb.v1.Instance.ClientConnectionConfig getClientConnectionConfig(); /** * * * <pre> * Optional. Client connection specific configurations * </pre> * * <code> * .google.cloud.alloydb.v1.Instance.ClientConnectionConfig client_connection_config = 23 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ com.google.cloud.alloydb.v1.Instance.ClientConnectionConfigOrBuilder getClientConnectionConfigOrBuilder(); /** * * * <pre> * Output only. Reserved for future use. * </pre> * * <code>bool satisfies_pzs = 24 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The satisfiesPzs. */ boolean getSatisfiesPzs(); /** * * * <pre> * Optional. The configuration for Private Service Connect (PSC) for the * instance. 
* </pre> * * <code> * .google.cloud.alloydb.v1.Instance.PscInstanceConfig psc_instance_config = 28 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the pscInstanceConfig field is set. */ boolean hasPscInstanceConfig(); /** * * * <pre> * Optional. The configuration for Private Service Connect (PSC) for the * instance. * </pre> * * <code> * .google.cloud.alloydb.v1.Instance.PscInstanceConfig psc_instance_config = 28 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The pscInstanceConfig. */ com.google.cloud.alloydb.v1.Instance.PscInstanceConfig getPscInstanceConfig(); /** * * * <pre> * Optional. The configuration for Private Service Connect (PSC) for the * instance. * </pre> * * <code> * .google.cloud.alloydb.v1.Instance.PscInstanceConfig psc_instance_config = 28 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ com.google.cloud.alloydb.v1.Instance.PscInstanceConfigOrBuilder getPscInstanceConfigOrBuilder(); /** * * * <pre> * Optional. Instance-level network configuration. * </pre> * * <code> * .google.cloud.alloydb.v1.Instance.InstanceNetworkConfig network_config = 29 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the networkConfig field is set. */ boolean hasNetworkConfig(); /** * * * <pre> * Optional. Instance-level network configuration. * </pre> * * <code> * .google.cloud.alloydb.v1.Instance.InstanceNetworkConfig network_config = 29 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The networkConfig. */ com.google.cloud.alloydb.v1.Instance.InstanceNetworkConfig getNetworkConfig(); /** * * * <pre> * Optional. Instance-level network configuration. * </pre> * * <code> * .google.cloud.alloydb.v1.Instance.InstanceNetworkConfig network_config = 29 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ com.google.cloud.alloydb.v1.Instance.InstanceNetworkConfigOrBuilder getNetworkConfigOrBuilder(); /** * * * <pre> * Output only. 
All outbound public IP addresses configured for the instance. * </pre> * * <code> * repeated string outbound_public_ip_addresses = 34 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.field_info) = { ... } * </code> * * @return A list containing the outboundPublicIpAddresses. */ java.util.List<java.lang.String> getOutboundPublicIpAddressesList(); /** * * * <pre> * Output only. All outbound public IP addresses configured for the instance. * </pre> * * <code> * repeated string outbound_public_ip_addresses = 34 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.field_info) = { ... } * </code> * * @return The count of outboundPublicIpAddresses. */ int getOutboundPublicIpAddressesCount(); /** * * * <pre> * Output only. All outbound public IP addresses configured for the instance. * </pre> * * <code> * repeated string outbound_public_ip_addresses = 34 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.field_info) = { ... } * </code> * * @param index The index of the element to return. * @return The outboundPublicIpAddresses at the given index. */ java.lang.String getOutboundPublicIpAddresses(int index); /** * * * <pre> * Output only. All outbound public IP addresses configured for the instance. * </pre> * * <code> * repeated string outbound_public_ip_addresses = 34 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.field_info) = { ... } * </code> * * @param index The index of the value to return. * @return The bytes of the outboundPublicIpAddresses at the given index. */ com.google.protobuf.ByteString getOutboundPublicIpAddressesBytes(int index); /** * * * <pre> * Optional. Specifies whether an instance needs to spin up. Once the instance * is active, the activation policy can be updated to the `NEVER` to stop the * instance. Likewise, the activation policy can be updated to `ALWAYS` to * start the instance. 
* There are restrictions around when an instance can/cannot be activated (for * example, a read pool instance should be stopped before stopping primary * etc.). Please refer to the API documentation for more details. * </pre> * * <code> * .google.cloud.alloydb.v1.Instance.ActivationPolicy activation_policy = 35 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The enum numeric value on the wire for activationPolicy. */ int getActivationPolicyValue(); /** * * * <pre> * Optional. Specifies whether an instance needs to spin up. Once the instance * is active, the activation policy can be updated to the `NEVER` to stop the * instance. Likewise, the activation policy can be updated to `ALWAYS` to * start the instance. * There are restrictions around when an instance can/cannot be activated (for * example, a read pool instance should be stopped before stopping primary * etc.). Please refer to the API documentation for more details. * </pre> * * <code> * .google.cloud.alloydb.v1.Instance.ActivationPolicy activation_policy = 35 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The activationPolicy. */ com.google.cloud.alloydb.v1.Instance.ActivationPolicy getActivationPolicy(); /** * * * <pre> * Optional. The configuration for Managed Connection Pool (MCP). * </pre> * * <code> * .google.cloud.alloydb.v1.Instance.ConnectionPoolConfig connection_pool_config = 37 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the connectionPoolConfig field is set. */ boolean hasConnectionPoolConfig(); /** * * * <pre> * Optional. The configuration for Managed Connection Pool (MCP). * </pre> * * <code> * .google.cloud.alloydb.v1.Instance.ConnectionPoolConfig connection_pool_config = 37 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The connectionPoolConfig. */ com.google.cloud.alloydb.v1.Instance.ConnectionPoolConfig getConnectionPoolConfig(); /** * * * <pre> * Optional. The configuration for Managed Connection Pool (MCP). 
* </pre> * * <code> * .google.cloud.alloydb.v1.Instance.ConnectionPoolConfig connection_pool_config = 37 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ com.google.cloud.alloydb.v1.Instance.ConnectionPoolConfigOrBuilder getConnectionPoolConfigOrBuilder(); }
apache/ignite
36,657
modules/core/src/main/java/org/apache/ignite/internal/processors/cache/persistence/snapshot/SnapshotFutureTask.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.cache.persistence.snapshot; import java.io.Closeable; import java.io.File; import java.io.IOException; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.UUID; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicIntegerArray; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantLock; import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.function.BiConsumer; import java.util.function.BooleanSupplier; import java.util.function.IntFunction; import java.util.stream.Collectors; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.configuration.CacheConfiguration; import org.apache.ignite.internal.IgniteInternalFuture; import 
org.apache.ignite.internal.pagemem.PageIdUtils; import org.apache.ignite.internal.pagemem.store.PageStore; import org.apache.ignite.internal.pagemem.store.PageWriteListener; import org.apache.ignite.internal.pagemem.wal.record.delta.ClusterSnapshotRecord; import org.apache.ignite.internal.processors.cache.CacheGroupContext; import org.apache.ignite.internal.processors.cache.GridCacheSharedContext; import org.apache.ignite.internal.processors.cache.persistence.GridCacheDatabaseSharedManager; import org.apache.ignite.internal.processors.cache.persistence.checkpoint.CheckpointListener; import org.apache.ignite.internal.processors.cache.persistence.file.FileIO; import org.apache.ignite.internal.processors.cache.persistence.file.FileIOFactory; import org.apache.ignite.internal.processors.cache.persistence.file.FilePageStoreManager; import org.apache.ignite.internal.processors.cache.persistence.filename.FileTreeUtils; import org.apache.ignite.internal.processors.cache.persistence.filename.NodeFileTree; import org.apache.ignite.internal.processors.cache.persistence.filename.SnapshotFileTree; import org.apache.ignite.internal.processors.cache.persistence.metastorage.MetaStorage; import org.apache.ignite.internal.processors.cache.persistence.partstate.GroupPartitionId; import org.apache.ignite.internal.processors.cache.persistence.tree.io.PageIO; import org.apache.ignite.internal.processors.cache.persistence.wal.WALPointer; import org.apache.ignite.internal.processors.cache.persistence.wal.crc.FastCrc; import org.apache.ignite.internal.processors.compress.CompressionProcessor; import org.apache.ignite.internal.processors.metastorage.persistence.DistributedMetaStorageImpl; import org.apache.ignite.internal.util.GridUnsafe; import org.apache.ignite.internal.util.future.GridFutureAdapter; import org.apache.ignite.internal.util.tostring.GridToStringExclude; import org.apache.ignite.internal.util.typedef.C3; import org.apache.ignite.internal.util.typedef.internal.CU; import 
org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.jetbrains.annotations.Nullable;

import static org.apache.ignite.internal.processors.cache.persistence.filename.SnapshotFileTree.partDeltaIndexFile;
import static org.apache.ignite.internal.processors.cache.persistence.snapshot.IgniteSnapshotManager.copy;

/**
 * Local snapshot creation task scheduled on a checkpoint.
 * <p>
 * The requested map of cache groups and their partitions to include into snapshot is represented as
 * <tt>Map&lt;Integer, Set&lt;Integer&gt;&gt;</tt>. If the set of partitions is {@code null} then all OWNING
 * partitions for the given cache groups will be included into the snapshot. In this case, if all partitions
 * have OWNING state, the index partition will also be included.
 * <p>
 * If partitions for a particular cache group are not provided they will be collected and added
 * on checkpoint under the write-lock.
 */
class SnapshotFutureTask extends AbstractCreateSnapshotFutureTask implements CheckpointListener {
    /** File page store manager for accessing cache group associated files. */
    private final FilePageStoreManager pageStore;

    /** Local buffer to perform copy-on-write operations for {@link PageStoreSerialWriter}. */
    private final ThreadLocal<ByteBuffer> locBuff;

    /** Node file tree. */
    private final NodeFileTree ft;

    /** IO factory which will be used for creating snapshot delta-writers. */
    private final FileIOFactory ioFactory;

    /**
     * The length of file size per each cache partition file.
     * Partition has value greater than zero only for partitions in OWNING state.
     * Information collected under checkpoint write lock.
     */
    private final Map<GroupPartitionId, Long> partFileLengths = new HashMap<>();

    /**
     * Map of partitions to snapshot and their corresponding delta PageStores.
     * Writers are pinned to the snapshot context due to controlling partition
     * processing supplier.
     */
    private final Map<GroupPartitionId, PageStoreSerialWriter> partDeltaWriters = new HashMap<>();

    /**
     * List of cache configuration senders. Each sender is associated with a particular cache
     * configuration file to monitor its change (e.g. via SQL add/drop column or SQL index
     * create/drop operations).
     */
    private final List<CacheConfigurationSender> ccfgSndrs = new CopyOnWriteArrayList<>();

    /** {@code true} if all metastorage data must be also included into snapshot. */
    private final boolean withMetaStorage;

    /** Checkpoint end future. */
    private final CompletableFuture<Boolean> cpEndFut = new CompletableFuture<>();

    /** Future to wait until checkpoint mark phase will be finished and snapshot tasks scheduled. */
    private final GridFutureAdapter<Void> startedFut = new GridFutureAdapter<>();

    /** Pointer to {@link ClusterSnapshotRecord}. */
    private volatile @Nullable WALPointer snpPtr;

    /** Flag indicates that task already scheduled on checkpoint. */
    private final AtomicBoolean started = new AtomicBoolean();

    /** Estimated snapshot size in bytes. The value may grow during snapshot creation. */
    private final AtomicLong totalSize = new AtomicLong();

    /** Processed snapshot size in bytes. */
    private final AtomicLong processedSize = new AtomicLong();

    /** Delta writer factory: indexed writer for sequential-write mode, plain writer otherwise. */
    private final C3<PageStore, File, Integer, PageStoreSerialWriter> deltaWriterFactory =
        cctx.snapshotMgr().sequentialWrite() ? IndexedPageStoreSerialWriter::new : PageStoreSerialWriter::new;

    /**
     * @param cctx Shared context.
     * @param srcNodeId Node id which cause snapshot task creation.
     * @param reqId Snapshot operation request ID.
     * @param sft Snapshot file tree.
     * @param ft Node file tree.
     * @param ioFactory Factory to working with snapshot files.
     * @param snpSndr Factory which produces snapshot receiver instance.
     * @param parts Map of cache groups and its partitions to include into snapshot, if set of partitions
     * is {@code null} than all OWNING partitions for given cache groups will be included into snapshot.
*/
    public SnapshotFutureTask(
        GridCacheSharedContext<?, ?> cctx,
        UUID srcNodeId,
        UUID reqId,
        SnapshotFileTree sft,
        NodeFileTree ft,
        FileIOFactory ioFactory,
        SnapshotSender snpSndr,
        Map<Integer, Set<Integer>> parts,
        boolean withMetaStorage,
        ThreadLocal<ByteBuffer> locBuff
    ) {
        super(cctx, srcNodeId, reqId, sft, snpSndr, parts);

        assert snpName != null : "Snapshot name cannot be empty or null.";
        assert snpSndr != null : "Snapshot sender which handles execution tasks must be not null.";
        assert snpSndr.executor() != null : "Executor service must be not null.";
        assert cctx.pageStore() instanceof FilePageStoreManager : "Snapshot task can work only with physical files.";
        assert !parts.containsKey(MetaStorage.METASTORAGE_CACHE_ID) : "The withMetaStorage must be used instead.";

        this.ft = ft;
        this.ioFactory = ioFactory;
        this.withMetaStorage = withMetaStorage;
        this.pageStore = (FilePageStoreManager)cctx.pageStore();
        this.locBuff = locBuff;
    }

    /**
     * Propagates the error to the base task and fails the started future.
     *
     * @param th An exception which occurred during snapshot processing.
     */
    @Override public void acceptException(Throwable th) {
        if (th == null)
            return;

        super.acceptException(th);

        startedFut.onDone(th);
    }

    /** {@inheritDoc} */
    @Override public boolean onDone(@Nullable SnapshotFutureTaskResult res, @Nullable Throwable err) {
        // Release all per-partition delta writers and configuration senders before closing the sender.
        for (PageStoreSerialWriter writer : partDeltaWriters.values())
            U.closeQuiet(writer);

        for (CacheConfigurationSender ccfgSndr : ccfgSndrs)
            U.closeQuiet(ccfgSndr);

        snpSndr.close(err);

        FileTreeUtils.removeTmpSnapshotFiles(sft, err != null, log);

        if (err != null)
            startedFut.onDone(err);

        return super.onDone(res, err);
    }

    /**
     * @return Started future.
     */
    public IgniteInternalFuture<?> started() {
        return startedFut;
    }

    /**
     * Initiates snapshot task.
     *
     * @return {@code true} if task started by this call.
     */
    @Override public boolean start() {
        if (stopping())
            return false;

        try {
            // Only the first caller schedules the task on a checkpoint.
            if (!started.compareAndSet(false, true))
                return false;

            FileTreeUtils.createCacheStorages(sft.tempFileTree(), log);

            for (Integer grpId : parts.keySet()) {
                CacheGroupContext gctx = cctx.cache().cacheGroup(grpId);

                if (gctx == null)
                    throw new IgniteCheckedException("Cache group context not found: " + grpId);

                if (!CU.isPersistentCache(gctx.config(), cctx.kernalContext().config().getDataStorageConfiguration()))
                    throw new IgniteCheckedException("In-memory cache groups are not allowed to be snapshot: " + grpId);

                for (File cs : sft.tempFileTree().cacheStorages(gctx.config())) {
                    // Create cache group snapshot directory on start in a single thread.
                    U.ensureDirectory(cs, "directory for snapshotting cache group", log);
                }
            }

            if (withMetaStorage) {
                U.ensureDirectory(sft.tempFileTree().metaStorage(), "directory for snapshotting metastorage", log);
            }

            startedFut.listen(() ->
                ((GridCacheDatabaseSharedManager)cctx.database()).removeCheckpointListener(this)
            );

            // Listener will be removed right after first execution.
            ((GridCacheDatabaseSharedManager)cctx.database()).addCheckpointListener(this);

            if (log.isInfoEnabled()) {
                log.info("Snapshot operation is scheduled on local node and will be handled by the checkpoint " +
                    "listener [sctx=" + this + ", topVer=" + cctx.discovery().topologyVersionEx() + ']');
            }
        }
        catch (IgniteCheckedException e) {
            acceptException(e);

            return false;
        }

        return true;
    }

    /** {@inheritDoc} */
    @Override public void beforeCheckpointBegin(Context ctx) throws IgniteCheckedException {
        if (stopping())
            return;

        // Complete cpEndFut when the checkpoint finishes, successfully or not.
        ctx.finishedStateFut().listen(f -> {
            if (f.error() == null)
                cpEndFut.complete(true);
            else
                cpEndFut.completeExceptionally(f.error());
        });

        if (withMetaStorage) {
            try {
                long start = U.currentTimeMillis();

                U.get(((DistributedMetaStorageImpl)cctx.kernalContext().distributedMetastorage()).flush());

                if (log.isInfoEnabled()) {
                    log.info("Finished waiting for all the concurrent operations over the metadata store before snapshot " +
                        "[snpName=" + snpName + ", time=" + (U.currentTimeMillis() - start) + "ms]");
                }
            }
            catch (IgniteCheckedException ignore) {
                // Flushing may be cancelled or interrupted due to the local node stopping.
            }
        }
    }

    /** {@inheritDoc} */
    @Override public void onMarkCheckpointBegin(Context ctx) {
        // Write lock is hold. Partition pages counters has been collected under write lock.
        if (stopping())
            return;

        try {
            // Here we have the following warranties:
            // 1. Checkpoint holds write acquire lock and Snapshot holds PME. Then there are not any concurrent updates.
            // 2. This record is written before the related CheckpointRecord, and is flushed with CheckpointRecord or instead it.
if (cctx.wal() != null) {
                snpPtr = cctx.wal().log(new ClusterSnapshotRecord(snpName));

                ctx.walFlush(true);
            }

            processPartitions();

            List<CacheConfiguration<?, ?>> ccfgs = new ArrayList<>();

            for (Map.Entry<Integer, Set<Integer>> e : processed.entrySet()) {
                int grpId = e.getKey();

                CacheGroupContext gctx = cctx.cache().cacheGroup(grpId);

                if (gctx == null)
                    throw new IgniteCheckedException("Cache group is stopped : " + grpId);

                ccfgs.add(gctx.config());

                addPartitionWriters(grpId, e.getValue(), part -> sft.partDeltaFile(gctx.config(), part));
            }

            if (withMetaStorage) {
                processed.put(MetaStorage.METASTORAGE_CACHE_ID, MetaStorage.METASTORAGE_PARTITIONS);

                addPartitionWriters(MetaStorage.METASTORAGE_CACHE_ID, MetaStorage.METASTORAGE_PARTITIONS,
                    sft::metastorageDeltaFile);
            }

            cctx.cache().configManager().readConfigurationFiles(ccfgs,
                (ccfg, ccfgFile) -> ccfgSndrs.add(new CacheConfigurationSender(ccfg, ccfgFile)));
        }
        catch (IgniteCheckedException e) {
            acceptException(e);
        }
    }

    /** {@inheritDoc} */
    @Override public void onCheckpointBegin(Context ctx) {
        if (stopping())
            return;

        assert !processed.isEmpty() : "Partitions to process must be collected under checkpoint mark phase";

        wrapExceptionIfStarted(() -> snpSndr.init(processed.values().stream().mapToInt(Set::size).sum()))
            .run();

        // Snapshot task can now be started since checkpoint write lock released and
        // there is no error happen on task init.
        if (!startedFut.onDone())
            return;

        if (log.isInfoEnabled()) {
            log.info("Submit partition processing tasks to the snapshot execution pool " +
                "[map=" + groupByGroupId(partFileLengths.keySet()) +
                ", totalSize=" + U.humanReadableByteCount(partFileLengths.values().stream().mapToLong(v -> v).sum()) + ']');
        }

        saveSnapshotData();
    }

    /** {@inheritDoc} */
    @Override protected List<CompletableFuture<Void>> saveGroup(int grpId, Set<Integer> grpParts) {
        // Process partitions for a particular cache group.
        return grpParts.stream().map(partId -> {
            GroupPartitionId pair = new GroupPartitionId(grpId, partId);

            Long partLen = partFileLengths.get(pair);

            totalSize.addAndGet(partLen);

            return runAsync(() -> {
                snpSndr.sendPart(
                    partitionFile(ft, pair),
                    partitionFile(sft, pair),
                    storagePath(pair),
                    pair,
                    partLen);

                // Stop partition writer.
                partDeltaWriters.get(pair).markPartitionProcessed();

                processedSize.addAndGet(partLen);

                // Wait for the completion of both futures - checkpoint end, copy partition.
            }).runAfterBothAsync(cpEndFut, wrapExceptionIfStarted(() -> {
                PageStoreSerialWriter writer = partDeltaWriters.get(pair);

                writer.close();

                File delta = writer.deltaFile;

                try {
                    // Atomically creates a new, empty delta file if and only if
                    // a file with this name does not yet exist.
                    delta.createNewFile();
                }
                catch (IOException ex) {
                    throw new IgniteCheckedException(ex);
                }

                snpSndr.sendDelta(delta, partitionFile(sft, pair), pair);

                processedSize.addAndGet(delta.length());

                boolean deleted = delta.delete();

                assert deleted : delta.getAbsolutePath();

                File deltaIdx = partDeltaIndexFile(delta);

                if (deltaIdx.exists()) {
                    deleted = deltaIdx.delete();

                    assert deleted;
                }
            }), snpSndr.executor());
        }).collect(Collectors.toList());
    }

    /** {@inheritDoc} */
    @Override protected List<CompletableFuture<Void>> saveCacheConfigs() {
        // Send configuration files of all cache groups.
        return ccfgSndrs.stream()
            .map(ccfgSndr -> runAsync(ccfgSndr::sendCacheConfig))
            .collect(Collectors.toList());
    }

    /**
     * Registers a delta writer and records the partition file length for each given partition.
     *
     * @param grpId Cache group id.
     * @param parts Set of partitions to be processed.
     * @param partDelta Partition delta file provider.
     * @throws IgniteCheckedException If fails.
     */
    void addPartitionWriters(int grpId, Set<Integer> parts, IntFunction<File> partDelta) throws IgniteCheckedException {
        Integer encGrpId = cctx.cache().isEncrypted(grpId) ? grpId : null;

        for (int partId : parts) {
            GroupPartitionId pair = new GroupPartitionId(grpId, partId);

            PageStore store = pageStore.getStore(grpId, partId);

            File delta = partDelta.apply(partId);

            partDeltaWriters.put(pair, deltaWriterFactory.apply(store, delta, encGrpId));

            partFileLengths.put(pair, store.size());
        }
    }

    /** {@inheritDoc} */
    @Override public synchronized CompletableFuture<Void> closeAsync() {
        if (closeFut == null) {
            Throwable err0 = err.get();

            // Zero partitions haven't to be written on disk.
            Set<GroupPartitionId> taken = partFileLengths.entrySet().stream()
                .filter(e -> e.getValue() > 0)
                .map(Map.Entry::getKey)
                .collect(Collectors.toSet());

            closeFut = CompletableFuture.runAsync(() -> onDone(new SnapshotFutureTaskResult(taken, snpPtr), err0),
                cctx.kernalContext().pools().getSystemExecutorService());
        }

        return closeFut;
    }

    /** @return Estimated snapshot size in bytes. The value may grow during snapshot creation. */
    public long totalSize() {
        return totalSize.get();
    }

    /** @return Processed snapshot size in bytes. */
    public long processedSize() {
        return processedSize.get();
    }

    /**
     * @param grps List of processing pairs.
     *
     * @return Map with cache group id's associated to corresponding partitions.
     */
    private static Map<Integer, String> groupByGroupId(Collection<GroupPartitionId> grps) {
        return grps.stream()
            .collect(Collectors.groupingBy(GroupPartitionId::getGroupId,
                Collectors.mapping(GroupPartitionId::getPartitionId, Collectors.toSet())))
            .entrySet()
            .stream()
            .collect(Collectors.toMap(Map.Entry::getKey, e -> S.toStringSortedDistinct(e.getValue())));
    }

    /**
     * @param ft File tree.
     * @param grpAndPart Group and partition.
     * @return Cache partition file in given tree.
     * @throws IgniteCheckedException If cache group doesn't exist.
*/
    private File partitionFile(NodeFileTree ft, GroupPartitionId grpAndPart) throws IgniteCheckedException {
        if (grpAndPart.getGroupId() == MetaStorage.METASTORAGE_CACHE_ID)
            return ft.metaStoragePartition(grpAndPart.getPartitionId());

        CacheGroupContext gctx = cctx.cache().cacheGroup(grpAndPart.getGroupId());

        if (gctx == null)
            throw new IgniteCheckedException("Cache group context has not found due to the cache group is stopped.");

        return ft.partitionFile(gctx.config(), grpAndPart.getPartitionId());
    }

    /** @return Storage path. */
    private String storagePath(GroupPartitionId grpAndPart) throws IgniteCheckedException {
        // Metastorage has no dedicated storage path.
        if (grpAndPart.getGroupId() == MetaStorage.METASTORAGE_CACHE_ID)
            return null;

        CacheGroupContext gctx = cctx.cache().cacheGroup(grpAndPart.getGroupId());

        if (gctx == null)
            throw new IgniteCheckedException("Cache group context has not found due to the cache group is stopped.");

        return FileTreeUtils.partitionStorage(gctx.config(), grpAndPart.getPartitionId());
    }

    /** {@inheritDoc} */
    @Override public boolean equals(Object o) {
        if (this == o)
            return true;

        if (o == null || getClass() != o.getClass())
            return false;

        SnapshotFutureTask ctx = (SnapshotFutureTask)o;

        return snpName.equals(ctx.snpName);
    }

    /** {@inheritDoc} */
    @Override public int hashCode() {
        return Objects.hash(snpName);
    }

    /** {@inheritDoc} */
    @Override public String toString() {
        return S.toString(SnapshotFutureTask.class, this, super.toString());
    }

    /** Sends a cache configuration file, preserving a temp copy if the file changes concurrently. */
    private class CacheConfigurationSender implements BiConsumer<String, File>, Closeable {
        /** Cache configuration. */
        private final CacheConfiguration<?, ?> ccfg;

        /** Lock for cache configuration processing. */
        private final Lock lock = new ReentrantLock();

        /** Configuration file to send. */
        private volatile File ccfgFile;

        /** {@code true} if configuration file already sent. */
        private volatile boolean sent;

        /**
         * {@code true} if an old configuration file written to the temp directory and
         * waiting to be sent.
         */
        private volatile boolean fromTemp;

        /**
         * @param ccfg Cache configuration.
         * @param ccfgFile Cache configuration to send.
         */
        public CacheConfigurationSender(CacheConfiguration<?, ?> ccfg, File ccfgFile) {
            this.ccfg = ccfg;
            this.ccfgFile = ccfgFile;

            cctx.cache().configManager().addConfigurationChangeListener(this);
        }

        /**
         * Send the original cache configuration file or the temp one instead saved due to
         * concurrent configuration change operation happened (e.g. SQL add/drop column).
         */
        public void sendCacheConfig() {
            lock.lock();

            try {
                snpSndr.sendCacheConfig(ccfgFile, ccfg);

                close0();
            }
            finally {
                lock.unlock();
            }
        }

        /** {@inheritDoc} */
        @Override public void accept(String cacheName, File ccfgFile) {
            assert ccfgFile.exists() :
                "Cache configuration file must exist [cacheName=" + cacheName +
                    ", ccfgFile=" + ccfgFile.getAbsolutePath() + ']';

            if (stopping())
                return;

            // Fast path: not our cache, or the original/temp copy is already captured.
            if (!cacheName.equals(ccfg.getName()) || sent || fromTemp)
                return;

            lock.lock();

            try {
                if (sent || fromTemp)
                    return;

                File cfgTmpRoot = sft.tempFileTree().cacheConfigurationFile(ccfg).getParentFile();

                if (!U.mkdirs(cfgTmpRoot))
                    throw new IOException("Unable to create temp directory to copy original configuration file: " + cfgTmpRoot);

                File newCcfgFile = new File(cfgTmpRoot, ccfgFile.getName());

                // NOTE(review): createNewFile() result ignored — presumably copy() below overwrites
                // any pre-existing file; confirm.
                newCcfgFile.createNewFile();

                copy(ioFactory, ccfgFile, newCcfgFile, ccfgFile.length());

                this.ccfgFile = newCcfgFile;

                fromTemp = true;
            }
            catch (IOException e) {
                acceptException(e);
            }
            finally {
                lock.unlock();
            }
        }

        /** Close writer and remove listener. */
        private void close0() {
            sent = true;

            cctx.cache().configManager().removeConfigurationChangeListener(this);

            if (fromTemp)
                U.delete(ccfgFile);
        }

        /** {@inheritDoc} */
        @Override public void close() {
            lock.lock();

            try {
                close0();
            }
            finally {
                lock.unlock();
            }
        }
    }

    /** Copy-on-write listener that captures changed pages of a partition into a delta file. */
    private class PageStoreSerialWriter implements PageWriteListener, Closeable {
        /** Page store to which current writer is related to.
*/
        @GridToStringExclude
        protected final PageStore store;

        /** Partition delta file to store delta pages into. */
        protected final File deltaFile;

        /** Id of encrypted cache group. If {@code null}, no encrypted IO is used. */
        private final Integer encryptedGrpId;

        /** Busy lock to protect write operations. */
        private final ReadWriteLock lock = new ReentrantReadWriteLock();

        /** {@code true} if need the original page from PageStore instead of given buffer. */
        @GridToStringExclude
        private final BooleanSupplier checkpointComplete = () ->
            cpEndFut.isDone() && !cpEndFut.isCompletedExceptionally();

        /**
         * Array of bits. 1 - means pages written, 0 - the otherwise.
         * Size of array can be estimated only under checkpoint write lock.
         */
        private final AtomicBitSet writtenPages;

        /** IO over the underlying delta file. */
        @GridToStringExclude
        private volatile FileIO deltaFileIo;

        /** {@code true} if partition file has been copied to external resource. */
        private volatile boolean partProcessed;

        /**
         * @param store Partition page store.
         * @param deltaFile Destination file to write pages to.
         * @param encryptedGrpId Id of encrypted cache group. If {@code null}, no encrypted IO is used.
         */
        public PageStoreSerialWriter(PageStore store, File deltaFile, @Nullable Integer encryptedGrpId) {
            assert store != null;
            assert cctx.database().checkpointLockIsHeldByThread();

            this.deltaFile = deltaFile;
            this.store = store;

            // It is important to init {@link AtomicBitSet} under the checkpoint write-lock.
            // This guarantee us that no pages will be modified and it's safe to init pages
            // list which needs to be processed.
            writtenPages = new AtomicBitSet(store.pages());

            this.encryptedGrpId = encryptedGrpId;

            store.addWriteListener(this);
        }

        /**
         * @return {@code true} if writer is stopped and cannot write pages.
         */
        public boolean stopped() {
            return (checkpointComplete.getAsBoolean() && partProcessed) || stopping();
        }

        /**
         * Mark partition has been processed by another thread.
         */
        public void markPartitionProcessed() {
            lock.writeLock().lock();

            try {
                partProcessed = true;
            }
            finally {
                lock.writeLock().unlock();
            }
        }

        /** Lazily opens IO over the delta file, using encrypted IO for encrypted cache groups. */
        protected void init() throws IOException {
            deltaFileIo = (encryptedGrpId == null ? ioFactory :
                pageStore.encryptedFileIoFactory(ioFactory, encryptedGrpId)).create(deltaFile);
        }

        /** {@inheritDoc} */
        @Override public void accept(long pageId, ByteBuffer buf) {
            assert buf.position() == 0 : buf.position();
            assert buf.order() == ByteOrder.nativeOrder() : buf.order();

            // Lazy init of the delta file IO under the write lock.
            if (deltaFileIo == null) {
                lock.writeLock().lock();

                try {
                    if (stopped())
                        return;

                    if (deltaFileIo == null)
                        init();
                }
                catch (IOException e) {
                    acceptException(e);
                }
                finally {
                    lock.writeLock().unlock();
                }
            }

            int pageIdx = -1;

            lock.readLock().lock();

            try {
                if (stopped())
                    return;

                pageIdx = PageIdUtils.pageIndex(pageId);

                if (checkpointComplete.getAsBoolean()) {
                    // Page already written.
                    if (!writtenPages.touch(pageIdx))
                        return;

                    final ByteBuffer locBuf = locBuff.get();

                    assert locBuf.capacity() == store.getPageSize();

                    locBuf.clear();

                    if (!store.read(pageId, locBuf, true))
                        return;

                    locBuf.clear();

                    writePage0(pageId, locBuf);
                }
                else {
                    // Direct buffer is needs to be written, associated checkpoint not finished yet.
                    if (PageIO.getCompressionType(GridUnsafe.bufferAddress(buf)) != CompressionProcessor.UNCOMPRESSED_PAGE) {
                        final ByteBuffer locBuf = locBuff.get();

                        assert locBuf.capacity() == store.getPageSize();

                        locBuf.clear();

                        GridUnsafe.copyOffheapOffheap(GridUnsafe.bufferAddress(buf), GridUnsafe.bufferAddress(locBuf), buf.limit());

                        locBuf.limit(locBuf.capacity());
                        locBuf.position(0);

                        buf = locBuf;
                    }

                    writePage0(pageId, buf);

                    // Page marked as written to delta file, so there is no need to
                    // copy it from file when the first checkpoint associated with
                    // current snapshot task ends.
                    writtenPages.touch(pageIdx);
                }
            }
            catch (Throwable ex) {
                acceptException(new IgniteCheckedException("Error during writing pages to delta partition file " +
                    "[pageIdx=" + pageIdx + ", writer=" + this + ']', ex));
            }
            finally {
                lock.readLock().unlock();
            }
        }

        /**
         * @param pageId Page ID.
         * @param pageBuf Page buffer to write.
         * @throws IOException If page writing failed (IO error occurred).
         */
        protected synchronized void writePage0(long pageId, ByteBuffer pageBuf) throws IOException {
            assert deltaFileIo != null : "Delta pages storage is not inited: " + this;
            assert pageBuf.position() == 0;
            assert pageBuf.order() == ByteOrder.nativeOrder() : "Page buffer order " + pageBuf.order() +
                " should be same with " + ByteOrder.nativeOrder();

            if (log.isDebugEnabled()) {
                log.debug("onPageWrite [pageId=" + pageId +
                    ", pageIdBuff=" + PageIO.getPageId(pageBuf) +
                    ", fileSize=" + deltaFileIo.size() +
                    ", crcBuff=" + FastCrc.calcCrc(pageBuf, pageBuf.limit()) +
                    ", crcPage=" + PageIO.getCrc(pageBuf) + ']');

                pageBuf.rewind();
            }

            // Write buffer to the end of the file.
            int len = deltaFileIo.writeFully(pageBuf);

            assert len == pageBuf.capacity();

            totalSize.addAndGet(len);
        }

        /** {@inheritDoc} */
        @Override public void close() {
            lock.writeLock().lock();

            try {
                U.closeQuiet(deltaFileIo);

                deltaFileIo = null;

                store.removeWriteListener(this);
            }
            finally {
                lock.writeLock().unlock();
            }
        }

        /** {@inheritDoc} */
        @Override public String toString() {
            return S.toString(PageStoreSerialWriter.class, this);
        }
    }

    /** @see IgniteSnapshotManager.DeltaSortedIterator */
    private class IndexedPageStoreSerialWriter extends PageStoreSerialWriter {
        /** Delta index file IO. */
        @GridToStringExclude
        private volatile FileIO idxIo;

        /** Buffer of page indexes written to the delta.
*/ private volatile ByteBuffer pageIdxs; /** */ public IndexedPageStoreSerialWriter(PageStore store, File deltaFile, @Nullable Integer encryptedGrpId) { super(store, deltaFile, encryptedGrpId); } /** {@inheritDoc} */ @Override protected void init() throws IOException { super.init(); idxIo = ioFactory.create(partDeltaIndexFile(deltaFile)); pageIdxs = ByteBuffer.allocate(store.getPageSize()).order(ByteOrder.nativeOrder()); assert pageIdxs.capacity() % 4 == 0; } /** {@inheritDoc} */ @Override protected synchronized void writePage0(long pageId, ByteBuffer pageBuf) throws IOException { super.writePage0(pageId, pageBuf); pageIdxs.putInt(PageIdUtils.pageIndex(pageId)); if (!pageIdxs.hasRemaining()) flush(); } /** Flush buffer with page indexes to the file. */ private void flush() throws IOException { pageIdxs.flip(); idxIo.writeFully(pageIdxs); pageIdxs.clear(); } /** {@inheritDoc} */ @Override public void close() { super.close(); try { if (idxIo != null) flush(); } catch (IOException e) { acceptException(new IgniteCheckedException("Error during writing page indexes to delta " + "partition index file [writer=" + this + ']', e)); } U.closeQuiet(idxIo); idxIo = null; } } /** * */ private static class AtomicBitSet { /** Container of bits. */ private final AtomicIntegerArray arr; /** Size of array of bits. */ private final int size; /** * @param size Size of array. */ public AtomicBitSet(int size) { this.size = size; arr = new AtomicIntegerArray((size + 31) >>> 5); } /** * @param off Bit position to change. * @return {@code true} if bit has been set, * {@code false} if bit changed by another thread or out of range. */ public boolean touch(long off) { if (off >= size) return false; int bit = 1 << off; int bucket = (int)(off >>> 5); while (true) { int cur = arr.get(bucket); int val = cur | bit; if (cur == val) return false; if (arr.compareAndSet(bucket, cur, val)) return true; } } } }
apache/lucene
33,025
lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/dict/ToStringUtil.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.lucene.analysis.ja.dict; import java.io.IOException; import java.util.Collections; import java.util.HashMap; import java.util.Map; /** Utility class for english translations of morphological data, used only for debugging. 
*/ public class ToStringUtil { // a translation map for parts of speech, only used for reflectWith private static final Map<String, String> posTranslations; static { Map<String, String> translations = new HashMap<>(); translations.put("名詞", "noun"); translations.put("名詞-一般", "noun-common"); translations.put("名詞-固有名詞", "noun-proper"); translations.put("名詞-固有名詞-一般", "noun-proper-misc"); translations.put("名詞-固有名詞-人名", "noun-proper-person"); translations.put("名詞-固有名詞-人名-一般", "noun-proper-person-misc"); translations.put("名詞-固有名詞-人名-姓", "noun-proper-person-surname"); translations.put("名詞-固有名詞-人名-名", "noun-proper-person-given_name"); translations.put("名詞-固有名詞-組織", "noun-proper-organization"); translations.put("名詞-固有名詞-地域", "noun-proper-place"); translations.put("名詞-固有名詞-地域-一般", "noun-proper-place-misc"); translations.put("名詞-固有名詞-地域-国", "noun-proper-place-country"); translations.put("名詞-代名詞", "noun-pronoun"); translations.put("名詞-代名詞-一般", "noun-pronoun-misc"); translations.put("名詞-代名詞-縮約", "noun-pronoun-contraction"); translations.put("名詞-副詞可能", "noun-adverbial"); translations.put("名詞-サ変接続", "noun-verbal"); translations.put("名詞-形容動詞語幹", "noun-adjective-base"); translations.put("名詞-数", "noun-numeric"); translations.put("名詞-非自立", "noun-affix"); translations.put("名詞-非自立-一般", "noun-affix-misc"); translations.put("名詞-非自立-副詞可能", "noun-affix-adverbial"); translations.put("名詞-非自立-助動詞語幹", "noun-affix-aux"); translations.put("名詞-非自立-形容動詞語幹", "noun-affix-adjective-base"); translations.put("名詞-特殊", "noun-special"); translations.put("名詞-特殊-助動詞語幹", "noun-special-aux"); translations.put("名詞-接尾", "noun-suffix"); translations.put("名詞-接尾-一般", "noun-suffix-misc"); translations.put("名詞-接尾-人名", "noun-suffix-person"); translations.put("名詞-接尾-地域", "noun-suffix-place"); translations.put("名詞-接尾-サ変接続", "noun-suffix-verbal"); translations.put("名詞-接尾-助動詞語幹", "noun-suffix-aux"); translations.put("名詞-接尾-形容動詞語幹", "noun-suffix-adjective-base"); translations.put("名詞-接尾-副詞可能", "noun-suffix-adverbial"); 
translations.put("名詞-接尾-助数詞", "noun-suffix-classifier"); translations.put("名詞-接尾-特殊", "noun-suffix-special"); translations.put("名詞-接続詞的", "noun-suffix-conjunctive"); translations.put("名詞-動詞非自立的", "noun-verbal_aux"); translations.put("名詞-引用文字列", "noun-quotation"); translations.put("名詞-ナイ形容詞語幹", "noun-nai_adjective"); translations.put("接頭詞", "prefix"); translations.put("接頭詞-名詞接続", "prefix-nominal"); translations.put("接頭詞-動詞接続", "prefix-verbal"); translations.put("接頭詞-形容詞接続", "prefix-adjectival"); translations.put("接頭詞-数接続", "prefix-numerical"); translations.put("動詞", "verb"); translations.put("動詞-自立", "verb-main"); translations.put("動詞-非自立", "verb-auxiliary"); translations.put("動詞-接尾", "verb-suffix"); translations.put("形容詞", "adjective"); translations.put("形容詞-自立", "adjective-main"); translations.put("形容詞-非自立", "adjective-auxiliary"); translations.put("形容詞-接尾", "adjective-suffix"); translations.put("副詞", "adverb"); translations.put("副詞-一般", "adverb-misc"); translations.put("副詞-助詞類接続", "adverb-particle_conjunction"); translations.put("連体詞", "adnominal"); translations.put("接続詞", "conjunction"); translations.put("助詞", "particle"); translations.put("助詞-格助詞", "particle-case"); translations.put("助詞-格助詞-一般", "particle-case-misc"); translations.put("助詞-格助詞-引用", "particle-case-quote"); translations.put("助詞-格助詞-連語", "particle-case-compound"); translations.put("助詞-接続助詞", "particle-conjunctive"); translations.put("助詞-係助詞", "particle-dependency"); translations.put("助詞-副助詞", "particle-adverbial"); translations.put("助詞-間投助詞", "particle-interjective"); translations.put("助詞-並立助詞", "particle-coordinate"); translations.put("助詞-終助詞", "particle-final"); translations.put("助詞-副助詞/並立助詞/終助詞", "particle-adverbial/conjunctive/final"); translations.put("助詞-連体化", "particle-adnominalizer"); translations.put("助詞-副詞化", "particle-adnominalizer"); translations.put("助詞-特殊", "particle-special"); translations.put("助動詞", "auxiliary-verb"); translations.put("感動詞", "interjection"); translations.put("記号", 
"symbol"); translations.put("記号-一般", "symbol-misc"); translations.put("記号-句点", "symbol-period"); translations.put("記号-読点", "symbol-comma"); translations.put("記号-空白", "symbol-space"); translations.put("記号-括弧開", "symbol-open_bracket"); translations.put("記号-括弧閉", "symbol-close_bracket"); translations.put("記号-アルファベット", "symbol-alphabetic"); translations.put("その他", "other"); translations.put("その他-間投", "other-interjection"); translations.put("フィラー", "filler"); translations.put("非言語音", "non-verbal"); translations.put("語断片", "fragment"); translations.put("未知語", "unknown"); posTranslations = Collections.unmodifiableMap(translations); } /** Get the english form of a POS tag */ public static String getPOSTranslation(String s) { return posTranslations.get(s); } // a translation map for inflection types, only used for reflectWith private static final Map<String, String> inflTypeTranslations; static { Map<String, String> translations = new HashMap<>(); translations.put("*", "*"); translations.put("形容詞・アウオ段", "adj-group-a-o-u"); translations.put("形容詞・イ段", "adj-group-i"); translations.put("形容詞・イイ", "adj-group-ii"); translations.put("不変化型", "non-inflectional"); translations.put("特殊・タ", "special-da"); translations.put("特殊・ダ", "special-ta"); translations.put("文語・ゴトシ", "classical-gotoshi"); translations.put("特殊・ジャ", "special-ja"); translations.put("特殊・ナイ", "special-nai"); translations.put("五段・ラ行特殊", "5-row-cons-r-special"); translations.put("特殊・ヌ", "special-nu"); translations.put("文語・キ", "classical-ki"); translations.put("特殊・タイ", "special-tai"); translations.put("文語・ベシ", "classical-beshi"); translations.put("特殊・ヤ", "special-ya"); translations.put("文語・マジ", "classical-maji"); translations.put("下二・タ行", "2-row-lower-cons-t"); translations.put("特殊・デス", "special-desu"); translations.put("特殊・マス", "special-masu"); translations.put("五段・ラ行アル", "5-row-aru"); translations.put("文語・ナリ", "classical-nari"); translations.put("文語・リ", "classical-ri"); translations.put("文語・ケリ", "classical-keri"); 
translations.put("文語・ル", "classical-ru"); translations.put("五段・カ行イ音便", "5-row-cons-k-i-onbin"); translations.put("五段・サ行", "5-row-cons-s"); translations.put("一段", "1-row"); translations.put("五段・ワ行促音便", "5-row-cons-w-cons-onbin"); translations.put("五段・マ行", "5-row-cons-m"); translations.put("五段・タ行", "5-row-cons-t"); translations.put("五段・ラ行", "5-row-cons-r"); translations.put("サ変・−スル", "irregular-suffix-suru"); translations.put("五段・ガ行", "5-row-cons-g"); translations.put("サ変・−ズル", "irregular-suffix-zuru"); translations.put("五段・バ行", "5-row-cons-b"); translations.put("五段・ワ行ウ音便", "5-row-cons-w-u-onbin"); translations.put("下二・ダ行", "2-row-lower-cons-d"); translations.put("五段・カ行促音便ユク", "5-row-cons-k-cons-onbin-yuku"); translations.put("上二・ダ行", "2-row-upper-cons-d"); translations.put("五段・カ行促音便", "5-row-cons-k-cons-onbin"); translations.put("一段・得ル", "1-row-eru"); translations.put("四段・タ行", "4-row-cons-t"); translations.put("五段・ナ行", "5-row-cons-n"); translations.put("下二・ハ行", "2-row-lower-cons-h"); translations.put("四段・ハ行", "4-row-cons-h"); translations.put("四段・バ行", "4-row-cons-b"); translations.put("サ変・スル", "irregular-suru"); translations.put("上二・ハ行", "2-row-upper-cons-h"); translations.put("下二・マ行", "2-row-lower-cons-m"); translations.put("四段・サ行", "4-row-cons-s"); translations.put("下二・ガ行", "2-row-lower-cons-g"); translations.put("カ変・来ル", "kuru-kanji"); translations.put("一段・クレル", "1-row-kureru"); translations.put("下二・得", "2-row-lower-u"); translations.put("カ変・クル", "kuru-kana"); translations.put("ラ変", "irregular-cons-r"); translations.put("下二・カ行", "2-row-lower-cons-k"); inflTypeTranslations = Collections.unmodifiableMap(translations); } /** Get the english form of inflection type */ public static String getInflectionTypeTranslation(String s) { return inflTypeTranslations.get(s); } // a translation map for inflection forms, only used for reflectWith private static final Map<String, String> inflFormTranslations; static { Map<String, String> translations = new HashMap<>(); 
translations.put("*", "*"); translations.put("基本形", "base"); translations.put("文語基本形", "classical-base"); translations.put("未然ヌ接続", "imperfective-nu-connection"); translations.put("未然ウ接続", "imperfective-u-connection"); translations.put("連用タ接続", "conjunctive-ta-connection"); translations.put("連用テ接続", "conjunctive-te-connection"); translations.put("連用ゴザイ接続", "conjunctive-gozai-connection"); translations.put("体言接続", "uninflected-connection"); translations.put("仮定形", "subjunctive"); translations.put("命令e", "imperative-e"); translations.put("仮定縮約1", "conditional-contracted-1"); translations.put("仮定縮約2", "conditional-contracted-2"); translations.put("ガル接続", "garu-connection"); translations.put("未然形", "imperfective"); translations.put("連用形", "conjunctive"); translations.put("音便基本形", "onbin-base"); translations.put("連用デ接続", "conjunctive-de-connection"); translations.put("未然特殊", "imperfective-special"); translations.put("命令i", "imperative-i"); translations.put("連用ニ接続", "conjunctive-ni-connection"); translations.put("命令yo", "imperative-yo"); translations.put("体言接続特殊", "adnominal-special"); translations.put("命令ro", "imperative-ro"); translations.put("体言接続特殊2", "uninflected-special-connection-2"); translations.put("未然レル接続", "imperfective-reru-connection"); translations.put("現代基本形", "modern-base"); translations.put("基本形-促音便", "base-onbin"); // not sure about this inflFormTranslations = Collections.unmodifiableMap(translations); } /** Get the english form of inflected form */ public static String getInflectedFormTranslation(String s) { return inflFormTranslations.get(s); } /** Romanize katakana with modified hepburn */ public static String getRomanization(String s) { StringBuilder out = new StringBuilder(); try { getRomanization(out, s); } catch (IOException bogus) { throw new RuntimeException(bogus); } return out.toString(); } /** Romanize katakana with modified hepburn */ // TODO: now that this is used by readingsfilter and not just for // debugging, fix this to really be a 
scheme that works best with IMEs public static void getRomanization(Appendable builder, CharSequence s) throws IOException { final int len = s.length(); for (int i = 0; i < len; i++) { // maximum lookahead: 3 char ch = s.charAt(i); char ch2 = (i < len - 1) ? s.charAt(i + 1) : 0; char ch3 = (i < len - 2) ? s.charAt(i + 2) : 0; main: switch (ch) { case 'ッ': switch (ch2) { case 'カ': case 'キ': case 'ク': case 'ケ': case 'コ': builder.append('k'); break main; case 'サ': case 'シ': case 'ス': case 'セ': case 'ソ': builder.append('s'); break main; case 'タ': case 'チ': case 'ツ': case 'テ': case 'ト': builder.append('t'); break main; case 'パ': case 'ピ': case 'プ': case 'ペ': case 'ポ': builder.append('p'); break main; } break; case 'ア': builder.append('a'); break; case 'イ': if (ch2 == 'ィ') { builder.append("yi"); i++; } else if (ch2 == 'ェ') { builder.append("ye"); i++; } else { builder.append('i'); } break; case 'ウ': switch (ch2) { case 'ァ': builder.append("wa"); i++; break; case 'ィ': builder.append("wi"); i++; break; case 'ゥ': builder.append("wu"); i++; break; case 'ェ': builder.append("we"); i++; break; case 'ォ': builder.append("wo"); i++; break; case 'ュ': builder.append("wyu"); i++; break; default: builder.append('u'); break; } break; case 'エ': builder.append('e'); break; case 'オ': if (ch2 == 'ウ') { builder.append('ō'); i++; } else { builder.append('o'); } break; case 'カ': builder.append("ka"); break; case 'キ': if (ch2 == 'ョ' && ch3 == 'ウ') { builder.append("kyō"); i += 2; } else if (ch2 == 'ュ' && ch3 == 'ウ') { builder.append("kyū"); i += 2; } else if (ch2 == 'ャ') { builder.append("kya"); i++; } else if (ch2 == 'ョ') { builder.append("kyo"); i++; } else if (ch2 == 'ュ') { builder.append("kyu"); i++; } else if (ch2 == 'ェ') { builder.append("kye"); i++; } else { builder.append("ki"); } break; case 'ク': switch (ch2) { case 'ァ': builder.append("kwa"); i++; break; case 'ィ': builder.append("kwi"); i++; break; case 'ェ': builder.append("kwe"); i++; break; case 'ォ': builder.append("kwo"); i++; 
break; case 'ヮ': builder.append("kwa"); i++; break; default: builder.append("ku"); break; } break; case 'ケ': builder.append("ke"); break; case 'コ': if (ch2 == 'ウ') { builder.append("kō"); i++; } else { builder.append("ko"); } break; case 'サ': builder.append("sa"); break; case 'シ': if (ch2 == 'ョ' && ch3 == 'ウ') { builder.append("shō"); i += 2; } else if (ch2 == 'ュ' && ch3 == 'ウ') { builder.append("shū"); i += 2; } else if (ch2 == 'ャ') { builder.append("sha"); i++; } else if (ch2 == 'ョ') { builder.append("sho"); i++; } else if (ch2 == 'ュ') { builder.append("shu"); i++; } else if (ch2 == 'ェ') { builder.append("she"); i++; } else { builder.append("shi"); } break; case 'ス': if (ch2 == 'ィ') { builder.append("si"); i++; } else { builder.append("su"); } break; case 'セ': builder.append("se"); break; case 'ソ': if (ch2 == 'ウ') { builder.append("sō"); i++; } else { builder.append("so"); } break; case 'タ': builder.append("ta"); break; case 'チ': if (ch2 == 'ョ' && ch3 == 'ウ') { builder.append("chō"); i += 2; } else if (ch2 == 'ュ' && ch3 == 'ウ') { builder.append("chū"); i += 2; } else if (ch2 == 'ャ') { builder.append("cha"); i++; } else if (ch2 == 'ョ') { builder.append("cho"); i++; } else if (ch2 == 'ュ') { builder.append("chu"); i++; } else if (ch2 == 'ェ') { builder.append("che"); i++; } else { builder.append("chi"); } break; case 'ツ': if (ch2 == 'ァ') { builder.append("tsa"); i++; } else if (ch2 == 'ィ') { builder.append("tsi"); i++; } else if (ch2 == 'ェ') { builder.append("tse"); i++; } else if (ch2 == 'ォ') { builder.append("tso"); i++; } else if (ch2 == 'ュ') { builder.append("tsyu"); i++; } else { builder.append("tsu"); } break; case 'テ': if (ch2 == 'ィ') { builder.append("ti"); i++; } else if (ch2 == 'ゥ') { builder.append("tu"); i++; } else if (ch2 == 'ュ') { builder.append("tyu"); i++; } else { builder.append("te"); } break; case 'ト': if (ch2 == 'ウ') { builder.append("tō"); i++; } else if (ch2 == 'ゥ') { builder.append("tu"); i++; } else { builder.append("to"); } break; case 'ナ': 
builder.append("na"); break; case 'ニ': if (ch2 == 'ョ' && ch3 == 'ウ') { builder.append("nyō"); i += 2; } else if (ch2 == 'ュ' && ch3 == 'ウ') { builder.append("nyū"); i += 2; } else if (ch2 == 'ャ') { builder.append("nya"); i++; } else if (ch2 == 'ョ') { builder.append("nyo"); i++; } else if (ch2 == 'ュ') { builder.append("nyu"); i++; } else if (ch2 == 'ェ') { builder.append("nye"); i++; } else { builder.append("ni"); } break; case 'ヌ': builder.append("nu"); break; case 'ネ': builder.append("ne"); break; case 'ノ': if (ch2 == 'ウ') { builder.append("nō"); i++; } else { builder.append("no"); } break; case 'ハ': builder.append("ha"); break; case 'ヒ': if (ch2 == 'ョ' && ch3 == 'ウ') { builder.append("hyō"); i += 2; } else if (ch2 == 'ュ' && ch3 == 'ウ') { builder.append("hyū"); i += 2; } else if (ch2 == 'ャ') { builder.append("hya"); i++; } else if (ch2 == 'ョ') { builder.append("hyo"); i++; } else if (ch2 == 'ュ') { builder.append("hyu"); i++; } else if (ch2 == 'ェ') { builder.append("hye"); i++; } else { builder.append("hi"); } break; case 'フ': if (ch2 == 'ャ') { builder.append("fya"); i++; } else if (ch2 == 'ュ') { builder.append("fyu"); i++; } else if (ch2 == 'ィ' && ch3 == 'ェ') { builder.append("fye"); i += 2; } else if (ch2 == 'ョ') { builder.append("fyo"); i++; } else if (ch2 == 'ァ') { builder.append("fa"); i++; } else if (ch2 == 'ィ') { builder.append("fi"); i++; } else if (ch2 == 'ェ') { builder.append("fe"); i++; } else if (ch2 == 'ォ') { builder.append("fo"); i++; } else { builder.append("fu"); } break; case 'ヘ': builder.append("he"); break; case 'ホ': if (ch2 == 'ウ') { builder.append("hō"); i++; } else if (ch2 == 'ゥ') { builder.append("hu"); i++; } else { builder.append("ho"); } break; case 'マ': builder.append("ma"); break; case 'ミ': if (ch2 == 'ョ' && ch3 == 'ウ') { builder.append("myō"); i += 2; } else if (ch2 == 'ュ' && ch3 == 'ウ') { builder.append("myū"); i += 2; } else if (ch2 == 'ャ') { builder.append("mya"); i++; } else if (ch2 == 'ョ') { builder.append("myo"); i++; } else if (ch2 
== 'ュ') { builder.append("myu"); i++; } else if (ch2 == 'ェ') { builder.append("mye"); i++; } else { builder.append("mi"); } break; case 'ム': builder.append("mu"); break; case 'メ': builder.append("me"); break; case 'モ': if (ch2 == 'ウ') { builder.append("mō"); i++; } else { builder.append("mo"); } break; case 'ヤ': builder.append("ya"); break; case 'ユ': builder.append("yu"); break; case 'ヨ': if (ch2 == 'ウ') { builder.append("yō"); i++; } else { builder.append("yo"); } break; case 'ラ': if (ch2 == '゜') { builder.append("la"); i++; } else { builder.append("ra"); } break; case 'リ': if (ch2 == 'ョ' && ch3 == 'ウ') { builder.append("ryō"); i += 2; } else if (ch2 == 'ュ' && ch3 == 'ウ') { builder.append("ryū"); i += 2; } else if (ch2 == 'ャ') { builder.append("rya"); i++; } else if (ch2 == 'ョ') { builder.append("ryo"); i++; } else if (ch2 == 'ュ') { builder.append("ryu"); i++; } else if (ch2 == 'ェ') { builder.append("rye"); i++; } else if (ch2 == '゜') { builder.append("li"); i++; } else { builder.append("ri"); } break; case 'ル': if (ch2 == '゜') { builder.append("lu"); i++; } else { builder.append("ru"); } break; case 'レ': if (ch2 == '゜') { builder.append("le"); i++; } else { builder.append("re"); } break; case 'ロ': if (ch2 == 'ウ') { builder.append("rō"); i++; } else if (ch2 == '゜') { builder.append("lo"); i++; } else { builder.append("ro"); } break; case 'ワ': builder.append("wa"); break; case 'ヰ': builder.append("i"); break; case 'ヱ': builder.append("e"); break; case 'ヲ': builder.append("o"); break; case 'ン': switch (ch2) { case 'バ': case 'ビ': case 'ブ': case 'ベ': case 'ボ': case 'パ': case 'ピ': case 'プ': case 'ペ': case 'ポ': case 'マ': case 'ミ': case 'ム': case 'メ': case 'モ': builder.append('m'); break main; case 'ヤ': case 'ユ': case 'ヨ': case 'ア': case 'イ': case 'ウ': case 'エ': case 'オ': builder.append("n'"); break main; default: builder.append("n"); break main; } case 'ガ': builder.append("ga"); break; case 'ギ': if (ch2 == 'ョ' && ch3 == 'ウ') { builder.append("gyō"); i += 2; } else if 
(ch2 == 'ュ' && ch3 == 'ウ') { builder.append("gyū"); i += 2; } else if (ch2 == 'ャ') { builder.append("gya"); i++; } else if (ch2 == 'ョ') { builder.append("gyo"); i++; } else if (ch2 == 'ュ') { builder.append("gyu"); i++; } else if (ch2 == 'ェ') { builder.append("gye"); i++; } else { builder.append("gi"); } break; case 'グ': switch (ch2) { case 'ァ': builder.append("gwa"); i++; break; case 'ィ': builder.append("gwi"); i++; break; case 'ェ': builder.append("gwe"); i++; break; case 'ォ': builder.append("gwo"); i++; break; case 'ヮ': builder.append("gwa"); i++; break; default: builder.append("gu"); break; } break; case 'ゲ': builder.append("ge"); break; case 'ゴ': if (ch2 == 'ウ') { builder.append("gō"); i++; } else { builder.append("go"); } break; case 'ザ': builder.append("za"); break; case 'ジ': if (ch2 == 'ョ' && ch3 == 'ウ') { builder.append("jō"); i += 2; } else if (ch2 == 'ュ' && ch3 == 'ウ') { builder.append("jū"); i += 2; } else if (ch2 == 'ャ') { builder.append("ja"); i++; } else if (ch2 == 'ョ') { builder.append("jo"); i++; } else if (ch2 == 'ュ') { builder.append("ju"); i++; } else if (ch2 == 'ェ') { builder.append("je"); i++; } else { builder.append("ji"); } break; case 'ズ': if (ch2 == 'ィ') { builder.append("zi"); i++; } else { builder.append("zu"); } break; case 'ゼ': builder.append("ze"); break; case 'ゾ': if (ch2 == 'ウ') { builder.append("zō"); i++; } else { builder.append("zo"); } break; case 'ダ': builder.append("da"); break; case 'ヂ': // TODO: investigate all this if (ch2 == 'ョ' && ch3 == 'ウ') { builder.append("jō"); i += 2; } else if (ch2 == 'ュ' && ch3 == 'ウ') { builder.append("jū"); i += 2; } else if (ch2 == 'ャ') { builder.append("ja"); i++; } else if (ch2 == 'ョ') { builder.append("jo"); i++; } else if (ch2 == 'ュ') { builder.append("ju"); i++; } else if (ch2 == 'ェ') { builder.append("je"); i++; } else { builder.append("ji"); } break; case 'ヅ': builder.append("zu"); break; case 'デ': if (ch2 == 'ィ') { builder.append("di"); i++; } else if (ch2 == 'ュ') { builder.append("dyu"); 
i++; } else { builder.append("de"); } break; case 'ド': if (ch2 == 'ウ') { builder.append("dō"); i++; } else if (ch2 == 'ゥ') { builder.append("du"); i++; } else { builder.append("do"); } break; case 'バ': builder.append("ba"); break; case 'ビ': if (ch2 == 'ョ' && ch3 == 'ウ') { builder.append("byō"); i += 2; } else if (ch2 == 'ュ' && ch3 == 'ウ') { builder.append("byū"); i += 2; } else if (ch2 == 'ャ') { builder.append("bya"); i++; } else if (ch2 == 'ョ') { builder.append("byo"); i++; } else if (ch2 == 'ュ') { builder.append("byu"); i++; } else if (ch2 == 'ェ') { builder.append("bye"); i++; } else { builder.append("bi"); } break; case 'ブ': builder.append("bu"); break; case 'ベ': builder.append("be"); break; case 'ボ': if (ch2 == 'ウ') { builder.append("bō"); i++; } else { builder.append("bo"); } break; case 'パ': builder.append("pa"); break; case 'ピ': if (ch2 == 'ョ' && ch3 == 'ウ') { builder.append("pyō"); i += 2; } else if (ch2 == 'ュ' && ch3 == 'ウ') { builder.append("pyū"); i += 2; } else if (ch2 == 'ャ') { builder.append("pya"); i++; } else if (ch2 == 'ョ') { builder.append("pyo"); i++; } else if (ch2 == 'ュ') { builder.append("pyu"); i++; } else if (ch2 == 'ェ') { builder.append("pye"); i++; } else { builder.append("pi"); } break; case 'プ': builder.append("pu"); break; case 'ペ': builder.append("pe"); break; case 'ポ': if (ch2 == 'ウ') { builder.append("pō"); i++; } else { builder.append("po"); } break; case 'ヷ': builder.append("va"); break; case 'ヸ': builder.append("vi"); break; case 'ヹ': builder.append("ve"); break; case 'ヺ': builder.append("vo"); break; case 'ヴ': if (ch2 == 'ィ' && ch3 == 'ェ') { builder.append("vye"); i += 2; } else { builder.append('v'); } break; case 'ァ': builder.append('a'); break; case 'ィ': builder.append('i'); break; case 'ゥ': builder.append('u'); break; case 'ェ': builder.append('e'); break; case 'ォ': builder.append('o'); break; case 'ヮ': builder.append("wa"); break; case 'ャ': builder.append("ya"); break; case 'ュ': builder.append("yu"); break; case 'ョ': 
builder.append("yo"); break; case 'ー': break; default: builder.append(ch); } } } }
apache/parquet-java
36,835
parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestEncryptionOptions.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.parquet.hadoop; import static org.apache.parquet.hadoop.ParquetFileWriter.Mode.OVERWRITE; import static org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.BOOLEAN; import static org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.INT32; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.parquet.crypto.ColumnDecryptionProperties; import org.apache.parquet.crypto.ColumnEncryptionProperties; import org.apache.parquet.crypto.DecryptionKeyRetrieverMock; import org.apache.parquet.crypto.FileDecryptionProperties; import org.apache.parquet.crypto.FileEncryptionProperties; import org.apache.parquet.crypto.ParquetCipher; import org.apache.parquet.crypto.ParquetCryptoRuntimeException; import org.apache.parquet.crypto.SingleRow; import org.apache.parquet.example.data.Group; import org.apache.parquet.example.data.simple.SimpleGroupFactory; import org.apache.parquet.hadoop.example.ExampleParquetWriter; import 
org.apache.parquet.hadoop.example.GroupReadSupport; import org.apache.parquet.hadoop.metadata.ColumnPath; import org.apache.parquet.io.api.Binary; import org.apache.parquet.schema.MessageType; import org.apache.parquet.schema.Types; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ErrorCollector; import org.junit.rules.TemporaryFolder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /* * This file contains samples for writing and reading encrypted Parquet files in different * encryption and decryption configurations. The samples have the following goals: * 1) Demonstrate usage of different options for data encryption and decryption. * 2) Produce encrypted files for interoperability tests with other (eg parquet-cpp) * readers that support encryption. * 3) Produce encrypted files with plaintext footer, for testing the ability of legacy * readers to parse the footer and read unencrypted columns. * * The write sample produces number of parquet files, each encrypted with a different * encryption configuration as described below. * The name of each file is in the form of: * tester<encryption config number>.parquet.encrypted. * * The read sample creates a set of decryption configurations and then uses each of them * to read all encrypted files in the input directory. * * The different encryption and decryption configurations are listed below. * * * A detailed description of the Parquet Modular Encryption specification can be found * here: * https://github.com/apache/parquet-format/blob/encryption/Encryption.md * * The write sample creates files with seven columns in the following * encryption configurations: * * UNIFORM_ENCRYPTION: Encrypt all columns and the footer with the same key. * (uniform encryption) * UNIFORM_ENCRYPTION_PLAINTEXT_FOOTER: Encrypt all columns with the same key. * Do not encrypt footer. * ENCRYPT_COLUMNS_AND_FOOTER: Encrypt six columns and the footer, with different * keys. 
* ENCRYPT_COLUMNS_PLAINTEXT_FOOTER: Encrypt six columns, with different keys. * Do not encrypt footer (to enable legacy readers) * - plaintext footer mode. * ENCRYPT_COLUMNS_PLAINTEXT_FOOTER_COMPLETE: Encrypt six columns with different keys * Do not encrypt footer. Encrypt the rest of the columns * with the footer key. * ENCRYPT_COLUMNS_AND_FOOTER_AAD: Encrypt six columns and the footer, with different * keys. Supply aad_prefix for file identity * verification. * ENCRYPT_COLUMNS_AND_FOOTER_DISABLE_AAD_STORAGE: Encrypt six columns and the footer, * with different keys. Supply aad_prefix, and call * disable_aad_prefix_storage to prevent file * identity storage in file metadata. * ENCRYPT_COLUMNS_AND_FOOTER_CTR: Encrypt six columns and the footer, with different * keys. Use the alternative (AES_GCM_CTR_V1) algorithm. * NO_ENCRYPTION: Do not encrypt anything * * * The read sample uses each of the following decryption configurations to read every * encrypted files in the input directory: * * DECRYPT_WITH_KEY_RETRIEVER: Decrypt using key retriever that holds the keys of * the encrypted columns and the footer key. * DECRYPT_WITH_KEY_RETRIEVER_AAD: Decrypt using key retriever that holds the keys of * the encrypted columns and the footer key. Supplies * aad_prefix to verify file identity. * DECRYPT_WITH_EXPLICIT_KEYS: Decrypt using explicit column and footer keys * (instead of key retrieval callback). * NO_DECRYPTION: Do not decrypt anything. 
*/ public class TestEncryptionOptions { private static final Logger LOG = LoggerFactory.getLogger(TestEncryptionOptions.class); @Rule public TemporaryFolder temporaryFolder = new TemporaryFolder(); @Rule public ErrorCollector localErrorCollector = new ErrorCollector(); private ErrorCollector errorCollector; private InterOpTester interop = new InterOpTester(); private static final String CHANGESET = "40379b3"; private static final byte[] FOOTER_ENCRYPTION_KEY = "0123456789012345".getBytes(); private static final byte[][] COLUMN_ENCRYPTION_KEYS = { "1234567890123450".getBytes(), "1234567890123451".getBytes(), "1234567890123452".getBytes(), "1234567890123453".getBytes(), "1234567890123454".getBytes(), "1234567890123455".getBytes() }; private static final String[] COLUMN_ENCRYPTION_KEY_IDS = {"kc1", "kc2", "kc3", "kc4", "kc5", "kc6"}; private static final String FOOTER_ENCRYPTION_KEY_ID = "kf"; private static final String AAD_PREFIX_STRING = "tester"; private static final byte[] footerKeyMetadata = FOOTER_ENCRYPTION_KEY_ID.getBytes(StandardCharsets.UTF_8); private static final byte[] AADPrefix = AAD_PREFIX_STRING.getBytes(StandardCharsets.UTF_8); private static final int ROW_COUNT = 10000; private static final List<SingleRow> DATA = Collections.unmodifiableList(SingleRow.generateRandomData(ROW_COUNT)); private static final List<SingleRow> LINEAR_DATA = Collections.unmodifiableList(SingleRow.generateLinearData(250)); private static final MessageType SCHEMA = SingleRow.getSchema(); private static final DecryptionKeyRetrieverMock decryptionKeyRetrieverMock = new DecryptionKeyRetrieverMock() .putKey(FOOTER_ENCRYPTION_KEY_ID, FOOTER_ENCRYPTION_KEY) .putKey(COLUMN_ENCRYPTION_KEY_IDS[0], COLUMN_ENCRYPTION_KEYS[0]) .putKey(COLUMN_ENCRYPTION_KEY_IDS[1], COLUMN_ENCRYPTION_KEYS[1]) .putKey(COLUMN_ENCRYPTION_KEY_IDS[2], COLUMN_ENCRYPTION_KEYS[2]) .putKey(COLUMN_ENCRYPTION_KEY_IDS[3], COLUMN_ENCRYPTION_KEYS[3]) .putKey(COLUMN_ENCRYPTION_KEY_IDS[4], COLUMN_ENCRYPTION_KEYS[4]) 
.putKey(COLUMN_ENCRYPTION_KEY_IDS[5], COLUMN_ENCRYPTION_KEYS[5]); public enum EncryptionConfiguration { UNIFORM_ENCRYPTION { /** * Encryption configuration 1: Encrypt all columns and the footer with the same key. */ public FileEncryptionProperties getEncryptionProperties() { return FileEncryptionProperties.builder(FOOTER_ENCRYPTION_KEY) .withFooterKeyMetadata(footerKeyMetadata) .build(); } }, ENCRYPT_COLUMNS_AND_FOOTER { /** * Encryption configuration 2: Encrypt six columns and the footer, with different keys. */ public FileEncryptionProperties getEncryptionProperties() { Map<ColumnPath, ColumnEncryptionProperties> columnPropertiesMap = getColumnEncryptionPropertiesMap(); return FileEncryptionProperties.builder(FOOTER_ENCRYPTION_KEY) .withFooterKeyMetadata(footerKeyMetadata) .withEncryptedColumns(columnPropertiesMap) .build(); } }, ENCRYPT_COLUMNS_PLAINTEXT_FOOTER { /** * Encryption configuration 3: Encrypt six columns, with different keys. * Don't encrypt footer. * (plaintext footer mode, readable by legacy readers) */ public FileEncryptionProperties getEncryptionProperties() { Map<ColumnPath, ColumnEncryptionProperties> columnPropertiesMap = getColumnEncryptionPropertiesMap(); return FileEncryptionProperties.builder(FOOTER_ENCRYPTION_KEY) .withFooterKeyMetadata(footerKeyMetadata) .withEncryptedColumns(columnPropertiesMap) .withPlaintextFooter() .build(); } }, ENCRYPT_COLUMNS_AND_FOOTER_AAD { /** * Encryption configuration 4: Encrypt six columns and the footer, with different keys. * Use aad_prefix. 
*/ public FileEncryptionProperties getEncryptionProperties() { Map<ColumnPath, ColumnEncryptionProperties> columnPropertiesMap = getColumnEncryptionPropertiesMap(); return FileEncryptionProperties.builder(FOOTER_ENCRYPTION_KEY) .withFooterKeyMetadata(footerKeyMetadata) .withEncryptedColumns(columnPropertiesMap) .withAADPrefix(AADPrefix) .build(); } }, ENCRYPT_COLUMNS_AND_FOOTER_DISABLE_AAD_STORAGE { /** * Encryption configuration 5: Encrypt six columns and the footer, with different keys. * Use aad_prefix and disable_aad_prefix_storage. */ public FileEncryptionProperties getEncryptionProperties() { Map<ColumnPath, ColumnEncryptionProperties> columnPropertiesMap = getColumnEncryptionPropertiesMap(); return FileEncryptionProperties.builder(FOOTER_ENCRYPTION_KEY) .withFooterKeyMetadata(footerKeyMetadata) .withEncryptedColumns(columnPropertiesMap) .withAADPrefix(AADPrefix) .withoutAADPrefixStorage() .build(); } }, ENCRYPT_COLUMNS_AND_FOOTER_CTR { /** * Encryption configuration 6: Encrypt six columns and the footer, with different keys. * Use AES_GCM_CTR_V1 algorithm. */ public FileEncryptionProperties getEncryptionProperties() { Map<ColumnPath, ColumnEncryptionProperties> columnPropertiesMap = getColumnEncryptionPropertiesMap(); return FileEncryptionProperties.builder(FOOTER_ENCRYPTION_KEY) .withFooterKeyMetadata(footerKeyMetadata) .withEncryptedColumns(columnPropertiesMap) .withAlgorithm(ParquetCipher.AES_GCM_CTR_V1) .build(); } }, UNIFORM_ENCRYPTION_PLAINTEXT_FOOTER { /** * Encryption configuration 7: Encrypt all columns with the same key. * Don't encrypt footer. */ public FileEncryptionProperties getEncryptionProperties() { return FileEncryptionProperties.builder(FOOTER_ENCRYPTION_KEY) .withPlaintextFooter() .withFooterKeyMetadata(footerKeyMetadata) .build(); } }, ENCRYPT_COLUMNS_PLAIN_FOOTER_COMPLETE { /** * Encryption configuration 8: Encrypt six columns with different keys. * Encrypt the rest of the columns with the footer key. Don't encrypt footer. 
*/ public FileEncryptionProperties getEncryptionProperties() { Map<ColumnPath, ColumnEncryptionProperties> columnPropertiesMap = getColumnEncryptionPropertiesMap(); return FileEncryptionProperties.builder(FOOTER_ENCRYPTION_KEY) .withFooterKeyMetadata(footerKeyMetadata) .withEncryptedColumns(columnPropertiesMap) .withCompleteColumnEncryption() .withPlaintextFooter() .build(); } }, NO_ENCRYPTION { public FileEncryptionProperties getEncryptionProperties() { return null; } }; public abstract FileEncryptionProperties getEncryptionProperties(); } public enum DecryptionConfiguration { DECRYPT_WITH_KEY_RETRIEVER { /** * Decryption configuration 1: Decrypt using key retriever callback that holds the keys * of the encrypted columns and the footer key. */ public FileDecryptionProperties getDecryptionProperties() { return FileDecryptionProperties.builder() .withKeyRetriever(decryptionKeyRetrieverMock) .build(); } }, DECRYPT_WITH_KEY_RETRIEVER_AAD { /** * Decryption configuration 2: Decrypt using key retriever callback that holds the keys * of the encrypted columns and the footer key. Supply aad_prefix. */ public FileDecryptionProperties getDecryptionProperties() { return FileDecryptionProperties.builder() .withKeyRetriever(decryptionKeyRetrieverMock) .withAADPrefix(AADPrefix) .build(); } }, DECRYPT_WITH_EXPLICIT_KEYS { /** * Decryption configuration 3: Decrypt using explicit column and footer keys. 
*/ public FileDecryptionProperties getDecryptionProperties() { Map<ColumnPath, ColumnDecryptionProperties> columnMap = getColumnDecryptionPropertiesMap(); return FileDecryptionProperties.builder() .withColumnKeys(columnMap) .withFooterKey(FOOTER_ENCRYPTION_KEY) .build(); } }, NO_DECRYPTION { public FileDecryptionProperties getDecryptionProperties() { return null; } }; public abstract FileDecryptionProperties getDecryptionProperties(); } @Test public void testWriteReadEncryptedParquetFiles() throws IOException { this.errorCollector = localErrorCollector; Path rootPath = new Path(temporaryFolder.getRoot().getPath()); LOG.info("======== testWriteReadEncryptedParquetFiles {} ========", rootPath.toString()); byte[] AADPrefix = AAD_PREFIX_STRING.getBytes(StandardCharsets.UTF_8); // Write using various encryption configurations testWriteEncryptedParquetFiles(rootPath, DATA); // Read using various decryption configurations. testReadEncryptedParquetFiles(rootPath, DATA); } /** * This interop test should be run from a separate integration tests suite, so it's not marked with @Test. * It's not moved into a separate file since it shares many utilities with the unit tests in this file. * * @param errorCollector - the error collector of the integration tests suite * @throws IOException */ public void testInteropReadEncryptedParquetFiles(ErrorCollector errorCollector) throws IOException { this.errorCollector = errorCollector; boolean readOnlyEncrypted = true; byte[] AADPrefix = AAD_PREFIX_STRING.getBytes(StandardCharsets.UTF_8); // Read using various decryption configurations. 
testInteropReadEncryptedParquetFiles(readOnlyEncrypted, LINEAR_DATA); } private void testWriteEncryptedParquetFiles(Path root, List<SingleRow> data) throws IOException { Configuration conf = new Configuration(); int pageSize = data.size() / 10; // Ensure that several pages will be created int rowGroupSize = pageSize * 6 * 5; // Ensure that there are more row-groups created SimpleGroupFactory f = new SimpleGroupFactory(SCHEMA); EncryptionConfiguration[] encryptionConfigurations = EncryptionConfiguration.values(); for (EncryptionConfiguration encryptionConfiguration : encryptionConfigurations) { Path file = new Path(root, getFileName(encryptionConfiguration)); FileEncryptionProperties encryptionProperties = encryptionConfiguration.getEncryptionProperties(); LOG.info("\nWrite " + file.toString()); try (ParquetWriter<Group> writer = ExampleParquetWriter.builder(file) .withWriteMode(OVERWRITE) .withRowGroupSize(rowGroupSize) .withPageSize(pageSize) .withType(SCHEMA) .withConf(conf) .withEncryption(encryptionProperties) .build()) { for (SingleRow singleRow : data) { writer.write(f.newGroup() .append(SingleRow.BOOLEAN_FIELD_NAME, singleRow.boolean_field) .append(SingleRow.INT32_FIELD_NAME, singleRow.int32_field) .append(SingleRow.FLOAT_FIELD_NAME, singleRow.float_field) .append(SingleRow.DOUBLE_FIELD_NAME, singleRow.double_field) .append(SingleRow.BINARY_FIELD_NAME, Binary.fromConstantByteArray(singleRow.ba_field)) .append( SingleRow.FIXED_LENGTH_BINARY_FIELD_NAME, Binary.fromConstantByteArray(singleRow.flba_field)) .append(SingleRow.PLAINTEXT_INT32_FIELD_NAME, singleRow.plaintext_int32_field)); } } } } private String getFileName(EncryptionConfiguration encryptionConfiguration) { return encryptionConfiguration.toString().toLowerCase() + ".parquet.encrypted"; } private void testReadEncryptedParquetFiles(Path root, List<SingleRow> data) { Configuration conf = new Configuration(); DecryptionConfiguration[] decryptionConfigurations = DecryptionConfiguration.values(); for 
(DecryptionConfiguration decryptionConfiguration : decryptionConfigurations) { EncryptionConfiguration[] encryptionConfigurations = EncryptionConfiguration.values(); for (EncryptionConfiguration encryptionConfiguration : encryptionConfigurations) { Path file = new Path(root, getFileName(encryptionConfiguration)); LOG.info("==> Decryption configuration {}", decryptionConfiguration); FileDecryptionProperties fileDecryptionProperties = decryptionConfiguration.getDecryptionProperties(); LOG.info("--> Read file {} {}", file.toString(), encryptionConfiguration); // Read only the non-encrypted columns if ((decryptionConfiguration == DecryptionConfiguration.NO_DECRYPTION) && (encryptionConfiguration == EncryptionConfiguration.ENCRYPT_COLUMNS_PLAINTEXT_FOOTER)) { conf.set( "parquet.read.schema", Types.buildMessage() .optional(INT32) .named(SingleRow.PLAINTEXT_INT32_FIELD_NAME) .named("FormatTestObject") .toString()); } // Project column encrypted with footer key if ((decryptionConfiguration == DecryptionConfiguration.NO_DECRYPTION) && (encryptionConfiguration == EncryptionConfiguration.ENCRYPT_COLUMNS_PLAIN_FOOTER_COMPLETE)) { conf.set( "parquet.read.schema", Types.buildMessage() .optional(INT32) .named(SingleRow.PLAINTEXT_INT32_FIELD_NAME) .named("FormatTestObject") .toString()); } int rowNum = 0; try (ParquetReader<Group> reader = ParquetReader.builder(new GroupReadSupport(), file) .withConf(conf) .withDecryption(fileDecryptionProperties) .build()) { for (Group group = reader.read(); group != null; group = reader.read()) { SingleRow rowExpected = data.get(rowNum++); // plaintext columns if (rowExpected.plaintext_int32_field != group.getInteger(SingleRow.PLAINTEXT_INT32_FIELD_NAME, 0)) { addErrorToErrorCollectorAndLog( "Wrong int", encryptionConfiguration, decryptionConfiguration); } // encrypted columns if (decryptionConfiguration != DecryptionConfiguration.NO_DECRYPTION) { if (rowExpected.boolean_field != group.getBoolean(SingleRow.BOOLEAN_FIELD_NAME, 0)) { 
addErrorToErrorCollectorAndLog( "Wrong bool", encryptionConfiguration, decryptionConfiguration); } if (rowExpected.int32_field != group.getInteger(SingleRow.INT32_FIELD_NAME, 0)) { addErrorToErrorCollectorAndLog( "Wrong int", encryptionConfiguration, decryptionConfiguration); } if (rowExpected.float_field != group.getFloat(SingleRow.FLOAT_FIELD_NAME, 0)) { addErrorToErrorCollectorAndLog( "Wrong float", encryptionConfiguration, decryptionConfiguration); } if (rowExpected.double_field != group.getDouble(SingleRow.DOUBLE_FIELD_NAME, 0)) { addErrorToErrorCollectorAndLog( "Wrong double", encryptionConfiguration, decryptionConfiguration); } if ((null != rowExpected.ba_field) && !Arrays.equals( rowExpected.ba_field, group.getBinary(SingleRow.BINARY_FIELD_NAME, 0) .getBytes())) { addErrorToErrorCollectorAndLog( "Wrong byte array", encryptionConfiguration, decryptionConfiguration); } if (!Arrays.equals( rowExpected.flba_field, group.getBinary(SingleRow.FIXED_LENGTH_BINARY_FIELD_NAME, 0) .getBytes())) { addErrorToErrorCollectorAndLog( "Wrong fixed-length byte array", encryptionConfiguration, decryptionConfiguration); } } } } catch (ParquetCryptoRuntimeException e) { checkResult(file.getName(), decryptionConfiguration, e); } catch (Exception e) { e.printStackTrace(); addErrorToErrorCollectorAndLog( "Unexpected exception: " + e.getClass().getName() + " with message: " + e.getMessage(), encryptionConfiguration, decryptionConfiguration); } conf.unset("parquet.read.schema"); } } } private void testInteropReadEncryptedParquetFiles(boolean readOnlyEncrypted, List<SingleRow> data) throws IOException { Configuration conf = new Configuration(); DecryptionConfiguration[] decryptionConfigurations = DecryptionConfiguration.values(); for (DecryptionConfiguration decryptionConfiguration : decryptionConfigurations) { EncryptionConfiguration[] encryptionConfigurations = EncryptionConfiguration.values(); for (EncryptionConfiguration encryptionConfiguration : encryptionConfigurations) { if 
(readOnlyEncrypted && (EncryptionConfiguration.NO_ENCRYPTION == encryptionConfiguration)) { continue; } if (EncryptionConfiguration.UNIFORM_ENCRYPTION_PLAINTEXT_FOOTER == encryptionConfiguration) { continue; } if (EncryptionConfiguration.ENCRYPT_COLUMNS_PLAIN_FOOTER_COMPLETE == encryptionConfiguration) { continue; } Path file = interop.GetInterOpFile(getFileName(encryptionConfiguration), CHANGESET); LOG.info("==> Decryption configuration {}", decryptionConfiguration); FileDecryptionProperties fileDecryptionProperties = decryptionConfiguration.getDecryptionProperties(); LOG.info("--> Read file {} {}", file.toString(), encryptionConfiguration); // Read only the non-encrypted columns if ((decryptionConfiguration == DecryptionConfiguration.NO_DECRYPTION) && (encryptionConfiguration == EncryptionConfiguration.ENCRYPT_COLUMNS_PLAINTEXT_FOOTER)) { conf.set( "parquet.read.schema", Types.buildMessage() .required(BOOLEAN) .named(SingleRow.BOOLEAN_FIELD_NAME) .required(INT32) .named(SingleRow.INT32_FIELD_NAME) .named("FormatTestObject") .toString()); } int rowNum = 0; try (ParquetReader<Group> reader = ParquetReader.builder(new GroupReadSupport(), file) .withConf(conf) .withDecryption(fileDecryptionProperties) .build()) { for (Group group = reader.read(); group != null; group = reader.read()) { SingleRow rowExpected = data.get(rowNum++); // plaintext columns if (rowExpected.boolean_field != group.getBoolean(SingleRow.BOOLEAN_FIELD_NAME, 0)) { addErrorToErrorCollectorAndLog( "Wrong bool", encryptionConfiguration, decryptionConfiguration); } if (rowExpected.int32_field != group.getInteger(SingleRow.INT32_FIELD_NAME, 0)) { addErrorToErrorCollectorAndLog( "Wrong int", encryptionConfiguration, decryptionConfiguration); } // encrypted columns if (decryptionConfiguration != DecryptionConfiguration.NO_DECRYPTION) { if (rowExpected.float_field != group.getFloat(SingleRow.FLOAT_FIELD_NAME, 0)) { addErrorToErrorCollectorAndLog( "Wrong float", encryptionConfiguration, 
decryptionConfiguration); } if (rowExpected.double_field != group.getDouble(SingleRow.DOUBLE_FIELD_NAME, 0)) { addErrorToErrorCollectorAndLog( "Wrong double", encryptionConfiguration, decryptionConfiguration); } } } } catch (ParquetCryptoRuntimeException e) { checkResult(file.getName(), decryptionConfiguration, e); } catch (Exception e) { e.printStackTrace(); addErrorToErrorCollectorAndLog( "Unexpected exception: " + e.getClass().getName() + " with message: " + e.getMessage(), encryptionConfiguration, decryptionConfiguration); } conf.unset("parquet.read.schema"); } } } /** * Check that the decryption result is as expected. */ private void checkResult( String file, DecryptionConfiguration decryptionConfiguration, ParquetCryptoRuntimeException exception) { String errorMessage = exception.getMessage(); String exceptionMsg = (null == errorMessage ? exception.toString() : errorMessage); // Extract encryptionConfigurationNumber from the parquet file name. EncryptionConfiguration encryptionConfiguration = getEncryptionConfigurationFromFilename(file); // Encryption_configuration 5 contains aad_prefix and // disable_aad_prefix_storage. // An exception is expected to be thrown if the file is not decrypted with aad_prefix. if (encryptionConfiguration == EncryptionConfiguration.ENCRYPT_COLUMNS_AND_FOOTER_DISABLE_AAD_STORAGE) { if (decryptionConfiguration == DecryptionConfiguration.DECRYPT_WITH_KEY_RETRIEVER || decryptionConfiguration == DecryptionConfiguration.DECRYPT_WITH_EXPLICIT_KEYS) { if (!exceptionMsg.contains("AAD")) { addErrorToErrorCollectorAndLog( "Expecting AAD related exception", exceptionMsg, encryptionConfiguration, decryptionConfiguration); } else { LOG.info("Exception as expected: " + exceptionMsg); } return; } } // Decryption configuration 2 contains aad_prefix. An exception is expected to // be thrown if the file was not encrypted with the same aad_prefix. 
if (decryptionConfiguration == DecryptionConfiguration.DECRYPT_WITH_KEY_RETRIEVER_AAD) { if (encryptionConfiguration != EncryptionConfiguration.ENCRYPT_COLUMNS_AND_FOOTER_DISABLE_AAD_STORAGE && encryptionConfiguration != EncryptionConfiguration.ENCRYPT_COLUMNS_AND_FOOTER_AAD && encryptionConfiguration != EncryptionConfiguration.NO_ENCRYPTION) { if (!exceptionMsg.contains("AAD")) { addErrorToErrorCollectorAndLog( "Expecting AAD related exception", exceptionMsg, encryptionConfiguration, decryptionConfiguration); } else { LOG.info("Exception as expected: " + exceptionMsg); } return; } } // Last encryption_configuration has null encryptor, so parquet is plaintext. // An exception is expected to be thrown if the file is being decrypted. if (encryptionConfiguration == EncryptionConfiguration.NO_ENCRYPTION) { if ((decryptionConfiguration == DecryptionConfiguration.DECRYPT_WITH_KEY_RETRIEVER) || (decryptionConfiguration == DecryptionConfiguration.DECRYPT_WITH_KEY_RETRIEVER_AAD) || (decryptionConfiguration == DecryptionConfiguration.DECRYPT_WITH_EXPLICIT_KEYS)) { if (!exceptionMsg.endsWith("Applying decryptor on plaintext file")) { addErrorToErrorCollectorAndLog( "Expecting exception Applying decryptor on plaintext file", exceptionMsg, encryptionConfiguration, decryptionConfiguration); } else { LOG.info("Exception as expected: " + exceptionMsg); } return; } } // Decryption configuration 4 is null, so only plaintext file can be read. An exception is expected to // be thrown if the file is encrypted. 
if (decryptionConfiguration == DecryptionConfiguration.NO_DECRYPTION) { if ((encryptionConfiguration != EncryptionConfiguration.NO_ENCRYPTION && encryptionConfiguration != EncryptionConfiguration.ENCRYPT_COLUMNS_PLAINTEXT_FOOTER)) { if (!exceptionMsg.endsWith("No keys available") && !exceptionMsg.endsWith("Null File Decryptor") && !exceptionMsg.endsWith("Footer key unavailable")) { addErrorToErrorCollectorAndLog( "Expecting No keys available exception", exceptionMsg, encryptionConfiguration, decryptionConfiguration); } else { LOG.info("Exception as expected: " + exceptionMsg); } return; } } exception.printStackTrace(); addErrorToErrorCollectorAndLog( "Didn't expect an exception", exceptionMsg, encryptionConfiguration, decryptionConfiguration); } private EncryptionConfiguration getEncryptionConfigurationFromFilename(String file) { if (!file.endsWith(".parquet.encrypted")) { return null; } String fileNamePrefix = file.replaceFirst(".parquet.encrypted", ""); try { EncryptionConfiguration encryptionConfiguration = EncryptionConfiguration.valueOf(fileNamePrefix.toUpperCase()); return encryptionConfiguration; } catch (IllegalArgumentException e) { LOG.error("File name doesn't match any known encryption configuration: " + file); errorCollector.addError(e); return null; } } private void addErrorToErrorCollectorAndLog( String errorMessage, String exceptionMessage, EncryptionConfiguration encryptionConfiguration, DecryptionConfiguration decryptionConfiguration) { String fullErrorMessage = String.format( "%s - %s Error: %s, but got [%s]", encryptionConfiguration, decryptionConfiguration, errorMessage, exceptionMessage); errorCollector.addError(new Throwable(fullErrorMessage)); LOG.error(fullErrorMessage); } private void addErrorToErrorCollectorAndLog( String errorMessage, EncryptionConfiguration encryptionConfiguration, DecryptionConfiguration decryptionConfiguration) { String fullErrorMessage = String.format("%s - %s Error: %s", encryptionConfiguration, 
decryptionConfiguration, errorMessage); errorCollector.addError(new Throwable(fullErrorMessage)); LOG.error(fullErrorMessage); } private static Map<ColumnPath, ColumnEncryptionProperties> getColumnEncryptionPropertiesMap() { Map<ColumnPath, ColumnEncryptionProperties> columnPropertiesMap = new HashMap<>(); ColumnEncryptionProperties columnPropertiesDouble = ColumnEncryptionProperties.builder( SingleRow.DOUBLE_FIELD_NAME) .withKey(COLUMN_ENCRYPTION_KEYS[0]) .withKeyID(COLUMN_ENCRYPTION_KEY_IDS[0]) .build(); columnPropertiesMap.put(columnPropertiesDouble.getPath(), columnPropertiesDouble); ColumnEncryptionProperties columnPropertiesFloat = ColumnEncryptionProperties.builder( SingleRow.FLOAT_FIELD_NAME) .withKey(COLUMN_ENCRYPTION_KEYS[1]) .withKeyID(COLUMN_ENCRYPTION_KEY_IDS[1]) .build(); columnPropertiesMap.put(columnPropertiesFloat.getPath(), columnPropertiesFloat); ColumnEncryptionProperties columnPropertiesBool = ColumnEncryptionProperties.builder( SingleRow.BOOLEAN_FIELD_NAME) .withKey(COLUMN_ENCRYPTION_KEYS[2]) .withKeyID(COLUMN_ENCRYPTION_KEY_IDS[2]) .build(); columnPropertiesMap.put(columnPropertiesBool.getPath(), columnPropertiesBool); ColumnEncryptionProperties columnPropertiesInt32 = ColumnEncryptionProperties.builder( SingleRow.INT32_FIELD_NAME) .withKey(COLUMN_ENCRYPTION_KEYS[3]) .withKeyID(COLUMN_ENCRYPTION_KEY_IDS[3]) .build(); columnPropertiesMap.put(columnPropertiesInt32.getPath(), columnPropertiesInt32); ColumnEncryptionProperties columnPropertiesBinary = ColumnEncryptionProperties.builder( SingleRow.BINARY_FIELD_NAME) .withKey(COLUMN_ENCRYPTION_KEYS[4]) .withKeyID(COLUMN_ENCRYPTION_KEY_IDS[4]) .build(); columnPropertiesMap.put(columnPropertiesBinary.getPath(), columnPropertiesBinary); ColumnEncryptionProperties columnPropertiesFixed = ColumnEncryptionProperties.builder( SingleRow.FIXED_LENGTH_BINARY_FIELD_NAME) .withKey(COLUMN_ENCRYPTION_KEYS[5]) .withKeyID(COLUMN_ENCRYPTION_KEY_IDS[5]) .build(); 
columnPropertiesMap.put(columnPropertiesFixed.getPath(), columnPropertiesFixed); return columnPropertiesMap; } private static Map<ColumnPath, ColumnDecryptionProperties> getColumnDecryptionPropertiesMap() { Map<ColumnPath, ColumnDecryptionProperties> columnMap = new HashMap<>(); ColumnDecryptionProperties columnDecryptionPropsDouble = ColumnDecryptionProperties.builder( SingleRow.DOUBLE_FIELD_NAME) .withKey(COLUMN_ENCRYPTION_KEYS[0]) .build(); columnMap.put(columnDecryptionPropsDouble.getPath(), columnDecryptionPropsDouble); ColumnDecryptionProperties columnDecryptionPropsFloat = ColumnDecryptionProperties.builder( SingleRow.FLOAT_FIELD_NAME) .withKey(COLUMN_ENCRYPTION_KEYS[1]) .build(); columnMap.put(columnDecryptionPropsFloat.getPath(), columnDecryptionPropsFloat); ColumnDecryptionProperties columnDecryptionPropsBool = ColumnDecryptionProperties.builder( SingleRow.BOOLEAN_FIELD_NAME) .withKey(COLUMN_ENCRYPTION_KEYS[2]) .build(); columnMap.put(columnDecryptionPropsBool.getPath(), columnDecryptionPropsBool); ColumnDecryptionProperties columnDecryptionPropsInt32 = ColumnDecryptionProperties.builder( SingleRow.INT32_FIELD_NAME) .withKey(COLUMN_ENCRYPTION_KEYS[3]) .build(); columnMap.put(columnDecryptionPropsInt32.getPath(), columnDecryptionPropsInt32); ColumnDecryptionProperties columnDecryptionPropsBinary = ColumnDecryptionProperties.builder( SingleRow.BINARY_FIELD_NAME) .withKey(COLUMN_ENCRYPTION_KEYS[4]) .build(); columnMap.put(columnDecryptionPropsBinary.getPath(), columnDecryptionPropsBinary); ColumnDecryptionProperties columnDecryptionPropsFixed = ColumnDecryptionProperties.builder( SingleRow.FIXED_LENGTH_BINARY_FIELD_NAME) .withKey(COLUMN_ENCRYPTION_KEYS[5]) .build(); columnMap.put(columnDecryptionPropsFixed.getPath(), columnDecryptionPropsFixed); return columnMap; } }
oracle/nosql
36,737
kvmain/src/main/java/oracle/kv/impl/admin/plan/task/RelocateRN.java
/*- * Copyright (C) 2011, 2025 Oracle and/or its affiliates. All rights reserved. * * This file was distributed by Oracle as part of a version of Oracle NoSQL * Database made available at: * * http://www.oracle.com/technetwork/database/database-technologies/nosqldb/downloads/index.html * * Please see the LICENSE file included in the top-level directory of the * appropriate version of Oracle NoSQL Database for a copy of the license and * additional information. */ package oracle.kv.impl.admin.plan.task; import java.rmi.NotBoundException; import java.rmi.RemoteException; import java.util.Collections; import java.util.HashSet; import java.util.Set; import java.util.logging.Level; import java.util.logging.Logger; import oracle.kv.KVVersion; import oracle.kv.impl.admin.Admin; import oracle.kv.impl.admin.CommandResult; import oracle.kv.impl.admin.NonfatalAssertionException; import oracle.kv.impl.admin.PlanLocksHeldException; import oracle.kv.impl.admin.TopologyCheck; import oracle.kv.impl.admin.TopologyCheck.Remedy; import oracle.kv.impl.admin.param.AdminParams; import oracle.kv.impl.admin.param.ArbNodeParams; import oracle.kv.impl.admin.param.Parameters; import oracle.kv.impl.admin.param.RepNodeParams; import oracle.kv.impl.admin.param.StorageNodeParams; import oracle.kv.impl.admin.plan.AbstractPlan; import oracle.kv.impl.admin.plan.Planner; import oracle.kv.impl.admin.plan.PortTracker; import oracle.kv.impl.admin.topo.LogDirectory; import oracle.kv.impl.admin.topo.StorageDirectory; import oracle.kv.impl.fault.CommandFaultException; import oracle.kv.impl.fault.OperationFaultException; import oracle.kv.impl.param.ParameterMap; import oracle.kv.impl.rep.admin.RepNodeAdminAPI; import oracle.kv.impl.security.login.LoginManager; import oracle.kv.impl.sna.StorageNodeAgentAPI; import oracle.kv.impl.test.TestHook; import oracle.kv.impl.test.TestHookExecute; import oracle.kv.impl.topo.ArbNode; import oracle.kv.impl.topo.ArbNodeId; import oracle.kv.impl.topo.RepGroup; import 
oracle.kv.impl.topo.RepGroupId;
import oracle.kv.impl.topo.RepNode;
import oracle.kv.impl.topo.RepNodeId;
import oracle.kv.impl.topo.StorageNode;
import oracle.kv.impl.topo.StorageNodeId;
import oracle.kv.impl.topo.Topology;
import oracle.kv.impl.util.ConfigurableService.ServiceStatus;
import oracle.kv.impl.util.VersionUtil;
import oracle.kv.impl.util.registry.RegistryUtils;
import oracle.kv.impl.util.server.LoggerUtils;
import oracle.kv.util.ErrorMessage;

/**
 * Move a single RN to a new storage node.
 * 1. stop/disable RN
 * 2. change params and topo
 * 3. update the other members of the rep group.
 * 4. broadcast the topo changes
 * 5. turn off the disable bit and tell the new SN to deploy the RN
 * 6. wait for the new RN to come up and become consistent with the shard
 * master, then delete the log files of the old RN.
 *
 * <p>The steps above are executed in order by {@link #doWork}. The numbered
 * {@code FAULT_HOOK} assertion points between the steps allow tests to inject
 * failures at each stage boundary; the cleanup job ({@link #getCleanupJob})
 * relies on the observable state left by each step (e.g. the RN's disable
 * bit) to decide how far the task progressed.
 */
public class RelocateRN extends SingleJobTask {

    private static final long serialVersionUID = 1L;

    /* The RN being relocated, and its source/destination storage nodes. */
    private final RepNodeId rnId;
    private final StorageNodeId oldSN;
    private final StorageNodeId newSN;
    /*
     * Note that we now use "storage directory" instead of "mount point", but
     * since the field is serialized it would be a pain to change.
     */
    private final String newMountPoint;
    /*
     * If deserializing from an old version newStorageDirectorySize will
     * be 0. It is assumed that checks have already been made to prevent
     * a non-zero size before the Admins are upgraded.
     */
    private final long newStorageDirectorySize;
    /*
     * RN log directory
     */
    private final String newLogDirectory;
    /*
     * RN log directory size
     */
    private final long newLogDirectorySize;
    private final AbstractPlan plan;

    /* Hook to inject failures at different points in task execution */
    public static TestHook<Integer> FAULT_HOOK;

    /**
     * Creates the relocation task.
     *
     * @param plan the owning plan, used for admin access and logging
     * @param oldSN the SN currently hosting the RN
     * @param newSN the SN the RN is to be moved to; must differ from oldSN
     * @param rnId the RN to relocate
     * @param newStorageDirectory target storage directory, or null for the
     *        SN's default (recorded as a null mount point and size 0)
     * @param newLogDir target RN log directory, or null for the default
     */
    public RelocateRN(AbstractPlan plan,
                      StorageNodeId oldSN,
                      StorageNodeId newSN,
                      RepNodeId rnId,
                      StorageDirectory newStorageDirectory,
                      LogDirectory newLogDir) {
        super();

        /*
         * This task does not support moving an RN within the same SN.
         * Additional checks would be needed to make sure the directories
         * are different. Also more safeguards should be added when deleting
         * the old RN.
         */
        if (oldSN.equals(newSN)) {
            throw new NonfatalAssertionException("The RelocateRN task does " +
                                                 "not support relocating to " +
                                                 "the same Storage Node");
        }
        this.oldSN = oldSN;
        this.newSN = newSN;
        this.plan = plan;
        this.rnId = rnId;
        if (newStorageDirectory == null) {
            newMountPoint = null;
            newStorageDirectorySize = 0L;
        } else {
            newMountPoint = newStorageDirectory.getPath();
            newStorageDirectorySize = newStorageDirectory.getSize();
        }
        if (newLogDir == null) {
            newLogDirectory = null;
            newLogDirectorySize = 0L;
        } else {
            newLogDirectory = newLogDir.getPath();
            newLogDirectorySize = newLogDir.getSize();
        }
    }

    @Override
    protected AbstractPlan getPlan() {
        return plan;
    }

    /**
     * Use the RNLocationCheck and the current state of the JE HA repGroupDB to
     * repair any inconsistencies between the AdminDB, the SNA config files,
     * and the JE HA repGroupDB. Both the new and old SN are checked, and any
     * remedy reported by the checker is applied.
     *
     * @return true only if the remedies for both the new and the old SN were
     *         applied successfully (as reported by applyRemedy)
     * @throws NotBoundException
     * @throws RemoteException
     */
    private boolean checkAndRepairLocation()
        throws RemoteException, NotBoundException {

        final Admin admin = plan.getAdmin();
        final Logger logger = plan.getLogger();
        final TopologyCheck checker =
            new TopologyCheck(this.toString(), logger,
                              admin.getCurrentTopology(),
                              admin.getCurrentParameters());

        final StorageDirectory newStorageDir =
                new StorageDirectory(newMountPoint, newStorageDirectorySize);

        /* ApplyRemedy will throw an exception if there is a problem */
        /*
         * TODO : Check if this check needs to be done for logdirectory
         * in case of RN relocation elasticity operation. If yes, then will be
         * done in follow up code drop.
         */
        Remedy remedy =
            checker.checkLocation(admin, newSN, rnId,
                                  false /* calledByDeployNewRN */,
                                  true /* makeRNEnabled */,
                                  oldSN, newStorageDir);
        if (!remedy.isOkay()) {
            logger.log(Level.INFO, "{0} check of newSN: {1}",
                       new Object[]{this, remedy});
        }
        final boolean newDone = checker.applyRemedy(remedy, plan);

        remedy =
            checker.checkLocation(admin, oldSN, rnId,
                                  false /* calledByDeployNewRN */,
                                  true /* makeRNEnabled */,
                                  oldSN, newStorageDir);
        if (!remedy.isOkay()) {
            logger.log(Level.INFO, "{0} check of oldSN: {1}",
                       new Object[]{this, remedy});
        }
        boolean oldDone = checker.applyRemedy(remedy, plan);
        return newDone && oldDone;
    }

    /**
     * Executes the relocation: version check, location repair, then steps 1-6
     * described on the class. If the repair phase finds the RN already on the
     * new SN, steps 1-5 are skipped and only the old-RN teardown (step 6)
     * runs, making the task idempotent across retries.
     *
     * @return SUCCEEDED when the old RN has been destroyed, INTERRUPTED if
     *         the topology broadcast or the teardown wait was interrupted
     */
    @Override
    public State doWork() throws Exception {
        final Admin admin = plan.getAdmin();
        final Logger logger = plan.getLogger();
        long stopRNTime;

        /*
         * Prevent the inadvertent downgrade of a RN version by checking
         * that the destination SN is a version that is >= source SN.
         */
        try {
            checkVersions();
        } catch (OperationFaultException e) {
            throw new CommandFaultException(e.getMessage(), e,
                                            ErrorMessage.NOSQL_5200,
                                            CommandResult.NO_CLEANUP_JOBS);
        }

        /*
         * Before doing any work, make sure that the topology, params,
         * SN config files, and JE HA rep group are consistent. This is
         * most definitive if the JE HA repGroupDB can be read, which
         * is only possible if there is a master of the group. The correct
         * location can be deduced in some other limited cases too.
         */
        final boolean done = checkAndRepairLocation();

        /* Check the topology after any fixes */
        final Topology current = admin.getCurrentTopology();
        RepNode rn = current.get(rnId);
        if (done && rn.getStorageNodeId().equals(newSN)) {
            /*
             * The check has been done, any small repairs needed were done, all
             * is consistent, and the RN is already living on the new
             * SN. Nothing more to be done with the topology and params.
             */
            logger.log(Level.INFO,
                       "{0} {1} is already on {2}, no additional metadata " +
                       "changes needed.", new Object[]{this, rnId, newSN});
            stopRNTime = System.currentTimeMillis();
        } else {
            /*
             * There's work to do to update the topology, params, and JE HA
             * repGroupDB. Make sure both old and new SNs are up.
             */
            final LoginManager loginMgr = admin.getLoginManager();
            try {
                Utils.confirmSNStatus(current, loginMgr, logger, oldSN, true,
                                      "Please ensure that " + oldSN +
                                      " is deployed and running before " +
                                      "attempting a relocate " + rnId + ".");
                Utils.confirmSNStatus(current, loginMgr, logger, newSN, true,
                                      "Please ensure that " + newSN +
                                      " is deployed and running before " +
                                      "attempting a relocate " + rnId + ".");
            } catch (OperationFaultException e) {
                throw new CommandFaultException(e.getMessage(), e,
                                                ErrorMessage.NOSQL_5200,
                                                CommandResult.NO_CLEANUP_JOBS);
            }

            final RepGroupId rgId = current.get(rnId).getRepGroupId();

            assert TestHookExecute.doHookIfSet(FAULT_HOOK, 1);

            /* Step 1. Stop and disable the RN. */
            try {
                Utils.stopRN(plan, oldSN, rnId,
                             true, /* await for healthy */
                             false /* not failure */);
            } catch (Exception e) {
                throw new CommandFaultException(
                    e.getMessage(), e, ErrorMessage.NOSQL_5400,
                    CommandResult.TOPO_PLAN_REPAIR);
            }

            /*
             * Assert that the RN's disable bit is set, because the task cleanup
             * implementation uses that as an indication that step 5 executed.
             */
            final RepNodeParams rnp = admin.getRepNodeParams(rnId);
            if (!rnp.isDisabled()) {
                final String msg = "Expected disabled bit to be set " +
                                   "for " + rnId + ": " + rnp;
                throw new CommandFaultException(msg,
                                                new IllegalStateException(msg),
                                                ErrorMessage.NOSQL_5400,
                                                CommandResult.TOPO_PLAN_REPAIR);
            }
            /* Timestamp of the stop; passed to destroyRepNode in step 6. */
            stopRNTime = System.currentTimeMillis();

            assert TestHookExecute.doHookIfSet(FAULT_HOOK, 2);

            /* Step 2. Change params and topo, as one transaction. */
            changeParamsAndTopo(oldSN, newSN, rgId);

            assert TestHookExecute.doHookIfSet(FAULT_HOOK, 3);

            /*
             * Step 3. Tell the HA group about the new location of this
             * node. This requires a quorum to update the HA group db, and may
             * take some retrying, as step 1 might have actually shut down the
             * master of the HA group.
             */
            try {
                Utils.changeHAAddress(admin.getCurrentTopology(),
                                      admin.getCurrentParameters(),
                                      admin.getParams().getAdminParams(),
                                      rnId, oldSN, newSN, plan);
            } catch (OperationFaultException e) {
                throw new CommandFaultException(e.getMessage(), e,
                                                ErrorMessage.NOSQL_5400,
                                                CommandResult.TOPO_PLAN_REPAIR);
            }

            assert TestHookExecute.doHookIfSet(FAULT_HOOK, 4);

            /*
             * Step 4. Send topology change to all nodes, send param changes
             * with updated helper hosts to RN peers
             */
            final Topology topo = admin.getCurrentTopology();
            if (!Utils.broadcastTopoChangesToRNs
                (logger, topo,
                 "relocate " + rnId + " from " + oldSN + " to " + newSN,
                 admin.getParams().getAdminParams(), plan)) {

                /*
                 * The plan is interrupted before enough nodes saw the new
                 * topology.
                 */
                return State.INTERRUPTED;
            }

            /* Send the updated params to the RN's peers */
            Utils.refreshParamsOnPeers(plan, rnId);

            assert TestHookExecute.doHookIfSet(FAULT_HOOK, 5);

            /*
             * Step 5. Remove the disable flag for this RN, and deploy the RN on
             * the new SN.
             */
            startRN(plan, newSN, rnId);

            assert TestHookExecute.doHookIfSet(FAULT_HOOK, 6);
        }

        /*
         * Step 6: Destroy the old RN. Make sure the new RN is up and is current
         * with its master. The RNLocationCheck repair does not do this step,
         * so check if it's needed at this time.
         */
        return destroyRepNode(stopRNTime);
    }

    /**
     * Complain if the new SN is at an older version than the old SN.
     * Both SNAs are contacted via the registry and their ping() versions
     * compared at minor-version granularity.
     *
     * @throws OperationFaultException if either SNA cannot be contacted, or
     *         if the destination SN's version is older than the source's
     */
    private void checkVersions() {
        final Admin admin = plan.getAdmin();
        final RegistryUtils regUtils =
            new RegistryUtils(admin.getCurrentTopology(),
                              admin.getLoginManager(), plan.getLogger());

        final String errorMsg = " cannot be contacted. Please ensure that it " +
            "is deployed and running before attempting to deploy " +
            "this topology";
        KVVersion oldVersion = null;
        KVVersion newVersion = null;
        try {
            StorageNodeAgentAPI oldSNA = regUtils.getStorageNodeAgent(oldSN);
            oldVersion = oldSNA.ping().getKVVersion();
        } catch (RemoteException | NotBoundException e) {
            throw new OperationFaultException(oldSN + errorMsg);
        }

        try {
            StorageNodeAgentAPI newSNA = regUtils.getStorageNodeAgent(newSN);
            newVersion = newSNA.ping().getKVVersion();
        } catch (RemoteException | NotBoundException e) {
            throw new OperationFaultException(newSN + errorMsg);
        }

        if (VersionUtil.compareMinorVersion(oldVersion, newVersion) > 0) {
            throw new OperationFaultException
                (rnId + " cannot be moved from " + oldSN + " to " + newSN +
                 " because " + oldSN + " is at version " + oldVersion +
                 " and " + newSN + " is at older version " + newVersion +
                 ". Please upgrade " + newSN +
                 " to a version that is equal or greater than " + oldVersion);
        }
    }

    /**
     * Deletes the old RN on the original SN. Returns SUCCESS if the delete was
     * successful. This method calls awaitConsistency() on the new node
     * to make sure it is up and healthy before deleting the old node.
     *
     * @param stopRNTime the time the old RN was stopped, used by
     *        Utils.destroyRepNode when waiting for the new RN
     * @return SUCCESS if the old RN was deleted, INTERRUPTED if the wait
     *         was interrupted
     * @throws CommandFaultException if the wait for the new RN times out
     */
    private State destroyRepNode(long stopRNTime) {
        try {
            if (Utils.destroyRepNode(plan, stopRNTime, oldSN, rnId)) {
                return State.SUCCEEDED;
            }
        } catch (InterruptedException ie) {
            return State.INTERRUPTED;
        }
        final String msg = "Time out while waiting for " + rnId +
                           " to come up on " + newSN +
                           " and become consistent with" +
                           " the master of the shard before deleting" +
                           " the RepNode from its old home on " + oldSN;
        throw new CommandFaultException(msg, new RuntimeException(msg),
                                        ErrorMessage.NOSQL_5400,
                                        CommandResult.TOPO_PLAN_REPAIR);
    }

    /**
     * Start the RN, update its params. Clears the RN's disable bit in the
     * AdminDB if set, pushes the current parameters to the SNA, starts (or
     * creates) the RN, refreshes a pre-existing RN's in-memory parameters,
     * and registers the RN with the monitor.
     *
     * @param plan the plan providing admin access and logging
     * @param targetSNId the SN on which to start the RN
     * @param targetRNId the RN to start
     * @throws RemoteException if a communication error occurs
     * @throws NotBoundException if a service is not found
     */
    static public void startRN(AbstractPlan plan,
                               StorageNodeId targetSNId,
                               RepNodeId targetRNId)
        throws RemoteException, NotBoundException {

        final Admin admin = plan.getAdmin();

        /*
         * Update the SN after any AdminDB param changes are done. Refetch
         * the params and topo because they might have been updated.
         */
        final Topology topo = admin.getCurrentTopology();
        final RepNodeParams rnp =
            new RepNodeParams(admin.getRepNodeParams(targetRNId));
        if (rnp.isDisabled()) {
            rnp.setDisabled(false);
            admin.updateParams(rnp);
        }
        plan.getLogger().log(Level.INFO,
                             "{0} starting up {1} on {2} with {3}",
                             new Object[]{plan, targetRNId, targetSNId, rnp});

        final RegistryUtils regUtils =
            new RegistryUtils(topo, admin.getLoginManager(), plan.getLogger());
        final StorageNodeAgentAPI sna =
            regUtils.getStorageNodeAgent(targetSNId);

        /*
         * Update the RN's configuration file if the RN is present, since
         * createRepNode only updates the parameters for a new node
         */
        final boolean rnExists = sna.repNodeExists(targetRNId);
        if (rnExists) {
            sna.newRepNodeParameters(rnp.getMap());
        }

        /* Start or create the RN */
        try {
            sna.createRepNode(rnp.getMap(), Utils.getMetadataSet(topo, plan));
        } catch (IllegalStateException e) {
            throw new CommandFaultException(e.getMessage(), e,
                                            ErrorMessage.NOSQL_5200,
                                            CommandResult.NO_CLEANUP_JOBS);
        }

        /*
         * Refresh the repNodeAdmin parameters for an existing node in case it
         * was already running, since the start or create will be a no-op if
         * the RN was already up
         */
        if (rnExists) {
            try {
                Utils.waitForNodeState(plan, targetRNId, ServiceStatus.RUNNING);
            } catch (Exception e) {
                throw new CommandFaultException(e.getMessage(), e,
                                                ErrorMessage.NOSQL_5400,
                                                CommandResult.TOPO_PLAN_REPAIR);
            }
            RepNodeAdminAPI rnAdmin = regUtils.getRepNodeAdmin(targetRNId);
            rnAdmin.newParameters();
        }

        /* Register this repNode with the monitor. */
        final StorageNode sn = topo.get(targetSNId);
        admin.getMonitor().registerAgent(sn.getHostname(),
                                         sn.getRegistryPort(),
                                         targetRNId);
    }

    /**
     * Update and persist the params and topo to make the RN refer to the new
     * SN. Check to see if this has already occurred, to make the work
     * idempotent.
     *
     * @param before the SN the RN is being moved away from
     * @param after the SN the RN is being moved to
     * @param rgId the RN's shard, whose peer params also need updating
     */
    private void changeParamsAndTopo(StorageNodeId before,
                                     StorageNodeId after,
                                     RepGroupId rgId) {

        final Parameters parameters = plan.getAdmin().getCurrentParameters();
        final Topology topo = plan.getAdmin().getCurrentTopology();
        final PortTracker portTracker =
            new PortTracker(topo, parameters, after);

        /* Modify pertinent params and topo */
        final StorageNodeId origParamsSN =
            parameters.get(rnId).getStorageNodeId();
        final StorageNodeId origTopoSN = topo.get(rnId).getStorageNodeId();
        final ChangedParams changedParams =
            transferRNParams(parameters, portTracker, topo, before, after,
                             rgId);
        final boolean topoChanged = transferTopo(topo, before, after);

        /*
         * Sanity check that params and topo are in sync, both should be
         * either unchanged or changed
         */
        final Set<RepNodeParams> changedRNParams = changedParams.getRNP();
        final Set<ArbNodeParams> changedANParams = changedParams.getANP();
        if (!changedRNParams.isEmpty() != topoChanged) {
            /*
             * NOTE(review): "orignal" in the message below is a typo in a
             * runtime string ("original"); left as-is here, fix separately.
             */
            final String msg =
                rnId + " params and topo out of sync. Original params SN=" +
                origParamsSN + ", orignal topo SN=" + origTopoSN +
                " source SN=" + before + " destination SN=" + after;
            throw new CommandFaultException(msg,
                                            new IllegalStateException(msg),
                                            ErrorMessage.NOSQL_5500,
                                            CommandResult.NO_CLEANUP_JOBS);
        }

        /*
         * Only do the update if there has been a change. (The sanity check
         * above guarantees changedRNParams is non-empty iff topoChanged, so
         * testing topoChanged here also covers RN param changes.)
         */
        final Logger logger = plan.getLogger();
        if (!(topoChanged || !changedANParams.isEmpty())) {
            logger.log(Level.INFO,
                       "{0} no change to params or topology, no need to " +
                       "update in order to move {1} from {2} to {3}",
                       new Object[]{this, rnId, before, after});
            return;
        }

        plan.getAdmin().saveTopoAndParams(topo,
                                          plan.getDeployedInfo(),
                                          changedRNParams,
                                          Collections.<AdminParams>emptySet(),
                                          changedANParams,
                                          plan);
        /*
         * NOTE(review): "pdating" in the message below is a typo in a runtime
         * string ("updating"); left as-is here, fix separately.
         */
        logger.log(Level.INFO,
                   "{0} pdating params and topo for move of {1} from " +
                   "{2} to {3}: {4}",
                   new Object[]{this, rnId, before, after, changedRNParams});
    }

    /**
     * The params fields that have to be updated are:
     * For the RN that is to be moved:
     *   a. new JE HA nodehostport value
     *   b. new mount point
     *   c. new storage node id
     *   d. calculate JE cache size, which may change due to the capacity
     *      and memory values of the destination storage node.
     * For the other RNs in this shard:
     *   a. new helper host values, that point to this new location for our
     *     relocated RN
     *
     * @return the RN and AN params modified by this call; both sets are empty
     *         if the transfer already happened on a previous run
     */
    private ChangedParams transferRNParams(Parameters parameters,
                                           PortTracker portTracker,
                                           Topology topo,
                                           StorageNodeId before,
                                           StorageNodeId after,
                                           RepGroupId rgId) {

        final Set<RepNodeParams> changed = new HashSet<>();
        final Set<ArbNodeParams> changedArbp = new HashSet<>();
        final RepNodeParams rnp = parameters.get(rnId);
        final ParameterMap policyMap = parameters.copyPolicies();

        if (rnp.getStorageNodeId().equals(after)) {
            /*
             * We're done, this task ran previously. Note that this does not
             * notice if a RN is on the same SN, but its mount point has
             * changed. In R2, we deliberately do not yet support automatic
             * movement of RNs across mount points on the same SN; it's left
             * to the user to do manually.
             */
            plan.getLogger().log(Level.INFO,
                                 "{0} {1} already transferred to {2}",
                                 new Object[]{this, rnId, after});
            return new ChangedParams(changedArbp, changed);
        }

        /*
         * Sanity check -- this RNP should be pointing to the before SN, not
         * to some third party SN!
         */
        if (!rnp.getStorageNodeId().equals(before)) {
            final String msg = "Attempted to transfer " + rnId + " from " +
                before + " to " + after +
                " but unexpectedly found it residing on " +
                rnp.getStorageNodeId();
            throw new CommandFaultException(msg,
                                            new OperationFaultException(msg),
                                            ErrorMessage.NOSQL_5500,
                                            CommandResult.NO_CLEANUP_JOBS);
        }

        /*
         * Change the SN, helper hosts, nodeHostPort, storage directory and
         * log directory for this RN
         */
        final int haPort = portTracker.getNextPort(after);
        final String newSNHAHostname = parameters.get(after).getHAHostname();
        final String oldNodeHostPort = rnp.getJENodeHostPort();
        final String nodeHostPort = newSNHAHostname + ":" + haPort;

        plan.getLogger().log(Level.INFO,
                             "{0} transferring HA port for {1} from {2} to {3}",
                             new Object[]{this, rnp.getRepNodeId(),
                                          oldNodeHostPort, nodeHostPort});

        rnp.setStorageNodeId(after);
        rnp.setJENodeHostPort(nodeHostPort);
        rnp.setStorageDirectory(newMountPoint, newStorageDirectorySize);
        rnp.setLogDirectory(newLogDirectory, newLogDirectorySize);

        /*
         * Setting the helper hosts is not strictly necessary, as it should
         * not have changed, but take this opportunity to update the helper
         * list in case a previous param change had been interrupted.
         */
        rnp.setJEHelperHosts(
            Utils.findHelpers(rnId, parameters, topo));

        /*
         * Update the RN heap, JE cache size, and parallelGCThreads params,
         * which are a function of the characteristics of the hosting storage
         * node
         */
        final StorageNodeParams snp = parameters.get(after);
        Utils.setRNPHeapCacheGC(policyMap, snp, rnp, topo);
        changed.add(rnp);

        /* Change the helper hosts for other RNs in the group. */
        for (RepNode peer : topo.get(rgId).getRepNodes()) {
            final RepNodeId peerId = peer.getResourceId();
            if (peerId.equals(rnId)) {
                continue;
            }

            final RepNodeParams peerParam = parameters.get(peerId);
            final String oldHelper = peerParam.getJEHelperHosts();
            final String newHelpers =
                oldHelper.replace(oldNodeHostPort, nodeHostPort);
            peerParam.setJEHelperHosts(newHelpers);
            changed.add(peerParam);
        }

        /* Arbiters in the shard also carry helper-host lists; update them. */
        for (ArbNode peer : topo.get(rgId).getArbNodes()) {
            final ArbNodeId peerId = peer.getResourceId();

            final ArbNodeParams peerParam = parameters.get(peerId);
            final String oldHelper = peerParam.getJEHelperHosts();
            final String newHelpers =
                oldHelper.replace(oldNodeHostPort, nodeHostPort);
            peerParam.setJEHelperHosts(newHelpers);
            changedArbp.add(peerParam);
        }

        return new ChangedParams(changedArbp, changed);
    }

    /**
     * Find all RepNodes that refer to the old node, and update the topology to
     * refer to the new node.
     * @return true if a change has been made, return false if the RN is already
     * on the new SN.
     * @throws CommandFaultException if the RN is on neither the old nor the
     *         new SN (an unexpected third-party SN)
     */
    private boolean transferTopo(Topology topo, StorageNodeId before,
                                 StorageNodeId after) {

        final RepNode rn = topo.get(rnId);
        final StorageNodeId inUseSNId = rn.getStorageNodeId();

        if (inUseSNId.equals(before)) {
            final RepNode updatedRN = new RepNode(after);
            final RepGroup rg = topo.get(rn.getRepGroupId());
            rg.update(rn.getResourceId(),updatedRN);
            return true;
        }

        if (inUseSNId.equals(after)) {
            return false;
        }

        final String msg = rn + " expected to be on old SN " + before +
            " or new SN " + after + " but instead is on " + inUseSNId;
        throw new CommandFaultException(msg, new IllegalStateException(msg),
                                        ErrorMessage.NOSQL_5500,
                                        CommandResult.NO_CLEANUP_JOBS);
    }

    /** Relocation failures require cleanup/repair; never continue past one. */
    @Override
    public boolean continuePastError() {
        return false;
    }

    /**
     * Returns a job that runs {@link #cleanupRelocation} when the task is
     * cancelled; any cleanup failure is logged and rethrown so the plan does
     * not proceed on a partly-cleaned state.
     */
    @Override
    public Runnable getCleanupJob() {
        return new Runnable() {
            @Override
            public void run() {
                try {
                    cleanupRelocation();
                } catch (Exception e) {
                    plan.getLogger().log
                        (Level.SEVERE,
                         "{0}: problem when cancelling relocation {1}",
                         new Object[] {this, LoggerUtils.getStackTrace(e)});

                    /*
                     * Don't try to continue with cleanup; a problem has
                     * occurred. Future, additional invocations of the plan
                     * will have to figure out the context and do cleanup.
                     */
                    throw new RuntimeException(e);
                }
            }
        };
    }

    /**
     * Do the minimum cleanup : when this task ends, check
     *  - the kvstore metadata as known by the admin (params, topo)
     *  - the configuration information, including helper hosts, as stored in
     *    the SN config file
     *  - the JE HA groupdb
     * and attempt to leave it all consistent. Do not necessarily try to revert
     * to the topology before the task.
     * @throws NotBoundException
     * @throws RemoteException
     */
    private void cleanupRelocation()
        throws RemoteException, NotBoundException {

        assert TestHookExecute.doHookIfSet(FAULT_HOOK, 7);

        final boolean done = checkAndRepairLocation();
        final Topology current = plan.getAdmin().getCurrentTopology();
        final RepNode rn = current.get(rnId);

        if (done) {
            if (rn.getStorageNodeId().equals(newSN)) {
                plan.getLogger().log(Level.INFO,
                                     "{0} cleanup, shard is " +
                                     " consistent, {1} is on the target {2}",
                                     new Object[]{this, rnId, newSN});
                /* attempt to delete the old RN */
                destroyRepNode(System.currentTimeMillis());
            }
            /*
             * NOTE(review): this log also runs after the newSN branch above,
             * producing two "shard is consistent" messages in that case —
             * looks like a missing else; confirm whether that is intended.
             */
            plan.getLogger().log(Level.INFO, "{0} cleanup, shard is " +
                                 "consistent, {1} is on {2}",
                                 new Object[]{this, rnId,
                                              rn.getStorageNodeId()});
        } else {
            plan.getLogger().log(Level.INFO,
                                 "{0} cleanup, shard did not have " +
                                 "master, no cleanup attempted since " +
                                 "authoritative information is lacking",
                                 this);
        }
    }

    /**
     * This is the older style cleanup, which attempts to reason about how far
     * the task proceeded, and then attempts to revert to the previous state.
     * Kept for reference; currently unused.
     *
     * @return true if cleanup completed, false if the revert broadcast was
     *         interrupted before enough nodes saw the topology
     */
    @SuppressWarnings("unused")
    private boolean checkLocationConsistency()
        throws InterruptedException, RemoteException, NotBoundException {

        final Admin admin = plan.getAdmin();

        assert TestHookExecute.doHookIfSet(FAULT_HOOK, 7);

        /*
         * If step 5 occurred (enable bit on, RN pointing to new SN, then the HA
         * group and the params/topo are consistent, so attempt to delete the
         * old RN.
         */
        final RepNodeParams rnp = admin.getRepNodeParams(rnId);
        if ((rnp.getStorageNodeId().equals(newSN)) && !rnp.isDisabled()) {
            return destroyRepNode(System.currentTimeMillis()) ==
                State.SUCCEEDED;
        }

        /*
         * If the RepNodeParams still point at the old SN, steps 2 and 3 did
         * not occur, nothing to clean up
         */
        if (rnp.getStorageNodeId().equals(oldSN)) {
            /*
             * If the original RN was disabled, attempt to re-enable it. Note
             * that this may enable a node which was disabled before the plan
             * run.
             */
            if (rnp.isDisabled()) {
                Utils.startRN(plan, oldSN, rnId);
            }
            return true;
        }

        /*
         * We are somewhere between steps 1 and 5. Revert both of the kvstore
         * params and topo, and the JE HA update, and the peer RNs helper
         * hosts.
         */
        Topology topo = admin.getCurrentTopology();
        changeParamsAndTopo(newSN, oldSN, topo.get(rnId).getRepGroupId());
        Utils.refreshParamsOnPeers(plan, rnId);
        Utils.changeHAAddress(topo, admin.getCurrentParameters(),
                              admin.getParams().getAdminParams(), rnId,
                              newSN, oldSN, plan);

        /* refresh the topo, it's been updated */
        topo = admin.getCurrentTopology();
        if (!Utils.broadcastTopoChangesToRNs(plan.getLogger(), topo,
                                             "revert relocation of " + rnId +
                                             " and move back from " + newSN +
                                             " to " + oldSN,
                                             admin.getParams().getAdminParams(),
                                             plan)) {
            /*
             * The plan is interrupted before enough nodes saw the new
             * topology.
             */
            return false;
        }
        return true;
    }

    /**
     * Appends a human-readable description of the move; falls back to bare
     * SN ids when the Admin (and thus SN params) is unavailable.
     */
    @Override
    public StringBuilder getName(StringBuilder sb) {
        final StorageNodeParams snpOld = (plan.getAdmin() != null ?
                plan.getAdmin().getStorageNodeParams(oldSN) : null);
        final StorageNodeParams snpNew = (plan.getAdmin() != null ?
                plan.getAdmin().getStorageNodeParams(newSN) : null);
        return super.getName(sb).append(" move ").append(rnId)
                                .append(" from ")
                                .append(snpOld != null ?
                                        snpOld.displaySNIdAndHost() : oldSN)
                                .append(" to ")
                                .append(snpNew != null ?
                                        snpNew.displaySNIdAndHost() : newSN);
    }

    /** Locks the RN's shard plus both SNs so concurrent plans cannot race. */
    @Override
    public void acquireLocks(Planner planner)
        throws PlanLocksHeldException {
        LockUtils.lockRG(planner, plan, new RepGroupId(rnId.getGroupId()));
        LockUtils.lockSN(planner, plan, oldSN);
        LockUtils.lockSN(planner, plan, newSN);
    }

    /** Simple holder pairing the RN and AN params changed by a transfer. */
    class ChangedParams {
        private final Set<ArbNodeParams> anParams;
        private final Set<RepNodeParams> rnParams;
        ChangedParams(Set<ArbNodeParams> anp, Set<RepNodeParams> rnp) {
            anParams = anp;
            rnParams = rnp;
        }
        Set<ArbNodeParams> getANP() { return anParams; }
        Set<RepNodeParams> getRNP() { return rnParams; }
    }
}
google/s2-geometry-library-java
36,661
library/src/com/google/common/geometry/S2CellUnion.java
/* * Copyright 2005 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.common.geometry; import static com.google.common.geometry.S2CellId.isFace; import static com.google.common.geometry.S2CellId.parentAsLong; import static com.google.common.geometry.S2CellId.rangeMaxAsLong; import static com.google.common.geometry.S2CellId.rangeMinAsLong; import static com.google.common.geometry.S2CellId.unsignedLongLessThan; import static java.lang.Math.max; import static java.lang.Math.min; import com.google.common.base.Preconditions; import com.google.common.collect.Iterables; import com.google.common.geometry.PrimitiveArrays.Bytes; import com.google.common.geometry.PrimitiveArrays.Cursor; import com.google.errorprone.annotations.CanIgnoreReturnValue; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.Serializable; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Iterator; import java.util.List; import jsinterop.annotations.JsConstructor; import jsinterop.annotations.JsIgnore; import jsinterop.annotations.JsMethod; import jsinterop.annotations.JsType; /** * An S2CellUnion is a region consisting of cells of various sizes. Typically a cell union is used * to approximate some other shape. There is a tradeoff between the accuracy of the approximation * and how many cells are used. Unlike polygons, cells have a fixed hierarchical structure. 
This * makes them more suitable for optimizations based on preprocessing. * * <p>An S2CellUnion is represented as a vector of sorted, non-overlapping S2CellIds. By default the * vector is also "normalized", meaning that groups of 4 child cells have been replaced by their * parent cell whenever possible. S2CellUnions are not required to be normalized, but certain * operations will return different results if they are not, e.g. {@link #contains(S2CellUnion)}. * * @author danieldanciu (Daniel Danciu) ported from util/geometry * @author ericv@google.com (Eric Veach) original author */ @JsType @SuppressWarnings("Assertion") public class S2CellUnion implements S2Region, Iterable<S2CellId>, Serializable { private static final long serialVersionUID = 1L; private static final byte LOSSLESS_ENCODING_VERSION = 1; /** An {@link S2Coder} of cell unions that uses {@link #encode} and {@link #decode}. */ public static final S2Coder<S2CellUnion> FAST_CODER = new S2Coder<S2CellUnion>() { @JsIgnore // OutputStream is not available to J2CL. @Override public void encode(S2CellUnion value, OutputStream output) throws IOException { value.encode(output); } @Override public S2CellUnion decode(Bytes data, Cursor cursor) throws IOException { return S2CellUnion.decode(data.toInputStream(cursor)); } @Override public boolean isLazy() { return false; } }; /** A compact coder that compresses the given cells by around 4-5x in many cases. */ public static final S2Coder<S2CellUnion> COMPACT_CODER = S2CellIdVectorCoder.INSTANCE.delegating(cells -> cells.cellIds, S2CellUnion::copyFrom); /** The CellIds that form the Union */ private ArrayList<S2CellId> cellIds = new ArrayList<>(); @JsConstructor public S2CellUnion() {} /** Clears the union contents, leaving it empty. */ public void clear() { cellIds.clear(); } /** Creates a new cell union as as copy of the given cell union 'other'. 
*/ public static S2CellUnion copyFrom(S2CellUnion other) { S2CellUnion copy = new S2CellUnion(); copy.initRawCellIds(new ArrayList<>(other.cellIds)); return copy; } /** Creates a new cell union from a copy of the given cells. */ @JsIgnore // J2CL warning "Iterable<S2CellId> ... is not usable by JavaScript" but not clear why. public static S2CellUnion copyFrom(Iterable<S2CellId> cells) { S2CellUnion result = new S2CellUnion(); // Note that if 'cells' are an AbstractList over lazily decoded data, addAll may throw // NoSuchElementException. Iterables.addAll(result.cellIds, cells); return result; } /** Constructs a cell union for the whole sphere. */ public static S2CellUnion wholeSphere() { return S2CellUnion.copyFrom(Arrays.asList(S2CellId.FACE_CELLS)); } /** * Populates this cell union with the given S2CellIds, and then calls normalize(). This directly * uses the input list, without copying it. */ @CanIgnoreReturnValue public S2CellUnion initFromCellIds(ArrayList<S2CellId> cellIds) { initRawCellIds(cellIds); normalize(); return this; } /** Populates this cell union with the given 64-bit cell ids, and then calls normalize(). */ @CanIgnoreReturnValue public S2CellUnion initFromIds(List<Long> cellIds) { initRawIds(cellIds); normalize(); return this; } /** * Populates this cell union with the given S2CellIds. The input list is copied, and then cleared. */ @CanIgnoreReturnValue public S2CellUnion initSwap(List<S2CellId> cellIds) { initRawSwap(cellIds); normalize(); return this; } /** * Populates this cell union with the given S2CellIds. This does not call normalize, see {@link * #initRawSwap} for details. This directly uses the input list, without copying it. */ @CanIgnoreReturnValue public S2CellUnion initRawCellIds(ArrayList<S2CellId> cellIds) { this.cellIds = cellIds; return this; } /** Populates this cell union with the single given 64 bit cell id, which must be valid. 
*/ @CanIgnoreReturnValue public S2CellUnion initFromId(long cellId) { assert S2CellId.isValid(cellId); cellIds.clear(); cellIds.add(new S2CellId(cellId)); return this; } /** Populates this cell union with the single given S2CellId, which must be valid. */ @CanIgnoreReturnValue public S2CellUnion initFromCellId(S2CellId cellId) { assert cellId.isValid(); cellIds.clear(); cellIds.add(cellId); return this; } /** * Populates this cell union with the given 64 bit cell ids. This does not call normalize, see * {@link #initRawSwap} for details. The input list is copied. */ // TODO(user): Make a constructed S2CellUnion immutable, and port other init methods from // C++. @CanIgnoreReturnValue public S2CellUnion initRawIds(List<Long> cellIds) { int size = cellIds.size(); this.cellIds = new ArrayList<>(size); for (Long id : cellIds) { this.cellIds.add(new S2CellId(id)); } return this; } /** * Like the initFrom*() constructors, but does not call normalize(). The cell union *must* be * normalized before doing any calculations with it, so it is the caller's responsibility to make * sure that the input is normalized. This method is useful when converting cell unions to another * representation and back. * * <p>The input list is copied, and then cleared. */ public void initRawSwap(List<S2CellId> cellIds) { this.cellIds = new ArrayList<>(cellIds); cellIds.clear(); } /** * Create a cell union that corresponds to a continuous range of cell ids. The output is a * normalized collection of cell ids that covers the leaf cells between "minId" and "maxId" * inclusive. * * <p>Requires that {@code minId.isLeaf(), maxId.isLeaf()}, and {@code minId <= maxId}. 
*/ public void initFromMinMax(S2CellId minId, S2CellId maxId) { assert minId.isLeaf(); assert maxId.isLeaf(); assert minId.compareTo(maxId) <= 0; assert minId.isValid() && maxId.isValid(); initFromBeginEnd(minId, maxId.next()); } /** * As {@link #initFromMinMax(S2CellId, S2CellId)}, except that the union covers the range of leaf * cells from "begin" (inclusive) to "end" (exclusive.) If {@code begin.equals(end)}, the result * is empty. * * <p>Requires that {@code begin.isLeaf(), end.isLeaf()}, and {@code begin <= end}. */ public void initFromBeginEnd(S2CellId begin, S2CellId end) { assert begin.isLeaf(); assert end.isLeaf(); assert begin.compareTo(end) <= 0; // We repeatedly add the largest cell we can, in sorted order. cellIds.clear(); for (S2CellId nextBegin = begin; nextBegin.compareTo(end) < 0; ) { assert nextBegin.isLeaf(); // Find the largest cell that starts at "nextBegin" and ends before "end". This loop uses // longs rather than S2CellIds to avoid many allocations of S2CellId objects. long nextId = nextBegin.id(); while (!isFace(nextId) && rangeMinAsLong(parentAsLong(nextId)) == nextBegin.id() && unsignedLongLessThan(rangeMaxAsLong(parentAsLong(nextId)), end.id())) { nextId = parentAsLong(nextId); } S2CellId nextCellId = new S2CellId(nextId); cellIds.add(nextCellId); nextBegin = nextCellId.rangeMax().next(); } // The output should already be sorted and normalized. assert !normalize(); } public int size() { return cellIds.size(); } /** Convenience methods for accessing the individual cell ids. */ public S2CellId cellId(int i) { return cellIds.get(i); } /** * Provides an S2Iterator over this union's cells. The cell union must be normalized, or results * will be undefined. */ public S2Iterator<S2CellId> s2Iterator() { assert isNormalized(); return S2Iterator.fromList(cellIds); } /** Enable iteration over the union's cells. See also {@link #s2Iterator()}. 
 */
@Override
@JsIgnore
public Iterator<S2CellId> iterator() {
  return cellIds.iterator();
}

/** Direct access to the underlying vector for iteration. */
public ArrayList<S2CellId> cellIds() {
  // NOTE(review): this intentionally returns the internal list itself (no defensive copy), so
  // callers can mutate this union through it.
  return cellIds;
}

/** Returns true if the cell union is empty. */
public boolean isEmpty() {
  return cellIds.isEmpty();
}

/**
 * Returns true if the cell union is valid, meaning that the S2CellIds are non-overlapping and
 * sorted in increasing order.
 */
public boolean isValid() {
  // Adjacent cells must be disjoint and strictly increasing: each previous cell's last leaf must
  // come before the next cell's first leaf.
  for (int i = 1; i < cellIds.size(); i++) {
    if (cellIds.get(i - 1).rangeMax().compareTo(cellIds.get(i).rangeMin()) >= 0) {
      return false;
    }
  }
  return true;
}

/**
 * Returns true if the cell union is normalized, meaning that {@link #isValid()} is true and
 * that no four cells at the same level have a common parent.
 *
 * <p>Certain operations such as {@link #contains(S2CellUnion)} may return a different result if
 * the cell union is not normalized.
 */
public boolean isNormalized() {
  for (int i = 1; i < cellIds.size(); i++) {
    // Same ordering/disjointness check as isValid().
    if (cellIds.get(i - 1).rangeMax().compareTo(cellIds.get(i).rangeMin()) >= 0) {
      return false;
    }
    // Additionally, no window of four consecutive cells may form a complete sibling group
    // (such a group should have been collapsed into its parent by normalize()).
    if (i >= 3
        && areSiblings(
            cellIds.get(i - 3), cellIds.get(i - 2), cellIds.get(i - 1), cellIds.get(i))) {
      return false;
    }
  }
  return true;
}

/**
 * Returns true if the given four cells are at the same level and have a common parent.
 *
 * <p>Requires the four cells are distinct.
 */
private static boolean areSiblings(S2CellId a, S2CellId b, S2CellId c, S2CellId d) {
  // A necessary (but not sufficient) condition is that the XOR of the four cells must be zero.
  // This is also very fast to test.
  if ((a.id() ^ b.id() ^ c.id()) != d.id()) {
    return false;
  }

  // Now we do a slightly more expensive but exact test. First, compute a mask that blocks out the
  // two bits that encode the child position of "id" with respect to its parent, then check that
  // the other three children all agree with "mask".
  long mask = d.lowestOnBit() << 1;
  mask = ~(mask + (mask << 1));
  long idMasked = d.id() & mask;
  // Face cells have no parent, so four faces can never be siblings.
  return !d.isFace()
      && (a.id() & mask) == idMasked
      && (b.id() & mask) == idMasked
      && (c.id() & mask) == idMasked;
}

/**
 * Returns a list of the cell ids in this cell union after replacing any cells whose level is less
 * than "minLevel" with their children, until the required minLevel is reached. The provided
 * minLevel must be in the range [0, S2CellId.MAX_LEVEL].
 */
public List<S2CellId> denormalized(int minLevel) {
  ArrayList<S2CellId> output = new ArrayList<>();
  denormalize(minLevel, 1, output);
  return output;
}

/**
 * Replaces "output" with an expanded version of the cell union where any cells whose level is
 * less than "minLevel" are replaced by their children, until the required minLevel is reached.
 *
 * <p>The provided minLevel must be in the range [0, S2CellId.MAX_LEVEL].
 */
@JsIgnore
public void denormalize(int minLevel, ArrayList<S2CellId> output) {
  denormalize(minLevel, 1, output);
}

/**
 * Replaces "output" with an expanded version of the cell union where any cells whose level is
 * less than "minLevel" or where (level - minLevel) is not a multiple of "levelMod" are replaced
 * by their children, until either both of these conditions are satisfied or the maximum level is
 * reached.
 *
 * <p>This method allows a covering generated by S2RegionCoverer using minLevel() or levelMod()
 * constraints to be stored as a normalized cell union (which allows various geometric
 * computations to be done) and then converted back to the original list of cell ids that
 * satisfies the desired constraints.
 *
 * <p>The provided minLevel must be in the range [0, S2CellId.MAX_LEVEL]. The provided levelMod
 * must be in the range [1, 3].
 */
public void denormalize(int minLevel, int levelMod, ArrayList<S2CellId> output) {
  assert minLevel >= 0;
  assert minLevel <= S2CellId.MAX_LEVEL;
  assert levelMod >= 1;
  assert levelMod <= 3;
  output.clear();
  output.ensureCapacity(size());
  for (S2CellId id : this) {
    int level = id.level();
    int newLevel = max(minLevel, level);
    if (levelMod > 1) {
      // Round up so that (newLevel - minLevel) is a multiple of levelMod.
      // (Note that S2CellId.MAX_LEVEL is a multiple of 1, 2, and 3.)
      newLevel += (S2CellId.MAX_LEVEL - (newLevel - minLevel)) % levelMod;
      newLevel = min(S2CellId.MAX_LEVEL, newLevel);
    }
    if (newLevel == level) {
      // Cell already satisfies the constraints; copy it through unchanged.
      output.add(id);
    } else {
      // Replace the cell by all of its descendants at newLevel.
      S2CellId end = id.childEnd(newLevel);
      for (id = id.childBegin(newLevel); !id.equals(end); id = id.next()) {
        output.add(id);
      }
    }
  }
}

/**
 * If there are more than "excess" elements of the cellIds() vector that are allocated but unused,
 * reallocate the array to eliminate the excess space. This reduces memory usage when many cell
 * unions need to be held in memory at once.
 */
public void pack() {
  cellIds.trimToSize();
}

/**
 * Return true if the cell union contains the given cell id. Containment is defined with respect
 * to regions, e.g. a cell contains its 4 children. This is a fast operation (logarithmic in the
 * size of the cell union).
 *
 * <p>CAVEAT: If you have constructed a valid but non-normalized S2CellUnion, note that groups of
 * 4 child cells are <em>not</em> considered to contain their parent cell. To get this behavior
 * you must construct a normalized cell union, or call {@link #normalize()} prior to this method.
 */
@JsMethod(name = "containsCellId")
public boolean contains(S2CellId id) {
  // This is an exact test. Each cell occupies a linear span of the S2 space-filling curve, and
  // the cell id is simply the position at the center of this span. The cell union ids are sorted
  // in increasing order along the space-filling curve. So we simply find the pair of cell ids
  // that surround the given cell id (using binary search).
// There is containment if and only if
  // one of these two cell ids contains this cell.
  int pos = Collections.binarySearch(cellIds, id);
  if (pos < 0) {
    // Not found exactly: convert insertion point to index of the first cell >= id.
    pos = -pos - 1;
  }
  if (pos < cellIds.size() && cellIds.get(pos).rangeMin().lessOrEquals(id)) {
    return true;
  }
  // Otherwise the predecessor cell may contain id.
  return pos != 0 && cellIds.get(pos - 1).rangeMax().greaterOrEquals(id);
}

/**
 * Return true if the cell union intersects the given cell id. This is a fast operation
 * (logarithmic in the size of the cell union).
 */
@JsMethod(name = "intersectsCellId")
public boolean intersects(S2CellId id) {
  // This is an exact test; see the comments for contains(S2CellId) above.
  int pos = Collections.binarySearch(cellIds, id);
  if (pos < 0) {
    pos = -pos - 1;
  }
  // Intersection exists iff the cell at/after id starts no later than id's last leaf, or the
  // cell before id ends no earlier than id's first leaf.
  if (pos < cellIds.size() && cellIds.get(pos).rangeMin().lessOrEquals(id.rangeMax())) {
    return true;
  }
  return pos != 0 && cellIds.get(pos - 1).rangeMax().greaterOrEquals(id.rangeMin());
}

/**
 * Returns true if this cell union contains {@code that}.
 *
 * <p>CAVEAT: If you have constructed a valid but non-normalized S2CellUnion, note that groups of
 * 4 child cells are <em>not</em> considered to contain their parent cell. To get this behavior
 * you must construct a normalized cell union, or call {@link #normalize()} prior to this method.
 */
public boolean contains(S2CellUnion that) {
  // "this" contains "that" iff intersecting "that" with "this" leaves "that" unchanged.
  // TODO(review): this is O(N); could be done in O(log N) per cell if needed.
  S2CellUnion result = new S2CellUnion();
  result.getIntersection(this, that);
  return result.cellIds.equals(that.cellIds);
}

/** This is a fast operation (logarithmic in the size of the cell union). */
@Override
@JsMethod(name = "containsCell")
public boolean contains(S2Cell cell) {
  return contains(cell.id());
}

/** Return true if this cell union intersects {@code union}. */
public boolean intersects(S2CellUnion union) {
  // A non-empty intersection witnesses overlap.
  S2CellUnion result = new S2CellUnion();
  result.getIntersection(this, union);
  return result.size() > 0;
}

/** Returns the union of two S2CellUnions.
 */
public static S2CellUnion union(S2CellUnion x, S2CellUnion y) {
  S2CellUnion result = new S2CellUnion();
  result.getUnion(x, y);
  return result;
}

/**
 * Sets this cell union to the union of {@code x} and {@code y}, which both must be different cell
 * unions than this one.
 */
@SuppressWarnings("ReferenceEquality") // Precondition check is checking reference equality.
public void getUnion(S2CellUnion x, S2CellUnion y) {
  Preconditions.checkArgument(x != this);
  Preconditions.checkArgument(y != this);
  // Concatenate both inputs, then let normalize() sort, dedupe, and collapse sibling groups.
  cellIds.clear();
  cellIds.ensureCapacity(x.size() + y.size());
  cellIds.addAll(x.cellIds);
  cellIds.addAll(y.cellIds);
  normalize();
}

/**
 * Specialized version of getIntersection() that gets the intersection of a cell union with the
 * given cell id. This can be useful for "splitting" a cell union into chunks.
 *
 * <p><b>Note:</b> {@code x} must be normalized, and must be a different cell union than this one.
 */
@SuppressWarnings("ReferenceEquality") // Precondition check is checking reference equality.
public void getIntersection(S2CellUnion x, S2CellId id) {
  Preconditions.checkArgument(x != this);
  cellIds.clear();
  if (x.contains(id)) {
    // "id" lies entirely inside some cell of "x": the intersection is exactly "id".
    cellIds.add(id);
  } else {
    // Otherwise copy the contiguous run of cells of "x" that fall within id's leaf range.
    int pos = Collections.binarySearch(x.cellIds, id.rangeMin());
    if (pos < 0) {
      pos = -pos - 1;
    }
    S2CellId idmax = id.rangeMax();
    int size = x.cellIds.size();
    while (pos < size && x.cellIds.get(pos).lessOrEquals(idmax)) {
      cellIds.add(x.cellIds.get(pos++));
    }
  }
  assert isNormalized() || !x.isNormalized();
}

/** Returns the intersection of two S2CellUnions. */
public static S2CellUnion intersection(S2CellUnion x, S2CellUnion y) {
  S2CellUnion result = new S2CellUnion();
  result.getIntersection(x, y);
  return result;
}

/**
 * Initializes this cell union to the intersection of the two given cell unions. Requires: x !=
 * this and y != this.
 *
 * <p><b>Note:</b> {@code x} and {@code y} must both be normalized to ensure the output is
 * normalized.
 */
@JsMethod(name = "getIntersectionCellUnion")
@SuppressWarnings("ReferenceEquality") // Precondition check is checking reference equality.
public void getIntersection(S2CellUnion x, S2CellUnion y) {
  // It's fine if 'this' and 'x' or 'y' are different cell unions with the same cells, but they
  // may not be the same object.
  Preconditions.checkArgument(x != this && y != this);
  getIntersection(x.cellIds, y.cellIds, cellIds);
  // The output is normalized as long as both inputs are normalized.
  assert isNormalized() || (!x.isNormalized() || !y.isNormalized());
}

/**
 * Like {@code #getIntersection(S2CellUnion, S2CellUnion)}, but works directly with lists of
 * S2CellIds, and this method has slightly more relaxed normalization requirements: the input
 * vectors may contain groups of 4 child cells that all have the same parent. (In a normalized
 * S2CellUnion, such groups are always replaced by the parent cell.)
 *
 * <p><b>Note:</b> {@code x} and {@code y} must be sorted.
 */
@JsIgnore
public static void getIntersection(List<S2CellId> x, List<S2CellId> y, List<S2CellId> results) {
  assert x != results;
  assert y != results;

  // This is a fairly efficient calculation that uses binary search to skip over sections of both
  // input vectors. It takes constant time if all the cells of "x" come before or after all the
  // cells of "y" in S2CellId order.
  results.clear();
  int i = 0;
  int j = 0;
  while (i < x.size() && j < y.size()) {
    S2CellId xCell = x.get(i);
    S2CellId xMin = xCell.rangeMin();
    S2CellId yCell = y.get(j);
    S2CellId yMin = yCell.rangeMin();
    if (xMin.greaterThan(yMin)) {
      // Either yCell contains xCell or the two cells are disjoint.
      if (xCell.lessOrEquals(yCell.rangeMax())) {
        results.add(xCell);
        i++;
      } else {
        // Advance "j" to the first cell possibly contained by xCell.
        j = indexedBinarySearch(y, xMin, j + 1);
        // The previous cell (j-1) may now contain xCell.
        if (xCell.lessOrEquals(y.get(j - 1).rangeMax())) {
          --j;
        }
      }
    } else if (yMin.greaterThan(xMin)) {
      // Identical to the code above with "i" and "j" reversed.
      if (yCell.lessOrEquals(xCell.rangeMax())) {
        results.add(yCell);
        j++;
      } else {
        i = indexedBinarySearch(x, yMin, i + 1);
        if (yCell.lessOrEquals(x.get(i - 1).rangeMax())) {
          --i;
        }
      }
    } else {
      // "i" and "j" have the same rangeMin(), so one contains the other.
      if (xCell.lessThan(yCell)) {
        results.add(xCell);
        i++;
      } else {
        results.add(yCell);
        j++;
      }
    }
  }
}

/** Initializes this cell union to the difference of the two given cell unions. */
public void getDifference(S2CellUnion x, S2CellUnion y) {
  // TODO(user): this is approximately O(N*log(N)), but could probably use similar
  // techniques as getIntersection() to be more efficient.
  cellIds.clear();
  for (S2CellId id : x) {
    getDifferenceInternal(id, y);
  }
  // The output is normalized as long as the first argument is normalized.
  assert isNormalized() || !x.isNormalized();
}

private void getDifferenceInternal(S2CellId cell, S2CellUnion y) {
  // Add the difference between cell and y to cellIds. If they intersect but the difference is
  // non-empty, divide and conquer (recurse on the four children).
  if (!y.intersects(cell)) {
    cellIds.add(cell);
  } else if (!y.contains(cell)) {
    for (int i = 0; i < 4; i++) {
      getDifferenceInternal(cell.child(i), y);
    }
  }
}

/**
 * Just as normal binary search, except that it allows specifying the starting value for the lower
 * bound.
 *
 * @return The position of the searched element in the list (if found), or the position where the
 *     element could be inserted without violating the order.
*/ private static int indexedBinarySearch(List<S2CellId> l, S2CellId key, int low) { int high = l.size() - 1; while (low <= high) { int mid = (low + high) >> 1; S2CellId midVal = l.get(mid); int cmp = midVal.compareTo(key); if (cmp < 0) { low = mid + 1; } else if (cmp > 0) { high = mid - 1; } else { return mid; // key found } } return low; // key not found } /** * Expands the cell union by adding a buffer of cells at "expandLevel" around the union boundary. * * <p>For each cell "c" in the union, we add all neighboring cells at level "expandLevel" that are * adjacent to "c". Note that there can be many such cells if "c" is large compared to * "expandLevel". If "c" is smaller than "expandLevel", we first add the parent of "c" at * "expandLevel", and then add all the neighbors of that cell. Note that this can cause the * expansion around such cells to be up to almost twice as large as expected, because the union * boundary is moved outward by up to the size difference between "c" and its parent at * "expandLevel", (depending on the position of "c" in that parent) *before* the buffering * neighbors are added. * * <p>Note that the size of the output is exponential in "level". For example, if level == 20 and * the input has a cell at level 10, there will be on the order of 4000 adjacent cells in the * output. For most applications the expand(minRadius, maxLevelDiff) method below is easier to * use. */ @JsMethod(name = "expandAtLevel") public void expand(int expandLevel) { ArrayList<S2CellId> output = new ArrayList<>(); long levelLsb = S2CellId.lowestOnBitForLevel(expandLevel); for (int i = size(); --i >= 0; ) { S2CellId id = cellId(i); if (id.lowestOnBit() < levelLsb) { id = id.parent(expandLevel); // Optimization: skip over any cells contained by this one. This is especially important // when very small regions are being expanded. 
while (i > 0 && id.contains(cellId(i - 1))) {
        --i;
      }
    }
    output.add(id);
    // Add all level-"expandLevel" neighbors of the (possibly widened) cell.
    id.getAllNeighbors(expandLevel, output);
  }
  initSwap(output);
}

/**
 * Expand the cell union such that it contains all points whose distance to the cell union is at
 * most minRadius, but do not use cells that are more than maxLevelDiff levels higher than the
 * largest cell in the input. The second parameter controls the tradeoff between accuracy and
 * output size when a large region is being expanded by a small amount (e.g. expanding Canada by
 * 1km).
 *
 * <p>For example, if maxLevelDiff == 4, the region will always be expanded by approximately 1/16
 * the width of its largest cell. Note that in the worst case, the number of cells in the output
 * can be up to 4 * (1 + 2 ** maxLevelDiff) times larger than the number of cells in the input.
 */
public void expand(S1Angle minRadius, int maxLevelDiff) {
  // Find the coarsest (smallest-numbered) level present in the union.
  int minLevel = S2CellId.MAX_LEVEL;
  for (S2CellId id : this) {
    minLevel = min(minLevel, id.level());
  }

  // Find the maximum level such that all cells are at least "minRadius" wide.
  int radiusLevel = S2Projections.MIN_WIDTH.getMaxLevel(minRadius.radians());
  if (radiusLevel == 0 && minRadius.radians() > S2Projections.MIN_WIDTH.getValue(0)) {
    // The requested expansion is greater than the width of a face cell. The easiest way to handle
    // this is to expand twice.
    expand(0);
  }
  expand(min(minLevel + maxLevelDiff, radiusLevel));
}

@Override
public S2Cap getCapBound() {
  // Compute the approximate centroid of the region. This won't produce the bounding cap of
  // minimal area, but it should be close enough.
  if (cellIds.isEmpty()) {
    return S2Cap.empty();
  }
  S2Point centroid = S2Point.ZERO;
  for (S2CellId id : this) {
    // Weight each cell's center point by the average area of cells at its level.
    double area = S2Cell.averageArea(id.level());
    centroid = centroid.add(id.toPoint().mul(area));
  }
  if (centroid.equalsPoint(S2Point.ZERO)) {
    // Degenerate case (cells cancel out); fall back to an arbitrary fixed axis.
    centroid = S2Point.X_POS;
  } else {
    centroid = centroid.normalize();
  }

  // Use the centroid as the cap axis, and expand the cap angle so that it contains the bounding
  // caps of all the individual cells. Note that it is *not* sufficient to just bound all the
  // cell vertices because the bounding cap may be concave (i.e. cover more than one hemisphere).
  S2Cap cap = S2Cap.fromAxisChord(centroid, S1ChordAngle.ZERO);
  for (S2CellId id : this) {
    cap = cap.addCap(new S2Cell(id).getCapBound());
  }
  return cap;
}

@Override
public S2LatLngRect getRectBound() {
  // Union of the per-cell lat/lng bounds.
  S2LatLngRect.Builder builder = S2LatLngRect.Builder.empty();
  for (S2CellId id : this) {
    builder.union(new S2Cell(id).getRectBound());
  }
  return builder.build();
}

@Override
public void getCellUnionBound(Collection<S2CellId> results) {
  // A cell union is its own exact cell covering.
  results.clear();
  results.addAll(cellIds);
}

/** This is a fast operation (logarithmic in the size of the cell union). */
@Override
public boolean mayIntersect(S2Cell cell) {
  return intersects(cell.id());
}

/**
 * The point 'p' does not need to be normalized. This is a fast operation (logarithmic in the size
 * of the cell union).
 */
@Override
@JsMethod(name = "containsPoint")
public boolean contains(S2Point p) {
  return contains(S2CellId.fromPoint(p));
}

/**
 * The number of leaf cells covered by the union. This will be no more than 6*2^60 for the whole
 * sphere.
 *
 * @return the number of leaf cells covered by the union
 */
public long leafCellsCovered() {
  long numLeaves = 0;
  for (S2CellId cellId : cellIds) {
    // A cell at level L covers 4^(MAX_LEVEL - L) leaf cells.
    int invertedLevel = S2CellId.MAX_LEVEL - cellId.level();
    numLeaves += (1L << (invertedLevel << 1));
  }
  return numLeaves;
}

/**
 * Approximate this cell union's area by summing the average area of each contained cell's average
 * area, using {@link S2Cell#averageArea()}. This is equivalent to the number of leaves covered,
 * multiplied by the average area of a leaf.
 *
 * <p>Note that {@link S2Cell#averageArea()} does not take into account distortion of cell, and
 * thus may be off by up to a factor of 1.7. NOTE: Since this is proportional to
 * LeafCellsCovered(), it is always better to use the other function if all you care about is the
 * relative average area between objects.
 *
 * @return the sum of the average area of each contained cell's average area
 */
public double averageBasedArea() {
  return S2Cell.averageArea(S2CellId.MAX_LEVEL) * leafCellsCovered();
}

/**
 * Calculates this cell union's area by summing the approximate area for each contained cell,
 * using {@link S2Cell#approxArea()}.
 *
 * @return approximate area of the cell union
 */
public double approxArea() {
  double area = 0;
  for (S2CellId cellId : cellIds) {
    area += new S2Cell(cellId).approxArea();
  }
  return area;
}

/**
 * Calculates this cell union's area by summing the exact area for each contained cell, using the
 * {@link S2Cell#exactArea()}.
 *
 * @return the exact area of the cell union
 */
public double exactArea() {
  double area = 0;
  for (S2CellId cellId : cellIds) {
    area += new S2Cell(cellId).exactArea();
  }
  return area;
}

/** Return true if two cell unions are identical.
 */
@Override
public boolean equals(Object that) {
  if (!(that instanceof S2CellUnion)) {
    return false;
  }
  S2CellUnion union = (S2CellUnion) that;
  // Equality is exact list equality (same cells in the same order), not region equality.
  return this.cellIds.equals(union.cellIds);
}

@Override
public int hashCode() {
  // Consistent with equals(): derived solely from the ordered list of cell ids.
  int value = 17;
  for (S2CellId id : this) {
    value = 37 * value + id.hashCode();
  }
  return value;
}

@Override
public String toString() {
  return cellIds.toString();
}

/**
 * Normalizes the cell union by discarding cells that are contained by other cells, replacing
 * groups of 4 child cells by their parent cell whenever possible, and sorting all the cell ids in
 * increasing order. Returns true if the number of cells was reduced.
 *
 * <p>This method *must* be called before doing any calculations on the cell union, such as
 * intersects() or contains().
 */
@CanIgnoreReturnValue
public boolean normalize() {
  return normalize(cellIds);
}

/** Like {@link #normalize()}, but works directly with a vector of S2CellIds. */
@JsIgnore
@CanIgnoreReturnValue
public static boolean normalize(List<S2CellId> ids) {
  // Optimize the representation by looking for cases where all subcells of a parent cell are
  // present.
  Collections.sort(ids);
  // "out" is the length of the normalized prefix built in place within "ids".
  int out = 0;
  for (int i = 0; i < ids.size(); i++) {
    S2CellId id = ids.get(i);
    // Check whether this cell is contained by the previous cell.
    if (out > 0 && ids.get(out - 1).contains(id)) {
      continue;
    }
    // Discard any previous cells contained by this cell.
    while (out > 0 && id.contains(ids.get(out - 1))) {
      out--;
    }
    // Check whether the last 3 elements of "output" plus "id" can be collapsed into a single
    // parent cell.
    while (out >= 3) {
      // A necessary (but not sufficient) condition is that the XOR of the four cells must be
      // zero. This is also very fast to test.
      if ((ids.get(out - 3).id() ^ ids.get(out - 2).id() ^ ids.get(out - 1).id()) != id.id()) {
        break;
      }
      // Now we do a slightly more expensive but exact test. First, compute a mask that blocks
      // out the two bits that encode the child position of "id" with respect to its parent, then
      // check that the other three children all agree with "mask".
      long mask = id.lowestOnBit() << 1;
      mask = ~(mask + (mask << 1));
      long idMasked = (id.id() & mask);
      if ((ids.get(out - 3).id() & mask) != idMasked
          || (ids.get(out - 2).id() & mask) != idMasked
          || (ids.get(out - 1).id() & mask) != idMasked
          || id.isFace()) {
        break;
      }
      // Replace four children by their parent cell.
      id = id.parent();
      out -= 3;
    }
    ids.set(out++, id);
  }
  // Trim the list down to the normalized prefix; report whether anything was removed.
  int size = ids.size();
  boolean trimmed = out < size;
  while (out < size) {
    size--;
    ids.remove(size);
  }
  return trimmed;
}

/**
 * Writes a simple lossless encoding of this cell union to the given output stream. This encoding
 * uses 1 byte for a version number, and N+1 64-bit longs where the first is the number of longs
 * that follow.
 *
 * @throws IOException there is a problem writing to the underlying stream
 */
@JsIgnore
public void encode(OutputStream output) throws IOException {
  output.write(LOSSLESS_ENCODING_VERSION);
  LittleEndianOutput.writeLong(output, cellIds.size());
  for (S2CellId cellId : this) {
    LittleEndianOutput.writeLong(output, cellId.id());
  }
}

/**
 * Decodes an S2CellUnion encoded with encode(), returning the decoded cell union. Decodes all
 * the cell ids immediately, i.e. is not lazy.
 *
 * <p>Use this method if a number of S2 objects will be decoded from the same underlying stream.
 *
 * @throws IOException there is a problem reading from the underlying stream, the version number
 *     doesn't match, or the number of elements to read is not between 0 and 2^31-1.
 */
@JsIgnore
public static S2CellUnion decode(InputStream input) throws IOException {
  // Should contain at least version and vector length.
// Read and validate the one-byte encoding version. InputStream.read() returns -1 at end of
  // stream; report that as an explicit truncation error instead of the misleading
  // "Unrecognized version number -1" the naive (byte) cast would produce.
  int versionByte = input.read();
  if (versionByte < 0) {
    throw new IOException("Unexpected end of stream while reading S2CellUnion version byte.");
  }
  byte version = (byte) versionByte;
  if (version != LOSSLESS_ENCODING_VERSION) {
    throw new IOException("Unrecognized version number " + version);
  }

  // The element count is encoded as a 64-bit little-endian long; reject counts that cannot fit
  // in a Java array/list.
  long numCells = LittleEndianInput.readLong(input);
  if (numCells < 0 || numCells > Integer.MAX_VALUE) {
    throw new IOException("Unsupported number of cells encountered: " + numCells);
  }

  // Eagerly read all cell ids; no normalization or validity check is performed here.
  S2CellUnion result = new S2CellUnion();
  for (int i = 0; i < numCells; i++) {
    result.cellIds().add(new S2CellId(LittleEndianInput.readLong(input)));
  }
  return result;
}
}
googleapis/google-cloud-java
36,534
java-kmsinventory/proto-google-cloud-kmsinventory-v1/src/main/java/com/google/cloud/kms/inventory/v1/ListCryptoKeysResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/kms/inventory/v1/key_dashboard_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.kms.inventory.v1; /** * * * <pre> * Response message for * [KeyDashboardService.ListCryptoKeys][google.cloud.kms.inventory.v1.KeyDashboardService.ListCryptoKeys]. * </pre> * * Protobuf type {@code google.cloud.kms.inventory.v1.ListCryptoKeysResponse} */ public final class ListCryptoKeysResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.kms.inventory.v1.ListCryptoKeysResponse) ListCryptoKeysResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListCryptoKeysResponse.newBuilder() to construct. 
// NOTE(review): protoc-generated code (file header says DO NOT EDIT) — hand edits will be lost
// on regeneration; only comments added here.
private ListCryptoKeysResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

private ListCryptoKeysResponse() {
  // Defaults for an empty message: no keys, empty page token.
  cryptoKeys_ = java.util.Collections.emptyList();
  nextPageToken_ = "";
}

@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new ListCryptoKeysResponse();
}

public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.kms.inventory.v1.KeyDashboardServiceProto
      .internal_static_google_cloud_kms_inventory_v1_ListCryptoKeysResponse_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.kms.inventory.v1.KeyDashboardServiceProto
      .internal_static_google_cloud_kms_inventory_v1_ListCryptoKeysResponse_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.kms.inventory.v1.ListCryptoKeysResponse.class,
          com.google.cloud.kms.inventory.v1.ListCryptoKeysResponse.Builder.class);
}

public static final int CRYPTO_KEYS_FIELD_NUMBER = 1;

@SuppressWarnings("serial")
private java.util.List<com.google.cloud.kms.v1.CryptoKey> cryptoKeys_;

/**
 *
 *
 * <pre>
 * The list of [CryptoKeys][google.cloud.kms.v1.CryptoKey].
 * </pre>
 *
 * <code>repeated .google.cloud.kms.v1.CryptoKey crypto_keys = 1;</code>
 */
@java.lang.Override
public java.util.List<com.google.cloud.kms.v1.CryptoKey> getCryptoKeysList() {
  return cryptoKeys_;
}

/**
 *
 *
 * <pre>
 * The list of [CryptoKeys][google.cloud.kms.v1.CryptoKey].
 * </pre>
 *
 * <code>repeated .google.cloud.kms.v1.CryptoKey crypto_keys = 1;</code>
 */
@java.lang.Override
public java.util.List<? extends com.google.cloud.kms.v1.CryptoKeyOrBuilder>
    getCryptoKeysOrBuilderList() {
  return cryptoKeys_;
}

/**
 *
 *
 * <pre>
 * The list of [CryptoKeys][google.cloud.kms.v1.CryptoKey].
 * </pre>
 *
 * <code>repeated .google.cloud.kms.v1.CryptoKey crypto_keys = 1;</code>
 */
@java.lang.Override
public int getCryptoKeysCount() {
  return cryptoKeys_.size();
}

/**
 *
 *
 * <pre>
 * The list of [CryptoKeys][google.cloud.kms.v1.CryptoKey].
 * </pre>
 *
 * <code>repeated .google.cloud.kms.v1.CryptoKey crypto_keys = 1;</code>
 */
@java.lang.Override
public com.google.cloud.kms.v1.CryptoKey getCryptoKeys(int index) {
  return cryptoKeys_.get(index);
}

/**
 *
 *
 * <pre>
 * The list of [CryptoKeys][google.cloud.kms.v1.CryptoKey].
 * </pre>
 *
 * <code>repeated .google.cloud.kms.v1.CryptoKey crypto_keys = 1;</code>
 */
@java.lang.Override
public com.google.cloud.kms.v1.CryptoKeyOrBuilder getCryptoKeysOrBuilder(int index) {
  return cryptoKeys_.get(index);
}

public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;

// Stores either a String or a ByteString; lazily converted and cached by the accessors below
// (standard protobuf generated-code pattern).
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";

/**
 *
 *
 * <pre>
 * The page token returned from the previous response if the next page is
 * desired.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The nextPageToken.
 */
@java.lang.Override
public java.lang.String getNextPageToken() {
  java.lang.Object ref = nextPageToken_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // Decode the cached ByteString and memoize the String form.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    nextPageToken_ = s;
    return s;
  }
}

/**
 *
 *
 * <pre>
 * The page token returned from the previous response if the next page is
 * desired.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The bytes for nextPageToken.
 */
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
  java.lang.Object ref = nextPageToken_;
  if (ref instanceof java.lang.String) {
    // Encode the cached String and memoize the ByteString form.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    nextPageToken_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}

// Memoized initialization state: -1 unknown, 0 not initialized, 1 initialized.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;

  // No required fields in this message, so it is always initialized.
  memoizedIsInitialized = 1;
  return true;
}

@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  for (int i = 0; i < cryptoKeys_.size(); i++) {
    output.writeMessage(1, cryptoKeys_.get(i));
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
  }
  getUnknownFields().writeTo(output);
}

@java.lang.Override
public int getSerializedSize() {
  // memoizedSize caches the computed size (-1 means not yet computed).
  int size = memoizedSize;
  if (size != -1) return size;

  size = 0;
  for (int i = 0; i < cryptoKeys_.size(); i++) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, cryptoKeys_.get(i));
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}

@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.kms.inventory.v1.ListCryptoKeysResponse)) {
    return super.equals(obj);
  }
  com.google.cloud.kms.inventory.v1.ListCryptoKeysResponse other =
      (com.google.cloud.kms.inventory.v1.ListCryptoKeysResponse) obj;

  // Field-by-field comparison, including unknown fields.
  if (!getCryptoKeysList().equals(other.getCryptoKeysList())) return false;
  if (!getNextPageToken().equals(other.getNextPageToken())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}

@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  if (getCryptoKeysCount() > 0) {
    hash = (37 * hash) + CRYPTO_KEYS_FIELD_NUMBER;
    hash = (53 * hash) + getCryptoKeysList().hashCode();
  }
  hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
  hash = (53 * hash) + getNextPageToken().hashCode();
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}

// Standard generated parse entry points for every supported input representation.
public static com.google.cloud.kms.inventory.v1.ListCryptoKeysResponse parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.kms.inventory.v1.ListCryptoKeysResponse parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.kms.inventory.v1.ListCryptoKeysResponse parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.kms.inventory.v1.ListCryptoKeysResponse parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.kms.inventory.v1.ListCryptoKeysResponse parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.kms.inventory.v1.ListCryptoKeysResponse parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.kms.inventory.v1.ListCryptoKeysResponse parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.kms.inventory.v1.ListCryptoKeysResponse parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.kms.inventory.v1.ListCryptoKeysResponse parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.kms.inventory.v1.ListCryptoKeysResponse parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.kms.inventory.v1.ListCryptoKeysResponse parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.kms.inventory.v1.ListCryptoKeysResponse parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

public static Builder newBuilder(
    com.google.cloud.kms.inventory.v1.ListCryptoKeysResponse prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}

/**
 *
 *
 * <pre>
 * Response message for
 * [KeyDashboardService.ListCryptoKeys][google.cloud.kms.inventory.v1.KeyDashboardService.ListCryptoKeys].
 * </pre>
 *
 * Protobuf type {@code google.cloud.kms.inventory.v1.ListCryptoKeysResponse}
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:google.cloud.kms.inventory.v1.ListCryptoKeysResponse)
    com.google.cloud.kms.inventory.v1.ListCryptoKeysResponseOrBuilder {
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.kms.inventory.v1.KeyDashboardServiceProto
        .internal_static_google_cloud_kms_inventory_v1_ListCryptoKeysResponse_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.kms.inventory.v1.KeyDashboardServiceProto
        .internal_static_google_cloud_kms_inventory_v1_ListCryptoKeysResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.kms.inventory.v1.ListCryptoKeysResponse.class,
            com.google.cloud.kms.inventory.v1.ListCryptoKeysResponse.Builder.class);
  }

  // Construct using com.google.cloud.kms.inventory.v1.ListCryptoKeysResponse.newBuilder()
  private Builder() {}

  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
  }

  @java.lang.Override
  public Builder clear() {
    super.clear();
    bitField0_ = 0;
    if (cryptoKeysBuilder_ == null) {
      cryptoKeys_ = java.util.Collections.emptyList();
    }
else { cryptoKeys_ = null; cryptoKeysBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.kms.inventory.v1.KeyDashboardServiceProto .internal_static_google_cloud_kms_inventory_v1_ListCryptoKeysResponse_descriptor; } @java.lang.Override public com.google.cloud.kms.inventory.v1.ListCryptoKeysResponse getDefaultInstanceForType() { return com.google.cloud.kms.inventory.v1.ListCryptoKeysResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.kms.inventory.v1.ListCryptoKeysResponse build() { com.google.cloud.kms.inventory.v1.ListCryptoKeysResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.kms.inventory.v1.ListCryptoKeysResponse buildPartial() { com.google.cloud.kms.inventory.v1.ListCryptoKeysResponse result = new com.google.cloud.kms.inventory.v1.ListCryptoKeysResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.kms.inventory.v1.ListCryptoKeysResponse result) { if (cryptoKeysBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { cryptoKeys_ = java.util.Collections.unmodifiableList(cryptoKeys_); bitField0_ = (bitField0_ & ~0x00000001); } result.cryptoKeys_ = cryptoKeys_; } else { result.cryptoKeys_ = cryptoKeysBuilder_.build(); } } private void buildPartial0(com.google.cloud.kms.inventory.v1.ListCryptoKeysResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) 
{ return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.kms.inventory.v1.ListCryptoKeysResponse) { return mergeFrom((com.google.cloud.kms.inventory.v1.ListCryptoKeysResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.kms.inventory.v1.ListCryptoKeysResponse other) { if (other == com.google.cloud.kms.inventory.v1.ListCryptoKeysResponse.getDefaultInstance()) return this; if (cryptoKeysBuilder_ == null) { if (!other.cryptoKeys_.isEmpty()) { if (cryptoKeys_.isEmpty()) { cryptoKeys_ = other.cryptoKeys_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureCryptoKeysIsMutable(); cryptoKeys_.addAll(other.cryptoKeys_); } onChanged(); } } else { if (!other.cryptoKeys_.isEmpty()) { if (cryptoKeysBuilder_.isEmpty()) { cryptoKeysBuilder_.dispose(); cryptoKeysBuilder_ = null; cryptoKeys_ = other.cryptoKeys_; bitField0_ = (bitField0_ & ~0x00000001); cryptoKeysBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getCryptoKeysFieldBuilder() : null; } else { cryptoKeysBuilder_.addAllMessages(other.cryptoKeys_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.kms.v1.CryptoKey m = input.readMessage( com.google.cloud.kms.v1.CryptoKey.parser(), extensionRegistry); if (cryptoKeysBuilder_ == null) { ensureCryptoKeysIsMutable(); cryptoKeys_.add(m); } else { cryptoKeysBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.kms.v1.CryptoKey> cryptoKeys_ = java.util.Collections.emptyList(); private void ensureCryptoKeysIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { cryptoKeys_ = new java.util.ArrayList<com.google.cloud.kms.v1.CryptoKey>(cryptoKeys_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.kms.v1.CryptoKey, com.google.cloud.kms.v1.CryptoKey.Builder, com.google.cloud.kms.v1.CryptoKeyOrBuilder> cryptoKeysBuilder_; /** * * * <pre> * 
The list of [CryptoKeys][google.cloud.kms.v1.CryptoKey]. * </pre> * * <code>repeated .google.cloud.kms.v1.CryptoKey crypto_keys = 1;</code> */ public java.util.List<com.google.cloud.kms.v1.CryptoKey> getCryptoKeysList() { if (cryptoKeysBuilder_ == null) { return java.util.Collections.unmodifiableList(cryptoKeys_); } else { return cryptoKeysBuilder_.getMessageList(); } } /** * * * <pre> * The list of [CryptoKeys][google.cloud.kms.v1.CryptoKey]. * </pre> * * <code>repeated .google.cloud.kms.v1.CryptoKey crypto_keys = 1;</code> */ public int getCryptoKeysCount() { if (cryptoKeysBuilder_ == null) { return cryptoKeys_.size(); } else { return cryptoKeysBuilder_.getCount(); } } /** * * * <pre> * The list of [CryptoKeys][google.cloud.kms.v1.CryptoKey]. * </pre> * * <code>repeated .google.cloud.kms.v1.CryptoKey crypto_keys = 1;</code> */ public com.google.cloud.kms.v1.CryptoKey getCryptoKeys(int index) { if (cryptoKeysBuilder_ == null) { return cryptoKeys_.get(index); } else { return cryptoKeysBuilder_.getMessage(index); } } /** * * * <pre> * The list of [CryptoKeys][google.cloud.kms.v1.CryptoKey]. * </pre> * * <code>repeated .google.cloud.kms.v1.CryptoKey crypto_keys = 1;</code> */ public Builder setCryptoKeys(int index, com.google.cloud.kms.v1.CryptoKey value) { if (cryptoKeysBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureCryptoKeysIsMutable(); cryptoKeys_.set(index, value); onChanged(); } else { cryptoKeysBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The list of [CryptoKeys][google.cloud.kms.v1.CryptoKey]. 
* </pre> * * <code>repeated .google.cloud.kms.v1.CryptoKey crypto_keys = 1;</code> */ public Builder setCryptoKeys( int index, com.google.cloud.kms.v1.CryptoKey.Builder builderForValue) { if (cryptoKeysBuilder_ == null) { ensureCryptoKeysIsMutable(); cryptoKeys_.set(index, builderForValue.build()); onChanged(); } else { cryptoKeysBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of [CryptoKeys][google.cloud.kms.v1.CryptoKey]. * </pre> * * <code>repeated .google.cloud.kms.v1.CryptoKey crypto_keys = 1;</code> */ public Builder addCryptoKeys(com.google.cloud.kms.v1.CryptoKey value) { if (cryptoKeysBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureCryptoKeysIsMutable(); cryptoKeys_.add(value); onChanged(); } else { cryptoKeysBuilder_.addMessage(value); } return this; } /** * * * <pre> * The list of [CryptoKeys][google.cloud.kms.v1.CryptoKey]. * </pre> * * <code>repeated .google.cloud.kms.v1.CryptoKey crypto_keys = 1;</code> */ public Builder addCryptoKeys(int index, com.google.cloud.kms.v1.CryptoKey value) { if (cryptoKeysBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureCryptoKeysIsMutable(); cryptoKeys_.add(index, value); onChanged(); } else { cryptoKeysBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The list of [CryptoKeys][google.cloud.kms.v1.CryptoKey]. * </pre> * * <code>repeated .google.cloud.kms.v1.CryptoKey crypto_keys = 1;</code> */ public Builder addCryptoKeys(com.google.cloud.kms.v1.CryptoKey.Builder builderForValue) { if (cryptoKeysBuilder_ == null) { ensureCryptoKeysIsMutable(); cryptoKeys_.add(builderForValue.build()); onChanged(); } else { cryptoKeysBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The list of [CryptoKeys][google.cloud.kms.v1.CryptoKey]. 
* </pre> * * <code>repeated .google.cloud.kms.v1.CryptoKey crypto_keys = 1;</code> */ public Builder addCryptoKeys( int index, com.google.cloud.kms.v1.CryptoKey.Builder builderForValue) { if (cryptoKeysBuilder_ == null) { ensureCryptoKeysIsMutable(); cryptoKeys_.add(index, builderForValue.build()); onChanged(); } else { cryptoKeysBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of [CryptoKeys][google.cloud.kms.v1.CryptoKey]. * </pre> * * <code>repeated .google.cloud.kms.v1.CryptoKey crypto_keys = 1;</code> */ public Builder addAllCryptoKeys( java.lang.Iterable<? extends com.google.cloud.kms.v1.CryptoKey> values) { if (cryptoKeysBuilder_ == null) { ensureCryptoKeysIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, cryptoKeys_); onChanged(); } else { cryptoKeysBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The list of [CryptoKeys][google.cloud.kms.v1.CryptoKey]. * </pre> * * <code>repeated .google.cloud.kms.v1.CryptoKey crypto_keys = 1;</code> */ public Builder clearCryptoKeys() { if (cryptoKeysBuilder_ == null) { cryptoKeys_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { cryptoKeysBuilder_.clear(); } return this; } /** * * * <pre> * The list of [CryptoKeys][google.cloud.kms.v1.CryptoKey]. * </pre> * * <code>repeated .google.cloud.kms.v1.CryptoKey crypto_keys = 1;</code> */ public Builder removeCryptoKeys(int index) { if (cryptoKeysBuilder_ == null) { ensureCryptoKeysIsMutable(); cryptoKeys_.remove(index); onChanged(); } else { cryptoKeysBuilder_.remove(index); } return this; } /** * * * <pre> * The list of [CryptoKeys][google.cloud.kms.v1.CryptoKey]. 
* </pre> * * <code>repeated .google.cloud.kms.v1.CryptoKey crypto_keys = 1;</code> */ public com.google.cloud.kms.v1.CryptoKey.Builder getCryptoKeysBuilder(int index) { return getCryptoKeysFieldBuilder().getBuilder(index); } /** * * * <pre> * The list of [CryptoKeys][google.cloud.kms.v1.CryptoKey]. * </pre> * * <code>repeated .google.cloud.kms.v1.CryptoKey crypto_keys = 1;</code> */ public com.google.cloud.kms.v1.CryptoKeyOrBuilder getCryptoKeysOrBuilder(int index) { if (cryptoKeysBuilder_ == null) { return cryptoKeys_.get(index); } else { return cryptoKeysBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The list of [CryptoKeys][google.cloud.kms.v1.CryptoKey]. * </pre> * * <code>repeated .google.cloud.kms.v1.CryptoKey crypto_keys = 1;</code> */ public java.util.List<? extends com.google.cloud.kms.v1.CryptoKeyOrBuilder> getCryptoKeysOrBuilderList() { if (cryptoKeysBuilder_ != null) { return cryptoKeysBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(cryptoKeys_); } } /** * * * <pre> * The list of [CryptoKeys][google.cloud.kms.v1.CryptoKey]. * </pre> * * <code>repeated .google.cloud.kms.v1.CryptoKey crypto_keys = 1;</code> */ public com.google.cloud.kms.v1.CryptoKey.Builder addCryptoKeysBuilder() { return getCryptoKeysFieldBuilder() .addBuilder(com.google.cloud.kms.v1.CryptoKey.getDefaultInstance()); } /** * * * <pre> * The list of [CryptoKeys][google.cloud.kms.v1.CryptoKey]. * </pre> * * <code>repeated .google.cloud.kms.v1.CryptoKey crypto_keys = 1;</code> */ public com.google.cloud.kms.v1.CryptoKey.Builder addCryptoKeysBuilder(int index) { return getCryptoKeysFieldBuilder() .addBuilder(index, com.google.cloud.kms.v1.CryptoKey.getDefaultInstance()); } /** * * * <pre> * The list of [CryptoKeys][google.cloud.kms.v1.CryptoKey]. 
* </pre> * * <code>repeated .google.cloud.kms.v1.CryptoKey crypto_keys = 1;</code> */ public java.util.List<com.google.cloud.kms.v1.CryptoKey.Builder> getCryptoKeysBuilderList() { return getCryptoKeysFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.kms.v1.CryptoKey, com.google.cloud.kms.v1.CryptoKey.Builder, com.google.cloud.kms.v1.CryptoKeyOrBuilder> getCryptoKeysFieldBuilder() { if (cryptoKeysBuilder_ == null) { cryptoKeysBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.kms.v1.CryptoKey, com.google.cloud.kms.v1.CryptoKey.Builder, com.google.cloud.kms.v1.CryptoKeyOrBuilder>( cryptoKeys_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); cryptoKeys_ = null; } return cryptoKeysBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * The page token returned from the previous response if the next page is * desired. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The page token returned from the previous response if the next page is * desired. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The page token returned from the previous response if the next page is * desired. 
* </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The page token returned from the previous response if the next page is * desired. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * The page token returned from the previous response if the next page is * desired. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.kms.inventory.v1.ListCryptoKeysResponse) } // @@protoc_insertion_point(class_scope:google.cloud.kms.inventory.v1.ListCryptoKeysResponse) private static final com.google.cloud.kms.inventory.v1.ListCryptoKeysResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.kms.inventory.v1.ListCryptoKeysResponse(); } public static com.google.cloud.kms.inventory.v1.ListCryptoKeysResponse getDefaultInstance() { return 
DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListCryptoKeysResponse> PARSER = new com.google.protobuf.AbstractParser<ListCryptoKeysResponse>() { @java.lang.Override public ListCryptoKeysResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListCryptoKeysResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListCryptoKeysResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.kms.inventory.v1.ListCryptoKeysResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/solr
36,570
solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.solr.core; import com.google.common.annotations.VisibleForTesting; import java.io.Closeable; import java.io.FilterInputStream; import java.io.IOException; import java.io.InputStream; import java.lang.invoke.MethodHandles; import java.lang.reflect.Constructor; import java.net.MalformedURLException; import java.net.URL; import java.net.URLClassLoader; import java.nio.charset.CharacterCodingException; import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; import java.nio.file.DirectoryStream; import java.nio.file.FileSystems; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.PathMatcher; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; import java.util.function.Function; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; import org.apache.lucene.analysis.CharFilterFactory; import org.apache.lucene.analysis.TokenFilterFactory; import org.apache.lucene.analysis.TokenizerFactory; import 
org.apache.lucene.analysis.WordlistLoader; import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.PostingsFormat; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.ResourceLoader; import org.apache.lucene.util.ResourceLoaderAware; import org.apache.solr.common.SolrException; import org.apache.solr.common.cloud.SolrClassLoader; import org.apache.solr.common.util.EnvUtils; import org.apache.solr.handler.component.SearchComponent; import org.apache.solr.handler.component.ShardHandlerFactory; import org.apache.solr.logging.DeprecationLog; import org.apache.solr.pkg.PackageListeningClassLoader; import org.apache.solr.pkg.SolrPackageLoader; import org.apache.solr.request.SolrRequestHandler; import org.apache.solr.response.QueryResponseWriter; import org.apache.solr.rest.RestManager; import org.apache.solr.schema.FieldType; import org.apache.solr.schema.ManagedIndexSchemaFactory; import org.apache.solr.schema.SimilarityFactory; import org.apache.solr.search.QParserPlugin; import org.apache.solr.update.processor.UpdateRequestProcessorFactory; import org.apache.solr.util.circuitbreaker.CircuitBreaker; import org.apache.solr.util.plugin.SolrCoreAware; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * @since solr 1.3 */ public class SolrResourceLoader implements ResourceLoader, Closeable, SolrClassLoader, SolrCoreAware { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); private static final String base = "org.apache.solr"; private static final String[] packages = { "", "analysis.", "schema.", "handler.", "handler.tagger.", "search.", "update.", "core.", "response.", "request.", "update.processor.", "util.", "util.circuitbreaker.", "spelling.", "handler.component.", "spelling.suggest.", "spelling.suggest.fst.", "rest.schema.analysis.", "security.", "handler.admin.", "security.jwt.", "security.cert.", "handler.sql.", "crossdc.handler.", 
"crossdc.update.processor." }; private static final Charset UTF_8 = StandardCharsets.UTF_8; public static final String SOLR_RESOURCELOADING_RESTRICTED_ENABLED_PARAM = "solr.resourceloading.restricted.enabled"; private final boolean restrictUnsafeResourceloading; private String name = ""; protected URLClassLoader classLoader; private final Path instanceDir; private String coreName; private UUID coreId; private SolrConfig config; private CoreContainer coreContainer; private PackageListeningClassLoader schemaLoader; private PackageListeningClassLoader coreReloadingClassLoader; private final List<SolrCoreAware> waitingForCore = Collections.synchronizedList(new ArrayList<>()); private final List<SolrInfoBean> infoMBeans = Collections.synchronizedList(new ArrayList<>()); private final List<ResourceLoaderAware> waitingForResources = Collections.synchronizedList(new ArrayList<>()); private volatile boolean live; // Provide a registry so that managed resources can register themselves while the XML // configuration documents are being parsed ... after all are registered, they are asked by the // RestManager to initialize themselves. This two-step process is required because not all // resources are available (such as the SolrZkClient) when XML docs are being parsed. private RestManager.Registry managedResourceRegistry; /** * @see #reloadLuceneSPI() */ private boolean needToReloadLuceneSPI = false; // requires synchronization public synchronized RestManager.Registry getManagedResourceRegistry() { if (managedResourceRegistry == null) { managedResourceRegistry = new RestManager.Registry(); } return managedResourceRegistry; } public SolrClassLoader getSchemaLoader() { if (schemaLoader == null) { schemaLoader = createSchemaLoader(); } return schemaLoader; } /** Creates a loader. Note: we do NOT call {@link #reloadLuceneSPI()}. 
*/ public SolrResourceLoader( String name, List<Path> classpath, Path instanceDir, ClassLoader parent) { this(instanceDir, parent); this.name = name; final List<URL> libUrls = new ArrayList<>(classpath.size()); try { for (Path path : classpath) { libUrls.add(path.toUri().normalize().toURL()); } } catch (MalformedURLException e) { // impossible? throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e); } addToClassLoader(libUrls); } /** * Creates a loader. * * @param instanceDir - base directory for this resource loader, must not be null */ public SolrResourceLoader(Path instanceDir) { this(instanceDir, null); } /** * This loader will delegate to Solr's classloader when possible, otherwise it will attempt to * resolve resources using any jar files found in the "lib/" directory in the specified instance * directory. */ public SolrResourceLoader(Path instanceDir, ClassLoader parent) { restrictUnsafeResourceloading = EnvUtils.getPropertyAsBool(SOLR_RESOURCELOADING_RESTRICTED_ENABLED_PARAM, true); if (instanceDir == null) { throw new NullPointerException("SolrResourceLoader instanceDir must be non-null"); } this.instanceDir = instanceDir; log.debug("new SolrResourceLoader for directory: '{}'", this.instanceDir); if (parent == null) { parent = getClass().getClassLoader(); } this.classLoader = URLClassLoader.newInstance(new URL[0], parent); } /** * Adds URLs to the ResourceLoader's internal classloader. This method <b>MUST</b> only be called * prior to using this ResourceLoader to get any resources, otherwise its behavior will be * non-deterministic. You also have to {link @reloadLuceneSPI} before using this ResourceLoader. 
* * @param urls the URLs of files to add */ synchronized void addToClassLoader(List<URL> urls) { URLClassLoader newLoader = addURLsToClassLoader(classLoader, urls); if (newLoader == classLoader) { return; // short-circuit } this.classLoader = newLoader; this.needToReloadLuceneSPI = true; if (log.isInfoEnabled()) { log.info( "Added {} libs to classloader, from paths: {}", urls.size(), urls.stream() .map(u -> u.getPath().substring(0, u.getPath().lastIndexOf('/'))) .sorted() .distinct() .collect(Collectors.toList())); } } /** * Reloads all Lucene SPI implementations using the new classloader. This method must be called * after {@link #addToClassLoader(List)} and before using this ResourceLoader. */ synchronized void reloadLuceneSPI() { // TODO improve to use a static Set<URL> to check when we need to if (!needToReloadLuceneSPI) { return; } needToReloadLuceneSPI = false; // reset log.debug("Reloading Lucene SPI"); // Codecs: PostingsFormat.reloadPostingsFormats(this.classLoader); DocValuesFormat.reloadDocValuesFormats(this.classLoader); Codec.reloadCodecs(this.classLoader); // Analysis: CharFilterFactory.reloadCharFilters(this.classLoader); TokenFilterFactory.reloadTokenFilters(this.classLoader); TokenizerFactory.reloadTokenizers(this.classLoader); } private static URLClassLoader addURLsToClassLoader( final URLClassLoader oldLoader, List<URL> urls) { if (urls.size() == 0) { return oldLoader; } List<URL> allURLs = new ArrayList<>(); allURLs.addAll(Arrays.asList(oldLoader.getURLs())); allURLs.addAll(urls); for (URL url : urls) { if (log.isDebugEnabled()) { log.debug("Adding '{}' to classloader", url); } } ClassLoader oldParent = oldLoader.getParent(); IOUtils.closeWhileHandlingException(oldLoader); return URLClassLoader.newInstance(allURLs.toArray(new URL[0]), oldParent); } /** * Utility method to get the URLs of all paths under a given directory that match a filter * * @param libDir the root directory * @param filter the filter * @return all matching URLs * @throws 
IOException on error */ public static List<URL> getURLs(Path libDir, DirectoryStream.Filter<Path> filter) throws IOException { List<URL> urls = new ArrayList<>(); try (DirectoryStream<Path> directory = Files.newDirectoryStream(libDir, filter)) { for (Path element : directory) { urls.add(element.toUri().normalize().toURL()); } } return urls; } /** * Utility method to get the URLs of all paths under a given directory * * @param libDir the root directory * @return all subdirectories as URLs * @throws IOException on error */ public static List<URL> getURLs(Path libDir) throws IOException { return getURLs(libDir, entry -> true); } /** * Utility method to get the URLs of all paths under a given directory that match a regex * * @param libDir the root directory * @param regex the regex as a String * @return all matching URLs * @throws IOException on error */ public static List<URL> getFilteredURLs(Path libDir, String regex) throws IOException { final PathMatcher matcher = libDir.getFileSystem().getPathMatcher("regex:" + regex); return getURLs(libDir, entry -> matcher.matches(entry.getFileName())); } public Path getConfigPath() { return instanceDir.resolve("conf"); } /** * EXPERT * * <p>The underlying class loader. Most applications will not need to use this. * * @return The {@link ClassLoader} */ public ClassLoader getClassLoader() { return classLoader; } /** * Opens any resource by its name. By default, this will look in multiple locations to load the * resource: $configDir/$resource (if resource is not absolute) $CWD/$resource otherwise, it will * look for it in any jar accessible through the class loader. Override this method to customize * loading resources. 
* @param resource the (possibly relative) resource name to open
   * @return the stream for the named resource
   */
  @Override
  public InputStream openResource(String resource) throws IOException {
    if (resource.trim().startsWith("\\\\")) { // Always disallow UNC paths
      throw new SolrResourceNotFoundException("Resource '" + resource + "' could not be loaded.");
    }
    // Resolve candidate locations; normalize() collapses ".." segments so the containment
    // check below is meaningful.
    Path instanceDir = getInstancePath().normalize();
    Path inInstanceDir = getInstancePath().resolve(resource).normalize();
    Path inConfigDir = instanceDir.resolve("conf").resolve(resource).normalize();
    // Path-traversal guard: unless unsafe loading is explicitly allowed, only serve files
    // that resolve to a location inside the instance dir.
    if (!restrictUnsafeResourceloading || inInstanceDir.startsWith(instanceDir)) {
      // The resource is either inside instance dir or we allow unsafe loading, so allow testing if
      // file exists
      // conf/ is preferred over the instance dir itself.
      if (Files.exists(inConfigDir) && Files.isReadable(inConfigDir)) {
        return new SolrFileInputStream(inConfigDir);
      }
      if (Files.exists(inInstanceDir) && Files.isReadable(inInstanceDir)) {
        return new SolrFileInputStream(inInstanceDir);
      }
    }
    // Delegate to the class loader (looking into $INSTANCE_DIR/lib jars).
    // We need a ClassLoader-compatible (forward-slashes) path here!
    InputStream is =
        classLoader.getResourceAsStream(
            resource.replace(FileSystems.getDefault().getSeparator(), "/"));
    // This is a hack just for tests (it is not done in ZKResourceLoader)!
    // TODO can we nuke this?
    if (is == null && System.getProperty("jetty.testMode") != null) {
      is =
          classLoader.getResourceAsStream(
              ("conf/" + resource.replace(FileSystems.getDefault().getSeparator(), "/")));
    }
    if (is == null) {
      throw new SolrResourceNotFoundException(
          "Can't find resource '" + resource + "' in classpath or '" + instanceDir + "'");
    }
    return is;
  }

  /**
   * Report the location of a resource found by the resource loader.
   *
   * <p>Mirrors the lookup order of {@link #openResource(String)}: conf dir, then instance dir,
   * then classpath. Returns {@code null} for disallowed or (when unsafe loading is restricted)
   * unresolvable resources.
   */
  public String resourceLocation(String resource) {
    if (resource.trim().startsWith("\\\\")) { // Disallow UNC
      return null;
    }
    Path inInstanceDir = instanceDir.resolve(resource).normalize();
    Path inConfigDir = instanceDir.resolve("conf").resolve(resource).normalize();
    if (!restrictUnsafeResourceloading || inInstanceDir.startsWith(instanceDir.normalize())) {
      if (Files.exists(inConfigDir) && Files.isReadable(inConfigDir))
        return inConfigDir.normalize().toString();
      if (Files.exists(inInstanceDir) && Files.isReadable(inInstanceDir))
        return inInstanceDir.normalize().toString();
    }
    // Probe the classpath; the stream is opened only to test existence, then closed.
    try (InputStream is =
        classLoader.getResourceAsStream(
            resource.replace(FileSystems.getDefault().getSeparator(), "/"))) {
      if (is != null) return "classpath:" + resource;
    } catch (IOException e) {
      // ignore -- a close() failure does not change the answer
    }
    return restrictUnsafeResourceloading ? null : resource;
  }

  /**
   * Accesses a resource by name and returns the (non comment) lines containing data.
   *
   * <p>A comment line is any line that starts with the character "#"
   *
   * @return a list of non-blank non-comment lines with whitespace trimmed from front and back.
   * @throws IOException If there is a low-level I/O error.
   */
  public List<String> getLines(String resource) throws IOException {
    // UTF-8 is the default encoding for config resources.
    return getLines(resource, UTF_8);
  }

  /**
   * Accesses a resource by name and returns the (non comment) lines containing data using the given
   * character encoding.
*
   * <p>A comment line is any line that starts with the character "#"
   *
   * @param resource the file to be read
   * @return a list of non-blank non-comment lines with whitespace trimmed
   * @throws IOException If there is a low-level I/O error.
   */
  public List<String> getLines(String resource, String encoding) throws IOException {
    return getLines(resource, Charset.forName(encoding));
  }

  public List<String> getLines(String resource, Charset charset) throws IOException {
    try {
      // WordlistLoader performs the comment/blank-line stripping described above and
      // closes the stream.
      return WordlistLoader.getLines(openResource(resource), charset);
    } catch (CharacterCodingException ex) {
      throw new SolrException(
          SolrException.ErrorCode.SERVER_ERROR,
          "Error loading resource (wrong encoding?): " + resource,
          ex);
    }
  }

  /*
   * A static map of short class name to fully qualified class name
   */
  private static final Map<String, String> classNameCache = new ConcurrentHashMap<>();

  // Test hook: resets the short-name -> FQN cache between tests.
  @VisibleForTesting
  static void clearCache() {
    classNameCache.clear();
  }

  // Using this pattern, legacy analysis components from previous Solr versions are identified and
  // delegated to SPI loader:
  private static final Pattern legacyAnalysisPattern =
      Pattern.compile(
          "((\\Q"
              + base
              + ".analysis.\\E)|(\\Qsolr.\\E))([\\p{L}_$][\\p{L}\\p{N}_$]+?)(TokenFilter|Filter|Tokenizer|CharFilter)Factory");

  @Override
  public <T> Class<? extends T> findClass(String cname, Class<T> expectedType) {
    // No explicit subpackages: the 3-arg overload substitutes the default package list.
    return findClass(cname, expectedType, empty);
  }

  /**
   * This method loads a class either with its FQN or a short-name (solr.class-simplename or
   * class-simplename). It tries to load the class with the name that is given first and if it
   * fails, it tries all the known solr packages. This method caches the FQN of a short-name in a
   * static map in-order to make subsequent lookups for the same class faster. The caching is done
   * only if the class is loaded by the webapp classloader and it is loaded using a shortname.
   *
   * @param cname The name or the short name of the class.
   * @param subpackages the packages to be tried if the cname starts with solr.
* @return the loaded class. An exception is thrown if it fails
   */
  public <T> Class<? extends T> findClass(
      String cname, Class<T> expectedType, String... subpackages) {
    if (subpackages == null || subpackages.length == 0 || subpackages == packages) {
      subpackages = packages;
      // Fast path: short name previously resolved and cached.
      String c = classNameCache.get(cname);
      if (c != null) {
        try {
          return Class.forName(c, true, classLoader).asSubclass(expectedType);
        } catch (ClassNotFoundException | ClassCastException e) {
          // this can happen if the legacyAnalysisPattern below caches the wrong thing
          log.warn(
              "{} Unable to load cached class, attempting lookup. name={} shortname={} reason={}",
              name,
              c,
              cname,
              e);
          classNameCache.remove(cname);
        }
      }
    }
    Class<? extends T> clazz;
    // Package-aware (schema) lookup first; returns null when not applicable.
    clazz = getPackageClass(cname, expectedType);
    if (clazz != null) return clazz;
    try {
      // first try legacy analysis patterns, now replaced by Lucene's Analysis package:
      final Matcher m = legacyAnalysisPattern.matcher(cname);
      if (m.matches()) {
        final String name = m.group(4);
        log.trace("Trying to load class from analysis SPI using name='{}'", name);
        try {
          if (CharFilterFactory.class.isAssignableFrom(expectedType)) {
            return clazz = CharFilterFactory.lookupClass(name).asSubclass(expectedType);
          } else if (TokenizerFactory.class.isAssignableFrom(expectedType)) {
            return clazz = TokenizerFactory.lookupClass(name).asSubclass(expectedType);
          } else if (TokenFilterFactory.class.isAssignableFrom(expectedType)) {
            return clazz = TokenFilterFactory.lookupClass(name).asSubclass(expectedType);
          } else {
            log.warn(
                "'{}' looks like an analysis factory, but caller requested different class type: {}",
                cname,
                expectedType.getName());
          }
        } catch (IllegalArgumentException ex) {
          // ok, we fall back to legacy loading
        }
      }
      // first try cname == full name
      try {
        return clazz = Class.forName(cname, true, classLoader).asSubclass(expectedType);
      } catch (ClassNotFoundException e) {
        String newName = cname;
        // NOTE(review): assumes a "solr." prefix but only checks "solr" -- a cname of exactly
        // "solr" would throw StringIndexOutOfBoundsException here; TODO confirm short names
        // always carry the dot.
        if (newName.startsWith("solr")) {
          newName = cname.substring("solr".length() + 1);
        }
        // Probe each known subpackage; the entries are expected to supply their own trailing dot.
        for (String subpackage : subpackages) {
          try {
            String name = base + '.' + subpackage + newName;
            log.trace("Trying class name {}", name);
            return clazz = Class.forName(name, true, classLoader).asSubclass(expectedType);
          } catch (ClassNotFoundException e1) {
            // ignore... assume first exception is best.
          }
        }
        // 'name' here is the loader's field, not the loop-local; 'e' is the original failure.
        throw new SolrException(
            SolrException.ErrorCode.SERVER_ERROR,
            name + " Error loading class '" + cname + "'",
            e);
      }
    } finally {
      if (clazz != null) {
        // cache the shortname vs FQN if it is loaded by the webapp classloader and it is loaded
        // using a shortname
        if (clazz.getClassLoader() == SolrResourceLoader.class.getClassLoader()
            && !cname.equals(clazz.getName())
            && (subpackages.length == 0 || subpackages == packages)) {
          // store in the cache
          classNameCache.put(cname, clazz.getName());
        }
        // print warning if class is deprecated
        if (clazz.isAnnotationPresent(Deprecated.class)) {
          DeprecationLog.log(
              cname,
              "Solr loaded a deprecated plugin/analysis class ["
                  + cname
                  + "]. Please consult documentation how to replace it accordingly.");
        }
      }
    }
  }

  private <T> Class<?
extends T> getPackageClass(String cname, Class<T> expectedType) {
    PluginInfo.ClassName cName = PluginInfo.parseClassName(cname);
    // Only package-qualified names are eligible for package-aware loading.
    if (cName.pkg == null) return null;
    ResourceLoaderAware aware = CURRENT_AWARE.get();
    if (aware != null) {
      // this is invoked from a component
      // let's check if it's a schema component
      Class<?> type = assertAwareCompatibility(ResourceLoaderAware.class, aware);
      if (schemaResourceLoaderComponents.contains(type)) {
        // this is a schema component
        // let's use package-aware schema classloader
        return getSchemaLoader().findClass(cname, expectedType);
      }
    }
    return null;
  }

  // Shared empty-varargs sentinel; identity-compared elsewhere, so keep it a single instance.
  static final String[] empty = new String[0];

  @Override
  public <T> T newInstance(String name, Class<T> expectedType) {
    return newInstance(name, expectedType, empty);
  }

  private static final Class<?>[] NO_CLASSES = new Class<?>[0];
  private static final Object[] NO_OBJECTS = new Object[0];

  @Override
  public <T> T newInstance(String cname, Class<T> expectedType, String... subpackages) {
    // Zero-arg construction.
    return newInstance(cname, expectedType, subpackages, NO_CLASSES, NO_OBJECTS);
  }

  /**
   * Resolves {@code cName} via {@link #findClass}, instantiates it with the given constructor
   * signature (falling back to the zero-arg constructor when the signature does not match), and
   * registers the new object with the Aware/info-bean tracking lists.
   */
  @Override
  public <T> T newInstance(
      String cName, Class<T> expectedType, String[] subPackages, Class<?>[] params, Object[] args) {
    Class<? extends T> clazz = findClass(cName, expectedType, subPackages);
    if (clazz == null) {
      throw new SolrException(
          SolrException.ErrorCode.SERVER_ERROR,
          "Can not find class: " + cName + " in " + classLoader);
    }
    T obj;
    try {
      Constructor<? extends T> constructor;
      try {
        constructor = clazz.getConstructor(params);
        obj = constructor.newInstance(args);
      } catch (NoSuchMethodException e) {
        // look for a zero arg constructor if the constructor args do not match
        try {
          constructor = clazz.getConstructor();
          obj = constructor.newInstance();
        } catch (NoSuchMethodException e1) {
          // Neither signature exists: surface the original (more informative) failure.
          throw e;
        }
      }
    } catch (Error err) {
      // Errors (e.g. NoClassDefFoundError) are logged for diagnosis but must propagate.
      log.error(
          "Loading Class {} ({}) triggered serious java error: {}",
          cName,
          clazz.getName(),
          err.getClass().getName(),
          err);
      throw err;
    } catch (Exception e) {
      throw new SolrException(
          SolrException.ErrorCode.SERVER_ERROR,
          "Error instantiating class: '" + clazz.getName() + "'",
          e);
    }
    // Queue the new object for the deferred inform() callbacks.
    addToCoreAware(obj);
    addToResourceLoaderAware(obj);
    addToInfoBeans(obj);
    return obj;
  }

  /** Tracks {@code obj} as a {@link SolrInfoBean} while the loader is not yet live; no-op after. */
  public <T> void addToInfoBeans(T obj) {
    if (!live) {
      if (obj instanceof SolrInfoBean) {
        // TODO: Assert here?
        infoMBeans.add((SolrInfoBean) obj);
      }
    }
  }

  /**
   * Queues {@code obj} for the {@link ResourceLoaderAware} inform() callback.
   *
   * @return true while the loader is not yet live (object queued or ignored), false once live
   */
  public <T> boolean addToResourceLoaderAware(T obj) {
    if (!live) {
      if (obj instanceof ResourceLoaderAware) {
        assertAwareCompatibility(ResourceLoaderAware.class, obj);
        waitingForResources.add((ResourceLoaderAware) obj);
      }
      return true;
    } else {
      return false;
    }
  }

  /**
   * the inform() callback should be invoked on the listener.
If this is 'live', the callback is not
   * called so currently this returns 'false'
   */
  public <T> boolean addToCoreAware(T obj) {
    if (!live) {
      if (obj instanceof SolrCoreAware) {
        assertAwareCompatibility(SolrCoreAware.class, obj);
        waitingForCore.add((SolrCoreAware) obj);
      }
      return true;
    } else {
      return false;
    }
  }

  // Write-once association: a loader may only ever be bound to a single SolrConfig.
  protected final void setSolrConfig(SolrConfig config) {
    if (this.config != null && this.config != config) {
      throw new IllegalStateException("SolrConfig instance is already associated with this loader");
    }
    this.config = config;
  }

  // Write-once association, same contract as setSolrConfig.
  protected final void setCoreContainer(CoreContainer coreContainer) {
    if (this.coreContainer != null && this.coreContainer != coreContainer) {
      throw new IllegalStateException(
          "CoreContainer instance is already associated with this loader");
    }
    this.coreContainer = coreContainer;
  }

  /**
   * Binds this loader to a core: records container/config/name/id and installs a package
   * listener that reloads the core when a package it uses changes.
   */
  protected final void setSolrCore(SolrCore core) {
    setCoreContainer(core.getCoreContainer());
    setSolrConfig(core.getSolrConfig());
    this.coreName = core.getName();
    this.coreId = core.uniqueId;
    SolrCore.Provider coreProvider = core.coreProvider;
    this.coreReloadingClassLoader =
        new PackageListeningClassLoader(
            core.getCoreContainer(), this, pkg -> config.maxPackageVersion(pkg), null) {
          @Override
          protected void doReloadAction(Ctx ctx) {
            log.info("Core reloading classloader issued reload for: {}/{} ", coreName, coreId);
            coreProvider.reload();
          }
        };
    core.getPackageListeners().addListener(coreReloadingClassLoader, true);
  }

  /** Tell all {@link SolrCoreAware} instances about the SolrCore */
  @Override
  public void inform(SolrCore core) {
    if (getSchemaLoader() != null) {
      core.getPackageListeners().addListener(schemaLoader);
    }
    // make a copy to avoid potential deadlock of a callback calling newInstance and trying to
    // add something to waitingForCore.
    // The outer while re-checks because a callback may enqueue further listeners.
    SolrCoreAware[] arr;
    while (waitingForCore.size() > 0) {
      synchronized (waitingForCore) {
        arr = waitingForCore.toArray(new SolrCoreAware[0]);
        waitingForCore.clear();
      }
      for (SolrCoreAware aware : arr) {
        aware.inform(core);
      }
    }
    // this is the last method to be called in SolrCore before the latch is released.
    live = true;
  }

  /** Tell all {@link ResourceLoaderAware} instances about the loader */
  public void inform(ResourceLoader loader) throws IOException {
    // make a copy to avoid potential deadlock of a callback adding to the list
    ResourceLoaderAware[] arr;
    while (waitingForResources.size() > 0) {
      synchronized (waitingForResources) {
        arr = waitingForResources.toArray(new ResourceLoaderAware[0]);
        waitingForResources.clear();
      }
      for (ResourceLoaderAware aware : arr) {
        informAware(loader, aware);
      }
    }
  }

  /**
   * Set the current {@link ResourceLoaderAware} object in thread local so that appropriate
   * classloader can be used for package loaded classes
   */
  public static void informAware(ResourceLoader loader, ResourceLoaderAware aware)
      throws IOException {
    CURRENT_AWARE.set(aware);
    try {
      aware.inform(loader);
    } finally {
      // Always clear the thread-local, even when inform() throws.
      CURRENT_AWARE.remove();
    }
  }

  /**
   * Register any {@link SolrInfoBean}s
   *
   * @param infoRegistry The Info Registry
   */
  public void inform(Map<String, SolrInfoBean> infoRegistry) {
    // this can currently happen concurrently with requests starting and lazy components
    // loading. Make sure infoMBeans doesn't change.
    SolrInfoBean[] arr;
    synchronized (infoMBeans) {
      arr = infoMBeans.toArray(new SolrInfoBean[0]);
      // NOTE(review): this clears waitingForResources -- not infoMBeans -- while holding the
      // infoMBeans lock. Looks like a possible copy/paste slip; confirm the intent against
      // upstream history before changing.
      waitingForResources.clear();
    }
    for (SolrInfoBean bean : arr) {
      // Too slow? I suspect not, but we may need
      // to start tracking this in a Set.
      if (!infoRegistry.containsValue(bean)) {
        try {
          infoRegistry.put(bean.getName(), bean);
        } catch (Exception e) {
          log.warn("could not register MBean '{}'.", bean.getName(), e);
        }
      }
    }
  }

  /**
   * The instance path for this resource loader, as passed in from the constructor.
It's absolute
   * when this is for Solr Home or a Solr Core instance dir.
   */
  public Path getInstancePath() {
    return instanceDir;
  }

  /** Keep a list of classes that are allowed to implement each 'Aware' interface */
  private static final Map<Class<?>, Class<?>[]> awareCompatibility;

  static {
    awareCompatibility = new HashMap<>();
    awareCompatibility.put(
        SolrCoreAware.class,
        new Class<?>[] {
          // DO NOT ADD THINGS TO THIS LIST -- ESPECIALLY THINGS THAT CAN BE CREATED DYNAMICALLY
          // VIA RUNTIME APIS -- UNTIL CAREFULLY CONSIDERING THE ISSUES MENTIONED IN SOLR-8311
          CircuitBreaker.class,
          CodecFactory.class,
          DirectoryFactory.class,
          ManagedIndexSchemaFactory.class,
          QueryResponseWriter.class,
          SearchComponent.class,
          ShardHandlerFactory.class,
          SimilarityFactory.class,
          SolrRequestHandler.class,
          UpdateRequestProcessorFactory.class
        });
    awareCompatibility.put(
        ResourceLoaderAware.class,
        new Class<?>[] {
          // DO NOT ADD THINGS TO THIS LIST -- ESPECIALLY THINGS THAT CAN BE CREATED DYNAMICALLY
          // VIA RUNTIME APIS -- UNTIL CAREFULLY CONSIDERING THE ISSUES MENTIONED IN SOLR-8311
          // evaluate if this must go into schemaResourceLoaderComponents
          CharFilterFactory.class,
          TokenFilterFactory.class,
          TokenizerFactory.class,
          QParserPlugin.class,
          FieldType.class
        });
  }

  /** If these components are trying to load classes, use schema classloader */
  private static final Set<Class<?>> schemaResourceLoaderComponents =
      Set.of(
          CharFilterFactory.class,
          TokenFilterFactory.class,
          TokenizerFactory.class,
          FieldType.class);

  /**
   * Utility function to throw an exception if the class is invalid
   *
   * @return the matching allowed interface/type for {@code obj}
   */
  public static Class<?> assertAwareCompatibility(Class<?> aware, Object obj) {
    Class<?>[] valid = awareCompatibility.get(aware);
    if (valid == null) {
      throw new SolrException(
          SolrException.ErrorCode.SERVER_ERROR, "Unknown Aware interface: " + aware);
    }
    for (Class<?> v : valid) {
      if (v.isInstance(obj)) {
        return v;
      }
    }
    // Not compatible: build a descriptive error listing every allowed type.
    StringBuilder builder = new StringBuilder();
    builder.append("Invalid 'Aware' object: ").append(obj);
    builder.append(" -- ").append(aware.getName());
    builder.append(" must be an instance of: ");
    for (Class<?> v : valid) {
      builder.append("[").append(v.getName()).append("] ");
    }
    throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, builder.toString());
  }

  public CoreContainer getCoreContainer() {
    return coreContainer;
  }

  public SolrConfig getSolrConfig() {
    return config;
  }

  @Override
  public void close() throws IOException {
    // Only closes the class loader; does not touch any queued Aware listeners.
    IOUtils.close(classLoader);
  }

  public List<SolrInfoBean> getInfoMBeans() {
    return Collections.unmodifiableList(infoMBeans);
  }

  /**
   * Load a class using an appropriate {@link SolrResourceLoader} depending of the package on that
   * class
   *
   * @param registerCoreReloadListener register a listener for the package and reload the core if
   *     the package is changed. Use this sparingly. This will result in core reloads across all the
   *     cores in all collections using this configset
   */
  public <T> Class<? extends T> findClass(
      PluginInfo info, Class<T> type, boolean registerCoreReloadListener) {
    // No package: plain lookup through the standard findClass path.
    if (info.cName.pkg == null) return findClass(info.className, type);
    return _classLookup(
        info,
        (Function<SolrPackageLoader.SolrPackage.Version, Class<?
extends T>>) ver -> ver.getLoader().findClass(info.cName.className, type),
        registerCoreReloadListener);
  }

  // Resolves the package version for info's class and applies fun to it, optionally
  // subscribing for core reloads on package change.
  private <T> T _classLookup(
      PluginInfo info,
      Function<SolrPackageLoader.SolrPackage.Version, T> fun,
      boolean registerCoreReloadListener) {
    PluginInfo.ClassName cName = info.cName;
    if (registerCoreReloadListener) {
      if (coreReloadingClassLoader == null) {
        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Core not set");
      }
      return fun.apply(coreReloadingClassLoader.findPackageVersion(cName, true));
    } else {
      // NOTE(review): this branch dereferences coreReloadingClassLoader without the null
      // guard applied above -- an NPE seems possible when no core was set; confirm callers.
      return fun.apply(coreReloadingClassLoader.findPackageVersion(cName, false));
    }
  }

  /**
   * Create an instance of a class using an appropriate {@link SolrResourceLoader} depending on the
   * package of that class
   *
   * @param registerCoreReloadListener register a listener for the package and reload the core if
   *     the package is changed. Use this sparingly. This will result in core reloads across all the
   *     cores in all collections using this configset
   */
  public <T> T newInstance(PluginInfo info, Class<T> type, boolean registerCoreReloadListener) {
    if (info.cName.pkg == null) {
      // Fall back to the type's own name when the plugin info carries no class name.
      return newInstance(
          info.cName.className == null ? type.getName() : info.cName.className, type);
    }
    return _classLookup(
        info,
        version -> version.getLoader().newInstance(info.cName.className, type),
        registerCoreReloadListener);
  }

  // Builds the package-aware schema classloader, or returns null when packages are unavailable.
  private PackageListeningClassLoader createSchemaLoader() {
    if (coreContainer == null || coreContainer.getPackageLoader() == null) {
      // can't load from packages if core container is not available,
      // or if Solr is not in SolrCloud mode
      return null;
    }
    if (config == null) {
      throw new IllegalStateException(
          "cannot create package-aware schema loader - no SolrConfig instance is associated with this loader");
    }
    return new PackageListeningClassLoader(
        coreContainer,
        this,
        pkg -> config.maxPackageVersion(pkg),
        () -> {
          // On package change, re-fetch the latest schema for the bound core (if any).
          if (coreContainer != null && coreName != null && coreId != null) {
            try (SolrCore c = coreContainer.getCore(coreName, coreId)) {
              if (c != null) {
                c.fetchLatestSchema();
              }
            }
          }
        });
  }

  /**
   * Writes {@code content} to {@code resourceName} under the loader's conf dir, creating parent
   * directories as needed, and fsyncs the file afterwards (fsync failures are logged, not thrown).
   */
  public static void persistConfLocally(
      SolrResourceLoader loader, String resourceName, byte[] content) {
    // Persist locally
    Path confFile = loader.getConfigPath().resolve(resourceName);
    try {
      Files.createDirectories(confFile.getParent());
      Files.write(confFile, content);
      log.info("Written conf file {}", resourceName);
    } catch (IOException e) {
      final String msg = "Error persisting conf file " + resourceName;
      log.error(msg, e);
      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, msg, e);
    } finally {
      try {
        IOUtils.fsync(confFile, false);
      } catch (IOException e) {
        final String msg = "Error syncing conf file " + resourceName;
        log.error(msg, e);
      }
    }
  }

  // This is to verify if this requires to use the schema classloader for classes loaded from
  // packages
  private static final ThreadLocal<ResourceLoaderAware> CURRENT_AWARE = new ThreadLocal<>();

  /** A file-backed input stream that also carries the file's last-modified timestamp. */
  public static class SolrFileInputStream extends FilterInputStream {
    // Captured at open time; millis since the epoch.
    private final long lastModified;

    public SolrFileInputStream(Path filePath) throws IOException {
      this(Files.newInputStream(filePath), Files.getLastModifiedTime(filePath).toMillis());
    }

    public SolrFileInputStream(InputStream delegate, long lastModified) {
      super(delegate);
      this.lastModified = lastModified;
    }

    public long getLastModified() {
      return lastModified;
    }
  }
}
googleapis/google-cloud-java
36,799
java-discoveryengine/google-cloud-discoveryengine/src/main/java/com/google/cloud/discoveryengine/v1/stub/ConversationalSearchServiceStubSettings.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.discoveryengine.v1.stub; import static com.google.cloud.discoveryengine.v1.ConversationalSearchServiceClient.ListConversationsPagedResponse; import static com.google.cloud.discoveryengine.v1.ConversationalSearchServiceClient.ListSessionsPagedResponse; import com.google.api.core.ApiFunction; import com.google.api.core.ApiFuture; import com.google.api.core.BetaApi; import com.google.api.core.ObsoleteApi; import com.google.api.gax.core.GaxProperties; import com.google.api.gax.core.GoogleCredentialsProvider; import com.google.api.gax.core.InstantiatingExecutorProvider; import com.google.api.gax.grpc.GaxGrpcProperties; import com.google.api.gax.grpc.GrpcTransportChannel; import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider; import com.google.api.gax.httpjson.GaxHttpJsonProperties; import com.google.api.gax.httpjson.HttpJsonTransportChannel; import com.google.api.gax.httpjson.InstantiatingHttpJsonChannelProvider; import com.google.api.gax.retrying.RetrySettings; import com.google.api.gax.rpc.ApiCallContext; import com.google.api.gax.rpc.ApiClientHeaderProvider; import com.google.api.gax.rpc.ClientContext; import com.google.api.gax.rpc.PageContext; import com.google.api.gax.rpc.PagedCallSettings; import com.google.api.gax.rpc.PagedListDescriptor; import com.google.api.gax.rpc.PagedListResponseFactory; import com.google.api.gax.rpc.ServerStreamingCallSettings; import 
com.google.api.gax.rpc.StatusCode; import com.google.api.gax.rpc.StubSettings; import com.google.api.gax.rpc.TransportChannelProvider; import com.google.api.gax.rpc.UnaryCallSettings; import com.google.api.gax.rpc.UnaryCallable; import com.google.cloud.discoveryengine.v1.Answer; import com.google.cloud.discoveryengine.v1.AnswerQueryRequest; import com.google.cloud.discoveryengine.v1.AnswerQueryResponse; import com.google.cloud.discoveryengine.v1.Conversation; import com.google.cloud.discoveryengine.v1.ConverseConversationRequest; import com.google.cloud.discoveryengine.v1.ConverseConversationResponse; import com.google.cloud.discoveryengine.v1.CreateConversationRequest; import com.google.cloud.discoveryengine.v1.CreateSessionRequest; import com.google.cloud.discoveryengine.v1.DeleteConversationRequest; import com.google.cloud.discoveryengine.v1.DeleteSessionRequest; import com.google.cloud.discoveryengine.v1.GetAnswerRequest; import com.google.cloud.discoveryengine.v1.GetConversationRequest; import com.google.cloud.discoveryengine.v1.GetSessionRequest; import com.google.cloud.discoveryengine.v1.ListConversationsRequest; import com.google.cloud.discoveryengine.v1.ListConversationsResponse; import com.google.cloud.discoveryengine.v1.ListSessionsRequest; import com.google.cloud.discoveryengine.v1.ListSessionsResponse; import com.google.cloud.discoveryengine.v1.Session; import com.google.cloud.discoveryengine.v1.UpdateConversationRequest; import com.google.cloud.discoveryengine.v1.UpdateSessionRequest; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.protobuf.Empty; import java.io.IOException; import java.time.Duration; import java.util.List; import javax.annotation.Generated; // AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link ConversationalSearchServiceStub}. 
* * <p>The default instance has everything set to sensible defaults: * * <ul> * <li>The default service address (discoveryengine.googleapis.com) and default port (443) are * used. * <li>Credentials are acquired automatically through Application Default Credentials. * <li>Retries are configured for idempotent methods but not for non-idempotent methods. * </ul> * * <p>The builder of this class is recursive, so contained classes are themselves builders. When * build() is called, the tree of builders is called to create the complete settings object. * * <p>For example, to set the * [RetrySettings](https://cloud.google.com/java/docs/reference/gax/latest/com.google.api.gax.retrying.RetrySettings) * of converseConversation: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * ConversationalSearchServiceStubSettings.Builder conversationalSearchServiceSettingsBuilder = * ConversationalSearchServiceStubSettings.newBuilder(); * conversationalSearchServiceSettingsBuilder * .converseConversationSettings() * .setRetrySettings( * conversationalSearchServiceSettingsBuilder * .converseConversationSettings() * .getRetrySettings() * .toBuilder() * .setInitialRetryDelayDuration(Duration.ofSeconds(1)) * .setInitialRpcTimeoutDuration(Duration.ofSeconds(5)) * .setMaxAttempts(5) * .setMaxRetryDelayDuration(Duration.ofSeconds(30)) * .setMaxRpcTimeoutDuration(Duration.ofSeconds(60)) * .setRetryDelayMultiplier(1.3) * .setRpcTimeoutMultiplier(1.5) * .setTotalTimeoutDuration(Duration.ofSeconds(300)) * .build()); * ConversationalSearchServiceStubSettings conversationalSearchServiceSettings = * 
conversationalSearchServiceSettingsBuilder.build(); * }</pre> * * Please refer to the [Client Side Retry * Guide](https://github.com/googleapis/google-cloud-java/blob/main/docs/client_retries.md) for * additional support in setting retries. */ @Generated("by gapic-generator-java") public class ConversationalSearchServiceStubSettings extends StubSettings<ConversationalSearchServiceStubSettings> { /** The default scopes of the service. */ private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES = ImmutableList.<String>builder().add("https://www.googleapis.com/auth/cloud-platform").build(); private final UnaryCallSettings<ConverseConversationRequest, ConverseConversationResponse> converseConversationSettings; private final UnaryCallSettings<CreateConversationRequest, Conversation> createConversationSettings; private final UnaryCallSettings<DeleteConversationRequest, Empty> deleteConversationSettings; private final UnaryCallSettings<UpdateConversationRequest, Conversation> updateConversationSettings; private final UnaryCallSettings<GetConversationRequest, Conversation> getConversationSettings; private final PagedCallSettings< ListConversationsRequest, ListConversationsResponse, ListConversationsPagedResponse> listConversationsSettings; private final UnaryCallSettings<AnswerQueryRequest, AnswerQueryResponse> answerQuerySettings; private final ServerStreamingCallSettings<AnswerQueryRequest, AnswerQueryResponse> streamAnswerQuerySettings; private final UnaryCallSettings<GetAnswerRequest, Answer> getAnswerSettings; private final UnaryCallSettings<CreateSessionRequest, Session> createSessionSettings; private final UnaryCallSettings<DeleteSessionRequest, Empty> deleteSessionSettings; private final UnaryCallSettings<UpdateSessionRequest, Session> updateSessionSettings; private final UnaryCallSettings<GetSessionRequest, Session> getSessionSettings; private final PagedCallSettings< ListSessionsRequest, ListSessionsResponse, ListSessionsPagedResponse> 
listSessionsSettings;

  // NOTE(review): this file is auto-generated (gapic-generator-java); change the generator
  // configuration rather than hand-editing. Descriptor telling the paging machinery how to
  // thread page tokens/sizes through ListConversations calls and extract per-page resources.
  private static final PagedListDescriptor<
          ListConversationsRequest, ListConversationsResponse, Conversation>
      LIST_CONVERSATIONS_PAGE_STR_DESC =
          new PagedListDescriptor<
              ListConversationsRequest, ListConversationsResponse, Conversation>() {
            @Override
            public String emptyToken() {
              return "";
            }

            @Override
            public ListConversationsRequest injectToken(
                ListConversationsRequest payload, String token) {
              return ListConversationsRequest.newBuilder(payload).setPageToken(token).build();
            }

            @Override
            public ListConversationsRequest injectPageSize(
                ListConversationsRequest payload, int pageSize) {
              return ListConversationsRequest.newBuilder(payload).setPageSize(pageSize).build();
            }

            @Override
            public Integer extractPageSize(ListConversationsRequest payload) {
              return payload.getPageSize();
            }

            @Override
            public String extractNextToken(ListConversationsResponse payload) {
              return payload.getNextPageToken();
            }

            @Override
            public Iterable<Conversation> extractResources(ListConversationsResponse payload) {
              return payload.getConversationsList();
            }
          };

  // Same shape as above, for the ListSessions RPC.
  private static final PagedListDescriptor<ListSessionsRequest, ListSessionsResponse, Session>
      LIST_SESSIONS_PAGE_STR_DESC =
          new PagedListDescriptor<ListSessionsRequest, ListSessionsResponse, Session>() {
            @Override
            public String emptyToken() {
              return "";
            }

            @Override
            public ListSessionsRequest injectToken(ListSessionsRequest payload, String token) {
              return ListSessionsRequest.newBuilder(payload).setPageToken(token).build();
            }

            @Override
            public ListSessionsRequest injectPageSize(ListSessionsRequest payload, int pageSize) {
              return ListSessionsRequest.newBuilder(payload).setPageSize(pageSize).build();
            }

            @Override
            public Integer extractPageSize(ListSessionsRequest payload) {
              return payload.getPageSize();
            }

            @Override
            public String extractNextToken(ListSessionsResponse payload) {
              return payload.getNextPageToken();
            }

            @Override
            public Iterable<Session> extractResources(ListSessionsResponse payload) {
              return payload.getSessionsList();
            }
          };
private static final PagedListResponseFactory< ListConversationsRequest, ListConversationsResponse, ListConversationsPagedResponse> LIST_CONVERSATIONS_PAGE_STR_FACT = new PagedListResponseFactory< ListConversationsRequest, ListConversationsResponse, ListConversationsPagedResponse>() { @Override public ApiFuture<ListConversationsPagedResponse> getFuturePagedResponse( UnaryCallable<ListConversationsRequest, ListConversationsResponse> callable, ListConversationsRequest request, ApiCallContext context, ApiFuture<ListConversationsResponse> futureResponse) { PageContext<ListConversationsRequest, ListConversationsResponse, Conversation> pageContext = PageContext.create( callable, LIST_CONVERSATIONS_PAGE_STR_DESC, request, context); return ListConversationsPagedResponse.createAsync(pageContext, futureResponse); } }; private static final PagedListResponseFactory< ListSessionsRequest, ListSessionsResponse, ListSessionsPagedResponse> LIST_SESSIONS_PAGE_STR_FACT = new PagedListResponseFactory< ListSessionsRequest, ListSessionsResponse, ListSessionsPagedResponse>() { @Override public ApiFuture<ListSessionsPagedResponse> getFuturePagedResponse( UnaryCallable<ListSessionsRequest, ListSessionsResponse> callable, ListSessionsRequest request, ApiCallContext context, ApiFuture<ListSessionsResponse> futureResponse) { PageContext<ListSessionsRequest, ListSessionsResponse, Session> pageContext = PageContext.create(callable, LIST_SESSIONS_PAGE_STR_DESC, request, context); return ListSessionsPagedResponse.createAsync(pageContext, futureResponse); } }; /** Returns the object with the settings used for calls to converseConversation. */ public UnaryCallSettings<ConverseConversationRequest, ConverseConversationResponse> converseConversationSettings() { return converseConversationSettings; } /** Returns the object with the settings used for calls to createConversation. 
*/ public UnaryCallSettings<CreateConversationRequest, Conversation> createConversationSettings() { return createConversationSettings; } /** Returns the object with the settings used for calls to deleteConversation. */ public UnaryCallSettings<DeleteConversationRequest, Empty> deleteConversationSettings() { return deleteConversationSettings; } /** Returns the object with the settings used for calls to updateConversation. */ public UnaryCallSettings<UpdateConversationRequest, Conversation> updateConversationSettings() { return updateConversationSettings; } /** Returns the object with the settings used for calls to getConversation. */ public UnaryCallSettings<GetConversationRequest, Conversation> getConversationSettings() { return getConversationSettings; } /** Returns the object with the settings used for calls to listConversations. */ public PagedCallSettings< ListConversationsRequest, ListConversationsResponse, ListConversationsPagedResponse> listConversationsSettings() { return listConversationsSettings; } /** Returns the object with the settings used for calls to answerQuery. */ public UnaryCallSettings<AnswerQueryRequest, AnswerQueryResponse> answerQuerySettings() { return answerQuerySettings; } /** Returns the object with the settings used for calls to streamAnswerQuery. */ public ServerStreamingCallSettings<AnswerQueryRequest, AnswerQueryResponse> streamAnswerQuerySettings() { return streamAnswerQuerySettings; } /** Returns the object with the settings used for calls to getAnswer. */ public UnaryCallSettings<GetAnswerRequest, Answer> getAnswerSettings() { return getAnswerSettings; } /** Returns the object with the settings used for calls to createSession. */ public UnaryCallSettings<CreateSessionRequest, Session> createSessionSettings() { return createSessionSettings; } /** Returns the object with the settings used for calls to deleteSession. 
*/ public UnaryCallSettings<DeleteSessionRequest, Empty> deleteSessionSettings() { return deleteSessionSettings; } /** Returns the object with the settings used for calls to updateSession. */ public UnaryCallSettings<UpdateSessionRequest, Session> updateSessionSettings() { return updateSessionSettings; } /** Returns the object with the settings used for calls to getSession. */ public UnaryCallSettings<GetSessionRequest, Session> getSessionSettings() { return getSessionSettings; } /** Returns the object with the settings used for calls to listSessions. */ public PagedCallSettings<ListSessionsRequest, ListSessionsResponse, ListSessionsPagedResponse> listSessionsSettings() { return listSessionsSettings; } public ConversationalSearchServiceStub createStub() throws IOException { if (getTransportChannelProvider() .getTransportName() .equals(GrpcTransportChannel.getGrpcTransportName())) { return GrpcConversationalSearchServiceStub.create(this); } if (getTransportChannelProvider() .getTransportName() .equals(HttpJsonTransportChannel.getHttpJsonTransportName())) { return HttpJsonConversationalSearchServiceStub.create(this); } throw new UnsupportedOperationException( String.format( "Transport not supported: %s", getTransportChannelProvider().getTransportName())); } /** Returns the default service name. */ @Override public String getServiceName() { return "discoveryengine"; } /** Returns a builder for the default ExecutorProvider for this service. */ public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() { return InstantiatingExecutorProvider.newBuilder(); } /** Returns the default service endpoint. */ @ObsoleteApi("Use getEndpoint() instead") public static String getDefaultEndpoint() { return "discoveryengine.googleapis.com:443"; } /** Returns the default mTLS service endpoint. */ public static String getDefaultMtlsEndpoint() { return "discoveryengine.mtls.googleapis.com:443"; } /** Returns the default service scopes. 
*/ public static List<String> getDefaultServiceScopes() { return DEFAULT_SERVICE_SCOPES; } /** Returns a builder for the default credentials for this service. */ public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() { return GoogleCredentialsProvider.newBuilder() .setScopesToApply(DEFAULT_SERVICE_SCOPES) .setUseJwtAccessWithScope(true); } /** Returns a builder for the default gRPC ChannelProvider for this service. */ public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() { return InstantiatingGrpcChannelProvider.newBuilder() .setMaxInboundMessageSize(Integer.MAX_VALUE); } /** Returns a builder for the default REST ChannelProvider for this service. */ @BetaApi public static InstantiatingHttpJsonChannelProvider.Builder defaultHttpJsonTransportProviderBuilder() { return InstantiatingHttpJsonChannelProvider.newBuilder(); } public static TransportChannelProvider defaultTransportChannelProvider() { return defaultGrpcTransportProviderBuilder().build(); } public static ApiClientHeaderProvider.Builder defaultGrpcApiClientHeaderProviderBuilder() { return ApiClientHeaderProvider.newBuilder() .setGeneratedLibToken( "gapic", GaxProperties.getLibraryVersion(ConversationalSearchServiceStubSettings.class)) .setTransportToken( GaxGrpcProperties.getGrpcTokenName(), GaxGrpcProperties.getGrpcVersion()); } public static ApiClientHeaderProvider.Builder defaultHttpJsonApiClientHeaderProviderBuilder() { return ApiClientHeaderProvider.newBuilder() .setGeneratedLibToken( "gapic", GaxProperties.getLibraryVersion(ConversationalSearchServiceStubSettings.class)) .setTransportToken( GaxHttpJsonProperties.getHttpJsonTokenName(), GaxHttpJsonProperties.getHttpJsonVersion()); } public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() { return ConversationalSearchServiceStubSettings.defaultGrpcApiClientHeaderProviderBuilder(); } /** Returns a new gRPC builder for this class. 
*/ public static Builder newBuilder() { return Builder.createDefault(); } /** Returns a new REST builder for this class. */ public static Builder newHttpJsonBuilder() { return Builder.createHttpJsonDefault(); } /** Returns a new builder for this class. */ public static Builder newBuilder(ClientContext clientContext) { return new Builder(clientContext); } /** Returns a builder containing all the values of this settings class. */ public Builder toBuilder() { return new Builder(this); } protected ConversationalSearchServiceStubSettings(Builder settingsBuilder) throws IOException { super(settingsBuilder); converseConversationSettings = settingsBuilder.converseConversationSettings().build(); createConversationSettings = settingsBuilder.createConversationSettings().build(); deleteConversationSettings = settingsBuilder.deleteConversationSettings().build(); updateConversationSettings = settingsBuilder.updateConversationSettings().build(); getConversationSettings = settingsBuilder.getConversationSettings().build(); listConversationsSettings = settingsBuilder.listConversationsSettings().build(); answerQuerySettings = settingsBuilder.answerQuerySettings().build(); streamAnswerQuerySettings = settingsBuilder.streamAnswerQuerySettings().build(); getAnswerSettings = settingsBuilder.getAnswerSettings().build(); createSessionSettings = settingsBuilder.createSessionSettings().build(); deleteSessionSettings = settingsBuilder.deleteSessionSettings().build(); updateSessionSettings = settingsBuilder.updateSessionSettings().build(); getSessionSettings = settingsBuilder.getSessionSettings().build(); listSessionsSettings = settingsBuilder.listSessionsSettings().build(); } /** Builder for ConversationalSearchServiceStubSettings. 
*/ public static class Builder extends StubSettings.Builder<ConversationalSearchServiceStubSettings, Builder> { private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders; private final UnaryCallSettings.Builder< ConverseConversationRequest, ConverseConversationResponse> converseConversationSettings; private final UnaryCallSettings.Builder<CreateConversationRequest, Conversation> createConversationSettings; private final UnaryCallSettings.Builder<DeleteConversationRequest, Empty> deleteConversationSettings; private final UnaryCallSettings.Builder<UpdateConversationRequest, Conversation> updateConversationSettings; private final UnaryCallSettings.Builder<GetConversationRequest, Conversation> getConversationSettings; private final PagedCallSettings.Builder< ListConversationsRequest, ListConversationsResponse, ListConversationsPagedResponse> listConversationsSettings; private final UnaryCallSettings.Builder<AnswerQueryRequest, AnswerQueryResponse> answerQuerySettings; private final ServerStreamingCallSettings.Builder<AnswerQueryRequest, AnswerQueryResponse> streamAnswerQuerySettings; private final UnaryCallSettings.Builder<GetAnswerRequest, Answer> getAnswerSettings; private final UnaryCallSettings.Builder<CreateSessionRequest, Session> createSessionSettings; private final UnaryCallSettings.Builder<DeleteSessionRequest, Empty> deleteSessionSettings; private final UnaryCallSettings.Builder<UpdateSessionRequest, Session> updateSessionSettings; private final UnaryCallSettings.Builder<GetSessionRequest, Session> getSessionSettings; private final PagedCallSettings.Builder< ListSessionsRequest, ListSessionsResponse, ListSessionsPagedResponse> listSessionsSettings; private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>> RETRYABLE_CODE_DEFINITIONS; static { ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions = ImmutableMap.builder(); definitions.put( "retry_policy_1_codes", 
ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList(StatusCode.Code.UNAVAILABLE))); RETRYABLE_CODE_DEFINITIONS = definitions.build(); } private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS; static { ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder(); RetrySettings settings = null; settings = RetrySettings.newBuilder() .setInitialRetryDelayDuration(Duration.ofMillis(1000L)) .setRetryDelayMultiplier(1.3) .setMaxRetryDelayDuration(Duration.ofMillis(10000L)) .setInitialRpcTimeoutDuration(Duration.ofMillis(30000L)) .setRpcTimeoutMultiplier(1.0) .setMaxRpcTimeoutDuration(Duration.ofMillis(30000L)) .setTotalTimeoutDuration(Duration.ofMillis(30000L)) .build(); definitions.put("retry_policy_1_params", settings); RETRY_PARAM_DEFINITIONS = definitions.build(); } protected Builder() { this(((ClientContext) null)); } protected Builder(ClientContext clientContext) { super(clientContext); converseConversationSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); createConversationSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); deleteConversationSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); updateConversationSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); getConversationSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); listConversationsSettings = PagedCallSettings.newBuilder(LIST_CONVERSATIONS_PAGE_STR_FACT); answerQuerySettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); streamAnswerQuerySettings = ServerStreamingCallSettings.newBuilder(); getAnswerSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); createSessionSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); deleteSessionSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); updateSessionSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); getSessionSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); listSessionsSettings = 
PagedCallSettings.newBuilder(LIST_SESSIONS_PAGE_STR_FACT); unaryMethodSettingsBuilders = ImmutableList.<UnaryCallSettings.Builder<?, ?>>of( converseConversationSettings, createConversationSettings, deleteConversationSettings, updateConversationSettings, getConversationSettings, listConversationsSettings, answerQuerySettings, getAnswerSettings, createSessionSettings, deleteSessionSettings, updateSessionSettings, getSessionSettings, listSessionsSettings); initDefaults(this); } protected Builder(ConversationalSearchServiceStubSettings settings) { super(settings); converseConversationSettings = settings.converseConversationSettings.toBuilder(); createConversationSettings = settings.createConversationSettings.toBuilder(); deleteConversationSettings = settings.deleteConversationSettings.toBuilder(); updateConversationSettings = settings.updateConversationSettings.toBuilder(); getConversationSettings = settings.getConversationSettings.toBuilder(); listConversationsSettings = settings.listConversationsSettings.toBuilder(); answerQuerySettings = settings.answerQuerySettings.toBuilder(); streamAnswerQuerySettings = settings.streamAnswerQuerySettings.toBuilder(); getAnswerSettings = settings.getAnswerSettings.toBuilder(); createSessionSettings = settings.createSessionSettings.toBuilder(); deleteSessionSettings = settings.deleteSessionSettings.toBuilder(); updateSessionSettings = settings.updateSessionSettings.toBuilder(); getSessionSettings = settings.getSessionSettings.toBuilder(); listSessionsSettings = settings.listSessionsSettings.toBuilder(); unaryMethodSettingsBuilders = ImmutableList.<UnaryCallSettings.Builder<?, ?>>of( converseConversationSettings, createConversationSettings, deleteConversationSettings, updateConversationSettings, getConversationSettings, listConversationsSettings, answerQuerySettings, getAnswerSettings, createSessionSettings, deleteSessionSettings, updateSessionSettings, getSessionSettings, listSessionsSettings); } private static Builder 
createDefault() { Builder builder = new Builder(((ClientContext) null)); builder.setTransportChannelProvider(defaultTransportChannelProvider()); builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build()); builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build()); builder.setMtlsEndpoint(getDefaultMtlsEndpoint()); builder.setSwitchToMtlsEndpointAllowed(true); return initDefaults(builder); } private static Builder createHttpJsonDefault() { Builder builder = new Builder(((ClientContext) null)); builder.setTransportChannelProvider(defaultHttpJsonTransportProviderBuilder().build()); builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build()); builder.setInternalHeaderProvider(defaultHttpJsonApiClientHeaderProviderBuilder().build()); builder.setMtlsEndpoint(getDefaultMtlsEndpoint()); builder.setSwitchToMtlsEndpointAllowed(true); return initDefaults(builder); } private static Builder initDefaults(Builder builder) { builder .converseConversationSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params")); builder .createConversationSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params")); builder .deleteConversationSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params")); builder .updateConversationSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params")); builder .getConversationSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params")); builder .listConversationsSettings() 
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params")); builder .answerQuerySettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params")); builder .streamAnswerQuerySettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params")); builder .getAnswerSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params")); builder .createSessionSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params")); builder .deleteSessionSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params")); builder .updateSessionSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params")); builder .getSessionSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params")); builder .listSessionsSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params")); return builder; } /** * Applies the given settings updater function to all of the unary API methods in this service. * * <p>Note: This method does not support applying settings to streaming methods. 
*/ public Builder applyToAllUnaryMethods( ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) { super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater); return this; } public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() { return unaryMethodSettingsBuilders; } /** Returns the builder for the settings used for calls to converseConversation. */ public UnaryCallSettings.Builder<ConverseConversationRequest, ConverseConversationResponse> converseConversationSettings() { return converseConversationSettings; } /** Returns the builder for the settings used for calls to createConversation. */ public UnaryCallSettings.Builder<CreateConversationRequest, Conversation> createConversationSettings() { return createConversationSettings; } /** Returns the builder for the settings used for calls to deleteConversation. */ public UnaryCallSettings.Builder<DeleteConversationRequest, Empty> deleteConversationSettings() { return deleteConversationSettings; } /** Returns the builder for the settings used for calls to updateConversation. */ public UnaryCallSettings.Builder<UpdateConversationRequest, Conversation> updateConversationSettings() { return updateConversationSettings; } /** Returns the builder for the settings used for calls to getConversation. */ public UnaryCallSettings.Builder<GetConversationRequest, Conversation> getConversationSettings() { return getConversationSettings; } /** Returns the builder for the settings used for calls to listConversations. */ public PagedCallSettings.Builder< ListConversationsRequest, ListConversationsResponse, ListConversationsPagedResponse> listConversationsSettings() { return listConversationsSettings; } /** Returns the builder for the settings used for calls to answerQuery. */ public UnaryCallSettings.Builder<AnswerQueryRequest, AnswerQueryResponse> answerQuerySettings() { return answerQuerySettings; } /** Returns the builder for the settings used for calls to streamAnswerQuery. 
*/ public ServerStreamingCallSettings.Builder<AnswerQueryRequest, AnswerQueryResponse> streamAnswerQuerySettings() { return streamAnswerQuerySettings; } /** Returns the builder for the settings used for calls to getAnswer. */ public UnaryCallSettings.Builder<GetAnswerRequest, Answer> getAnswerSettings() { return getAnswerSettings; } /** Returns the builder for the settings used for calls to createSession. */ public UnaryCallSettings.Builder<CreateSessionRequest, Session> createSessionSettings() { return createSessionSettings; } /** Returns the builder for the settings used for calls to deleteSession. */ public UnaryCallSettings.Builder<DeleteSessionRequest, Empty> deleteSessionSettings() { return deleteSessionSettings; } /** Returns the builder for the settings used for calls to updateSession. */ public UnaryCallSettings.Builder<UpdateSessionRequest, Session> updateSessionSettings() { return updateSessionSettings; } /** Returns the builder for the settings used for calls to getSession. */ public UnaryCallSettings.Builder<GetSessionRequest, Session> getSessionSettings() { return getSessionSettings; } /** Returns the builder for the settings used for calls to listSessions. */ public PagedCallSettings.Builder< ListSessionsRequest, ListSessionsResponse, ListSessionsPagedResponse> listSessionsSettings() { return listSessionsSettings; } @Override public ConversationalSearchServiceStubSettings build() throws IOException { return new ConversationalSearchServiceStubSettings(this); } } }
googleapis/google-api-java-client-services
36,582
clients/google-api-services-appengine/v1beta/1.30.1/com/google/api/services/appengine/model/Version.java
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * This code was generated by https://github.com/googleapis/google-api-java-client-services/ * Modify at your own risk. */ package com.google.api.services.appengine.model; /** * A Version resource is a specific set of source code and configuration files that are deployed * into a service. * * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is * transmitted over HTTP when working with the App Engine Admin API. For a detailed explanation see: * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a> * </p> * * @author Google, Inc. */ @SuppressWarnings("javadoc") public final class Version extends com.google.api.client.json.GenericJson { /** * Serving configuration for Google Cloud Endpoints * (https://cloud.google.com/appengine/docs/python/endpoints/).Only returned in GET requests if * view=FULL is set. * The value may be {@code null}. */ @com.google.api.client.util.Key private ApiConfigHandler apiConfig; /** * app_engine_apis allows Second Generation runtimes to access the App Engine APIs. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean appEngineApis; /** * Automatic scaling is based on request rate, response latencies, and other application metrics. 
* Instances are dynamically created and destroyed as needed in order to handle traffic. * The value may be {@code null}. */ @com.google.api.client.util.Key private AutomaticScaling automaticScaling; /** * A service with basic scaling will create an instance when the application receives a request. * The instance will be turned down when the app becomes idle. Basic scaling is ideal for work * that is intermittent or driven by user activity. * The value may be {@code null}. */ @com.google.api.client.util.Key private BasicScaling basicScaling; /** * Metadata settings that are supplied to this version to enable beta runtime features. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.Map<String, java.lang.String> betaSettings; /** * Environment variables available to the build environment.Only returned in GET requests if * view=FULL is set. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.Map<String, java.lang.String> buildEnvVariables; /** * Time that this version was created.@OutputOnly * The value may be {@code null}. */ @com.google.api.client.util.Key private String createTime; /** * Email address of the user who created this version.@OutputOnly * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String createdBy; /** * Duration that static files should be cached by web proxies and browsers. Only applicable if the * corresponding StaticFilesHandler (https://cloud.google.com/appengine/docs/admin- * api/reference/rest/v1beta/apps.services.versions#StaticFilesHandler) does not specify its own * expiration time.Only returned in GET requests if view=FULL is set. * The value may be {@code null}. */ @com.google.api.client.util.Key private String defaultExpiration; /** * Code and application artifacts that make up this version.Only returned in GET requests if * view=FULL is set. * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private Deployment deployment; /** * Total size in bytes of all the files that are included in this version and currently hosted on * the App Engine disk.@OutputOnly * The value may be {@code null}. */ @com.google.api.client.util.Key @com.google.api.client.json.JsonString private java.lang.Long diskUsageBytes; /** * Cloud Endpoints configuration.If endpoints_api_service is set, the Cloud Endpoints Extensible * Service Proxy will be provided to serve the API implemented by the app. * The value may be {@code null}. */ @com.google.api.client.util.Key private EndpointsApiService endpointsApiService; /** * The entrypoint for the application. * The value may be {@code null}. */ @com.google.api.client.util.Key private Entrypoint entrypoint; /** * App Engine execution environment for this version.Defaults to standard. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String env; /** * Environment variables available to the application.Only returned in GET requests if view=FULL * is set. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.Map<String, java.lang.String> envVariables; /** * Custom static error pages. Limited to 10KB per page.Only returned in GET requests if view=FULL * is set. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<ErrorHandler> errorHandlers; static { // hack to force ProGuard to consider ErrorHandler used, since otherwise it would be stripped out // see https://github.com/google/google-api-java-client/issues/543 com.google.api.client.util.Data.nullOf(ErrorHandler.class); } /** * An ordered list of URL-matching patterns that should be applied to incoming requests. The first * matching URL handles the request and other request handlers are not attempted.Only returned in * GET requests if view=FULL is set. * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private java.util.List<UrlMap> handlers; static { // hack to force ProGuard to consider UrlMap used, since otherwise it would be stripped out // see https://github.com/google/google-api-java-client/issues/543 com.google.api.client.util.Data.nullOf(UrlMap.class); } /** * Configures health checking for instances. Unhealthy instances are stopped and replaced with new * instances. Only applicable in the App Engine flexible environment.Only returned in GET requests * if view=FULL is set. * The value may be {@code null}. */ @com.google.api.client.util.Key private HealthCheck healthCheck; /** * Relative name of the version within the service. Example: v1. Version names can contain only * lowercase letters, numbers, or hyphens. Reserved names: "default", "latest", and any name with * the prefix "ah-". * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String id; /** * Before an application can receive email or XMPP messages, the application must be configured to * enable the service. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<java.lang.String> inboundServices; /** * Instance class that is used to run this version. Valid values are: AutomaticScaling: F1, F2, * F4, F4_1G ManualScaling or BasicScaling: B1, B2, B4, B8, B4_1GDefaults to F1 for * AutomaticScaling and B1 for ManualScaling or BasicScaling. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String instanceClass; /** * Configuration for third-party Python runtime libraries that are required by the * application.Only returned in GET requests if view=FULL is set. * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private java.util.List<Library> libraries; static { // hack to force ProGuard to consider Library used, since otherwise it would be stripped out // see https://github.com/google/google-api-java-client/issues/543 com.google.api.client.util.Data.nullOf(Library.class); } /** * Configures liveness health checking for instances. Unhealthy instances are stopped and replaced * with new instancesOnly returned in GET requests if view=FULL is set. * The value may be {@code null}. */ @com.google.api.client.util.Key private LivenessCheck livenessCheck; /** * A service with manual scaling runs continuously, allowing you to perform complex initialization * and rely on the state of its memory over time. Manually scaled versions are sometimes referred * to as "backends". * The value may be {@code null}. */ @com.google.api.client.util.Key private ManualScaling manualScaling; /** * Full path to the Version resource in the API. Example: * apps/myapp/services/default/versions/v1.@OutputOnly * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String name; /** * Extra network settings. Only applicable in the App Engine flexible environment. * The value may be {@code null}. */ @com.google.api.client.util.Key private Network network; /** * Files that match this pattern will not be built into this version. Only applicable for Go * runtimes.Only returned in GET requests if view=FULL is set. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String nobuildFilesRegex; /** * Configures readiness health checking for instances. Unhealthy instances are not put into the * backend traffic rotation.Only returned in GET requests if view=FULL is set. * The value may be {@code null}. */ @com.google.api.client.util.Key private ReadinessCheck readinessCheck; /** * Machine resources for this version. Only applicable in the App Engine flexible environment. * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private Resources resources; /** * Desired runtime. Example: python27. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String runtime; /** * The version of the API in the given runtime environment. Please see the app.yaml reference for * valid values at https://cloud.google.com/appengine/docs/standard//config/appref * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String runtimeApiVersion; /** * The channel of the runtime to use. Only available for some runtimes. Defaults to the default * channel. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String runtimeChannel; /** * The path or name of the app's main executable. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String runtimeMainExecutablePath; /** * Current serving status of this version. Only the versions with a SERVING status create * instances and can be billed.SERVING_STATUS_UNSPECIFIED is an invalid value. Defaults to * SERVING. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String servingStatus; /** * Whether multiple requests can be dispatched to this version at once. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean threadsafe; /** * Serving URL for this version. Example: "https://myversion-dot-myservice-dot- * myapp.appspot.com"@OutputOnly * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String versionUrl; /** * Whether to deploy this version in a container on a virtual machine. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean vm; /** * Enables VPC connectivity for standard apps. * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private VpcAccessConnector vpcAccessConnector; /** * The Google Compute Engine zones that are supported by this version in the App Engine flexible * environment. Deprecated. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<java.lang.String> zones; /** * Serving configuration for Google Cloud Endpoints * (https://cloud.google.com/appengine/docs/python/endpoints/).Only returned in GET requests if * view=FULL is set. * @return value or {@code null} for none */ public ApiConfigHandler getApiConfig() { return apiConfig; } /** * Serving configuration for Google Cloud Endpoints * (https://cloud.google.com/appengine/docs/python/endpoints/).Only returned in GET requests if * view=FULL is set. * @param apiConfig apiConfig or {@code null} for none */ public Version setApiConfig(ApiConfigHandler apiConfig) { this.apiConfig = apiConfig; return this; } /** * app_engine_apis allows Second Generation runtimes to access the App Engine APIs. * @return value or {@code null} for none */ public java.lang.Boolean getAppEngineApis() { return appEngineApis; } /** * app_engine_apis allows Second Generation runtimes to access the App Engine APIs. * @param appEngineApis appEngineApis or {@code null} for none */ public Version setAppEngineApis(java.lang.Boolean appEngineApis) { this.appEngineApis = appEngineApis; return this; } /** * Automatic scaling is based on request rate, response latencies, and other application metrics. * Instances are dynamically created and destroyed as needed in order to handle traffic. * @return value or {@code null} for none */ public AutomaticScaling getAutomaticScaling() { return automaticScaling; } /** * Automatic scaling is based on request rate, response latencies, and other application metrics. * Instances are dynamically created and destroyed as needed in order to handle traffic. 
* @param automaticScaling automaticScaling or {@code null} for none */ public Version setAutomaticScaling(AutomaticScaling automaticScaling) { this.automaticScaling = automaticScaling; return this; } /** * A service with basic scaling will create an instance when the application receives a request. * The instance will be turned down when the app becomes idle. Basic scaling is ideal for work * that is intermittent or driven by user activity. * @return value or {@code null} for none */ public BasicScaling getBasicScaling() { return basicScaling; } /** * A service with basic scaling will create an instance when the application receives a request. * The instance will be turned down when the app becomes idle. Basic scaling is ideal for work * that is intermittent or driven by user activity. * @param basicScaling basicScaling or {@code null} for none */ public Version setBasicScaling(BasicScaling basicScaling) { this.basicScaling = basicScaling; return this; } /** * Metadata settings that are supplied to this version to enable beta runtime features. * @return value or {@code null} for none */ public java.util.Map<String, java.lang.String> getBetaSettings() { return betaSettings; } /** * Metadata settings that are supplied to this version to enable beta runtime features. * @param betaSettings betaSettings or {@code null} for none */ public Version setBetaSettings(java.util.Map<String, java.lang.String> betaSettings) { this.betaSettings = betaSettings; return this; } /** * Environment variables available to the build environment.Only returned in GET requests if * view=FULL is set. * @return value or {@code null} for none */ public java.util.Map<String, java.lang.String> getBuildEnvVariables() { return buildEnvVariables; } /** * Environment variables available to the build environment.Only returned in GET requests if * view=FULL is set. 
* @param buildEnvVariables buildEnvVariables or {@code null} for none */ public Version setBuildEnvVariables(java.util.Map<String, java.lang.String> buildEnvVariables) { this.buildEnvVariables = buildEnvVariables; return this; } /** * Time that this version was created.@OutputOnly * @return value or {@code null} for none */ public String getCreateTime() { return createTime; } /** * Time that this version was created.@OutputOnly * @param createTime createTime or {@code null} for none */ public Version setCreateTime(String createTime) { this.createTime = createTime; return this; } /** * Email address of the user who created this version.@OutputOnly * @return value or {@code null} for none */ public java.lang.String getCreatedBy() { return createdBy; } /** * Email address of the user who created this version.@OutputOnly * @param createdBy createdBy or {@code null} for none */ public Version setCreatedBy(java.lang.String createdBy) { this.createdBy = createdBy; return this; } /** * Duration that static files should be cached by web proxies and browsers. Only applicable if the * corresponding StaticFilesHandler (https://cloud.google.com/appengine/docs/admin- * api/reference/rest/v1beta/apps.services.versions#StaticFilesHandler) does not specify its own * expiration time.Only returned in GET requests if view=FULL is set. * @return value or {@code null} for none */ public String getDefaultExpiration() { return defaultExpiration; } /** * Duration that static files should be cached by web proxies and browsers. Only applicable if the * corresponding StaticFilesHandler (https://cloud.google.com/appengine/docs/admin- * api/reference/rest/v1beta/apps.services.versions#StaticFilesHandler) does not specify its own * expiration time.Only returned in GET requests if view=FULL is set. 
* @param defaultExpiration defaultExpiration or {@code null} for none */ public Version setDefaultExpiration(String defaultExpiration) { this.defaultExpiration = defaultExpiration; return this; } /** * Code and application artifacts that make up this version.Only returned in GET requests if * view=FULL is set. * @return value or {@code null} for none */ public Deployment getDeployment() { return deployment; } /** * Code and application artifacts that make up this version.Only returned in GET requests if * view=FULL is set. * @param deployment deployment or {@code null} for none */ public Version setDeployment(Deployment deployment) { this.deployment = deployment; return this; } /** * Total size in bytes of all the files that are included in this version and currently hosted on * the App Engine disk.@OutputOnly * @return value or {@code null} for none */ public java.lang.Long getDiskUsageBytes() { return diskUsageBytes; } /** * Total size in bytes of all the files that are included in this version and currently hosted on * the App Engine disk.@OutputOnly * @param diskUsageBytes diskUsageBytes or {@code null} for none */ public Version setDiskUsageBytes(java.lang.Long diskUsageBytes) { this.diskUsageBytes = diskUsageBytes; return this; } /** * Cloud Endpoints configuration.If endpoints_api_service is set, the Cloud Endpoints Extensible * Service Proxy will be provided to serve the API implemented by the app. * @return value or {@code null} for none */ public EndpointsApiService getEndpointsApiService() { return endpointsApiService; } /** * Cloud Endpoints configuration.If endpoints_api_service is set, the Cloud Endpoints Extensible * Service Proxy will be provided to serve the API implemented by the app. * @param endpointsApiService endpointsApiService or {@code null} for none */ public Version setEndpointsApiService(EndpointsApiService endpointsApiService) { this.endpointsApiService = endpointsApiService; return this; } /** * The entrypoint for the application. 
* @return value or {@code null} for none */ public Entrypoint getEntrypoint() { return entrypoint; } /** * The entrypoint for the application. * @param entrypoint entrypoint or {@code null} for none */ public Version setEntrypoint(Entrypoint entrypoint) { this.entrypoint = entrypoint; return this; } /** * App Engine execution environment for this version.Defaults to standard. * @return value or {@code null} for none */ public java.lang.String getEnv() { return env; } /** * App Engine execution environment for this version.Defaults to standard. * @param env env or {@code null} for none */ public Version setEnv(java.lang.String env) { this.env = env; return this; } /** * Environment variables available to the application.Only returned in GET requests if view=FULL * is set. * @return value or {@code null} for none */ public java.util.Map<String, java.lang.String> getEnvVariables() { return envVariables; } /** * Environment variables available to the application.Only returned in GET requests if view=FULL * is set. * @param envVariables envVariables or {@code null} for none */ public Version setEnvVariables(java.util.Map<String, java.lang.String> envVariables) { this.envVariables = envVariables; return this; } /** * Custom static error pages. Limited to 10KB per page.Only returned in GET requests if view=FULL * is set. * @return value or {@code null} for none */ public java.util.List<ErrorHandler> getErrorHandlers() { return errorHandlers; } /** * Custom static error pages. Limited to 10KB per page.Only returned in GET requests if view=FULL * is set. * @param errorHandlers errorHandlers or {@code null} for none */ public Version setErrorHandlers(java.util.List<ErrorHandler> errorHandlers) { this.errorHandlers = errorHandlers; return this; } /** * An ordered list of URL-matching patterns that should be applied to incoming requests. 
The first * matching URL handles the request and other request handlers are not attempted.Only returned in * GET requests if view=FULL is set. * @return value or {@code null} for none */ public java.util.List<UrlMap> getHandlers() { return handlers; } /** * An ordered list of URL-matching patterns that should be applied to incoming requests. The first * matching URL handles the request and other request handlers are not attempted.Only returned in * GET requests if view=FULL is set. * @param handlers handlers or {@code null} for none */ public Version setHandlers(java.util.List<UrlMap> handlers) { this.handlers = handlers; return this; } /** * Configures health checking for instances. Unhealthy instances are stopped and replaced with new * instances. Only applicable in the App Engine flexible environment.Only returned in GET requests * if view=FULL is set. * @return value or {@code null} for none */ public HealthCheck getHealthCheck() { return healthCheck; } /** * Configures health checking for instances. Unhealthy instances are stopped and replaced with new * instances. Only applicable in the App Engine flexible environment.Only returned in GET requests * if view=FULL is set. * @param healthCheck healthCheck or {@code null} for none */ public Version setHealthCheck(HealthCheck healthCheck) { this.healthCheck = healthCheck; return this; } /** * Relative name of the version within the service. Example: v1. Version names can contain only * lowercase letters, numbers, or hyphens. Reserved names: "default", "latest", and any name with * the prefix "ah-". * @return value or {@code null} for none */ public java.lang.String getId() { return id; } /** * Relative name of the version within the service. Example: v1. Version names can contain only * lowercase letters, numbers, or hyphens. Reserved names: "default", "latest", and any name with * the prefix "ah-". 
* @param id id or {@code null} for none */ public Version setId(java.lang.String id) { this.id = id; return this; } /** * Before an application can receive email or XMPP messages, the application must be configured to * enable the service. * @return value or {@code null} for none */ public java.util.List<java.lang.String> getInboundServices() { return inboundServices; } /** * Before an application can receive email or XMPP messages, the application must be configured to * enable the service. * @param inboundServices inboundServices or {@code null} for none */ public Version setInboundServices(java.util.List<java.lang.String> inboundServices) { this.inboundServices = inboundServices; return this; } /** * Instance class that is used to run this version. Valid values are: AutomaticScaling: F1, F2, * F4, F4_1G ManualScaling or BasicScaling: B1, B2, B4, B8, B4_1GDefaults to F1 for * AutomaticScaling and B1 for ManualScaling or BasicScaling. * @return value or {@code null} for none */ public java.lang.String getInstanceClass() { return instanceClass; } /** * Instance class that is used to run this version. Valid values are: AutomaticScaling: F1, F2, * F4, F4_1G ManualScaling or BasicScaling: B1, B2, B4, B8, B4_1GDefaults to F1 for * AutomaticScaling and B1 for ManualScaling or BasicScaling. * @param instanceClass instanceClass or {@code null} for none */ public Version setInstanceClass(java.lang.String instanceClass) { this.instanceClass = instanceClass; return this; } /** * Configuration for third-party Python runtime libraries that are required by the * application.Only returned in GET requests if view=FULL is set. * @return value or {@code null} for none */ public java.util.List<Library> getLibraries() { return libraries; } /** * Configuration for third-party Python runtime libraries that are required by the * application.Only returned in GET requests if view=FULL is set. 
* @param libraries libraries or {@code null} for none */ public Version setLibraries(java.util.List<Library> libraries) { this.libraries = libraries; return this; } /** * Configures liveness health checking for instances. Unhealthy instances are stopped and replaced * with new instancesOnly returned in GET requests if view=FULL is set. * @return value or {@code null} for none */ public LivenessCheck getLivenessCheck() { return livenessCheck; } /** * Configures liveness health checking for instances. Unhealthy instances are stopped and replaced * with new instancesOnly returned in GET requests if view=FULL is set. * @param livenessCheck livenessCheck or {@code null} for none */ public Version setLivenessCheck(LivenessCheck livenessCheck) { this.livenessCheck = livenessCheck; return this; } /** * A service with manual scaling runs continuously, allowing you to perform complex initialization * and rely on the state of its memory over time. Manually scaled versions are sometimes referred * to as "backends". * @return value or {@code null} for none */ public ManualScaling getManualScaling() { return manualScaling; } /** * A service with manual scaling runs continuously, allowing you to perform complex initialization * and rely on the state of its memory over time. Manually scaled versions are sometimes referred * to as "backends". * @param manualScaling manualScaling or {@code null} for none */ public Version setManualScaling(ManualScaling manualScaling) { this.manualScaling = manualScaling; return this; } /** * Full path to the Version resource in the API. Example: * apps/myapp/services/default/versions/v1.@OutputOnly * @return value or {@code null} for none */ public java.lang.String getName() { return name; } /** * Full path to the Version resource in the API. 
Example: * apps/myapp/services/default/versions/v1.@OutputOnly * @param name name or {@code null} for none */ public Version setName(java.lang.String name) { this.name = name; return this; } /** * Extra network settings. Only applicable in the App Engine flexible environment. * @return value or {@code null} for none */ public Network getNetwork() { return network; } /** * Extra network settings. Only applicable in the App Engine flexible environment. * @param network network or {@code null} for none */ public Version setNetwork(Network network) { this.network = network; return this; } /** * Files that match this pattern will not be built into this version. Only applicable for Go * runtimes.Only returned in GET requests if view=FULL is set. * @return value or {@code null} for none */ public java.lang.String getNobuildFilesRegex() { return nobuildFilesRegex; } /** * Files that match this pattern will not be built into this version. Only applicable for Go * runtimes.Only returned in GET requests if view=FULL is set. * @param nobuildFilesRegex nobuildFilesRegex or {@code null} for none */ public Version setNobuildFilesRegex(java.lang.String nobuildFilesRegex) { this.nobuildFilesRegex = nobuildFilesRegex; return this; } /** * Configures readiness health checking for instances. Unhealthy instances are not put into the * backend traffic rotation.Only returned in GET requests if view=FULL is set. * @return value or {@code null} for none */ public ReadinessCheck getReadinessCheck() { return readinessCheck; } /** * Configures readiness health checking for instances. Unhealthy instances are not put into the * backend traffic rotation.Only returned in GET requests if view=FULL is set. * @param readinessCheck readinessCheck or {@code null} for none */ public Version setReadinessCheck(ReadinessCheck readinessCheck) { this.readinessCheck = readinessCheck; return this; } /** * Machine resources for this version. Only applicable in the App Engine flexible environment. 
* @return value or {@code null} for none */ public Resources getResources() { return resources; } /** * Machine resources for this version. Only applicable in the App Engine flexible environment. * @param resources resources or {@code null} for none */ public Version setResources(Resources resources) { this.resources = resources; return this; } /** * Desired runtime. Example: python27. * @return value or {@code null} for none */ public java.lang.String getRuntime() { return runtime; } /** * Desired runtime. Example: python27. * @param runtime runtime or {@code null} for none */ public Version setRuntime(java.lang.String runtime) { this.runtime = runtime; return this; } /** * The version of the API in the given runtime environment. Please see the app.yaml reference for * valid values at https://cloud.google.com/appengine/docs/standard//config/appref * @return value or {@code null} for none */ public java.lang.String getRuntimeApiVersion() { return runtimeApiVersion; } /** * The version of the API in the given runtime environment. Please see the app.yaml reference for * valid values at https://cloud.google.com/appengine/docs/standard//config/appref * @param runtimeApiVersion runtimeApiVersion or {@code null} for none */ public Version setRuntimeApiVersion(java.lang.String runtimeApiVersion) { this.runtimeApiVersion = runtimeApiVersion; return this; } /** * The channel of the runtime to use. Only available for some runtimes. Defaults to the default * channel. * @return value or {@code null} for none */ public java.lang.String getRuntimeChannel() { return runtimeChannel; } /** * The channel of the runtime to use. Only available for some runtimes. Defaults to the default * channel. * @param runtimeChannel runtimeChannel or {@code null} for none */ public Version setRuntimeChannel(java.lang.String runtimeChannel) { this.runtimeChannel = runtimeChannel; return this; } /** * The path or name of the app's main executable. 
* @return value or {@code null} for none */ public java.lang.String getRuntimeMainExecutablePath() { return runtimeMainExecutablePath; } /** * The path or name of the app's main executable. * @param runtimeMainExecutablePath runtimeMainExecutablePath or {@code null} for none */ public Version setRuntimeMainExecutablePath(java.lang.String runtimeMainExecutablePath) { this.runtimeMainExecutablePath = runtimeMainExecutablePath; return this; } /** * Current serving status of this version. Only the versions with a SERVING status create * instances and can be billed.SERVING_STATUS_UNSPECIFIED is an invalid value. Defaults to * SERVING. * @return value or {@code null} for none */ public java.lang.String getServingStatus() { return servingStatus; } /** * Current serving status of this version. Only the versions with a SERVING status create * instances and can be billed.SERVING_STATUS_UNSPECIFIED is an invalid value. Defaults to * SERVING. * @param servingStatus servingStatus or {@code null} for none */ public Version setServingStatus(java.lang.String servingStatus) { this.servingStatus = servingStatus; return this; } /** * Whether multiple requests can be dispatched to this version at once. * @return value or {@code null} for none */ public java.lang.Boolean getThreadsafe() { return threadsafe; } /** * Whether multiple requests can be dispatched to this version at once. * @param threadsafe threadsafe or {@code null} for none */ public Version setThreadsafe(java.lang.Boolean threadsafe) { this.threadsafe = threadsafe; return this; } /** * Serving URL for this version. Example: "https://myversion-dot-myservice-dot- * myapp.appspot.com"@OutputOnly * @return value or {@code null} for none */ public java.lang.String getVersionUrl() { return versionUrl; } /** * Serving URL for this version. 
Example: "https://myversion-dot-myservice-dot- * myapp.appspot.com"@OutputOnly * @param versionUrl versionUrl or {@code null} for none */ public Version setVersionUrl(java.lang.String versionUrl) { this.versionUrl = versionUrl; return this; } /** * Whether to deploy this version in a container on a virtual machine. * @return value or {@code null} for none */ public java.lang.Boolean getVm() { return vm; } /** * Whether to deploy this version in a container on a virtual machine. * @param vm vm or {@code null} for none */ public Version setVm(java.lang.Boolean vm) { this.vm = vm; return this; } /** * Enables VPC connectivity for standard apps. * @return value or {@code null} for none */ public VpcAccessConnector getVpcAccessConnector() { return vpcAccessConnector; } /** * Enables VPC connectivity for standard apps. * @param vpcAccessConnector vpcAccessConnector or {@code null} for none */ public Version setVpcAccessConnector(VpcAccessConnector vpcAccessConnector) { this.vpcAccessConnector = vpcAccessConnector; return this; } /** * The Google Compute Engine zones that are supported by this version in the App Engine flexible * environment. Deprecated. * @return value or {@code null} for none */ public java.util.List<java.lang.String> getZones() { return zones; } /** * The Google Compute Engine zones that are supported by this version in the App Engine flexible * environment. Deprecated. * @param zones zones or {@code null} for none */ public Version setZones(java.util.List<java.lang.String> zones) { this.zones = zones; return this; } @Override public Version set(String fieldName, Object value) { return (Version) super.set(fieldName, value); } @Override public Version clone() { return (Version) super.clone(); } }
googleapis/google-cloud-java
36,553
java-visionai/proto-google-cloud-visionai-v1/src/main/java/com/google/cloud/visionai/v1/ListAnnotationsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/visionai/v1/warehouse.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.visionai.v1; /** * * * <pre> * Request message for ListAnnotations API. * </pre> * * Protobuf type {@code google.cloud.visionai.v1.ListAnnotationsResponse} */ public final class ListAnnotationsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.visionai.v1.ListAnnotationsResponse) ListAnnotationsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListAnnotationsResponse.newBuilder() to construct. 
private ListAnnotationsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListAnnotationsResponse() { annotations_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListAnnotationsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.visionai.v1.WarehouseProto .internal_static_google_cloud_visionai_v1_ListAnnotationsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.visionai.v1.WarehouseProto .internal_static_google_cloud_visionai_v1_ListAnnotationsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.visionai.v1.ListAnnotationsResponse.class, com.google.cloud.visionai.v1.ListAnnotationsResponse.Builder.class); } public static final int ANNOTATIONS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.visionai.v1.Annotation> annotations_; /** * * * <pre> * The annotations from the specified asset. * </pre> * * <code>repeated .google.cloud.visionai.v1.Annotation annotations = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.visionai.v1.Annotation> getAnnotationsList() { return annotations_; } /** * * * <pre> * The annotations from the specified asset. * </pre> * * <code>repeated .google.cloud.visionai.v1.Annotation annotations = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.visionai.v1.AnnotationOrBuilder> getAnnotationsOrBuilderList() { return annotations_; } /** * * * <pre> * The annotations from the specified asset. 
* </pre> * * <code>repeated .google.cloud.visionai.v1.Annotation annotations = 1;</code> */ @java.lang.Override public int getAnnotationsCount() { return annotations_.size(); } /** * * * <pre> * The annotations from the specified asset. * </pre> * * <code>repeated .google.cloud.visionai.v1.Annotation annotations = 1;</code> */ @java.lang.Override public com.google.cloud.visionai.v1.Annotation getAnnotations(int index) { return annotations_.get(index); } /** * * * <pre> * The annotations from the specified asset. * </pre> * * <code>repeated .google.cloud.visionai.v1.Annotation annotations = 1;</code> */ @java.lang.Override public com.google.cloud.visionai.v1.AnnotationOrBuilder getAnnotationsOrBuilder(int index) { return annotations_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < annotations_.size(); i++) { output.writeMessage(1, annotations_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < annotations_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, annotations_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.visionai.v1.ListAnnotationsResponse)) { return super.equals(obj); } com.google.cloud.visionai.v1.ListAnnotationsResponse other = (com.google.cloud.visionai.v1.ListAnnotationsResponse) obj; if (!getAnnotationsList().equals(other.getAnnotationsList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) 
return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getAnnotationsCount() > 0) { hash = (37 * hash) + ANNOTATIONS_FIELD_NUMBER; hash = (53 * hash) + getAnnotationsList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.visionai.v1.ListAnnotationsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.visionai.v1.ListAnnotationsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.visionai.v1.ListAnnotationsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.visionai.v1.ListAnnotationsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.visionai.v1.ListAnnotationsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.visionai.v1.ListAnnotationsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, 
extensionRegistry); } public static com.google.cloud.visionai.v1.ListAnnotationsResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.visionai.v1.ListAnnotationsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.visionai.v1.ListAnnotationsResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.visionai.v1.ListAnnotationsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.visionai.v1.ListAnnotationsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.visionai.v1.ListAnnotationsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.visionai.v1.ListAnnotationsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder 
toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for ListAnnotations API. * </pre> * * Protobuf type {@code google.cloud.visionai.v1.ListAnnotationsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.visionai.v1.ListAnnotationsResponse) com.google.cloud.visionai.v1.ListAnnotationsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.visionai.v1.WarehouseProto .internal_static_google_cloud_visionai_v1_ListAnnotationsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.visionai.v1.WarehouseProto .internal_static_google_cloud_visionai_v1_ListAnnotationsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.visionai.v1.ListAnnotationsResponse.class, com.google.cloud.visionai.v1.ListAnnotationsResponse.Builder.class); } // Construct using com.google.cloud.visionai.v1.ListAnnotationsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (annotationsBuilder_ == null) { annotations_ = java.util.Collections.emptyList(); } else { annotations_ = null; annotationsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.visionai.v1.WarehouseProto 
.internal_static_google_cloud_visionai_v1_ListAnnotationsResponse_descriptor; } @java.lang.Override public com.google.cloud.visionai.v1.ListAnnotationsResponse getDefaultInstanceForType() { return com.google.cloud.visionai.v1.ListAnnotationsResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.visionai.v1.ListAnnotationsResponse build() { com.google.cloud.visionai.v1.ListAnnotationsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.visionai.v1.ListAnnotationsResponse buildPartial() { com.google.cloud.visionai.v1.ListAnnotationsResponse result = new com.google.cloud.visionai.v1.ListAnnotationsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.visionai.v1.ListAnnotationsResponse result) { if (annotationsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { annotations_ = java.util.Collections.unmodifiableList(annotations_); bitField0_ = (bitField0_ & ~0x00000001); } result.annotations_ = annotations_; } else { result.annotations_ = annotationsBuilder_.build(); } } private void buildPartial0(com.google.cloud.visionai.v1.ListAnnotationsResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } 
@java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.visionai.v1.ListAnnotationsResponse) { return mergeFrom((com.google.cloud.visionai.v1.ListAnnotationsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.visionai.v1.ListAnnotationsResponse other) { if (other == com.google.cloud.visionai.v1.ListAnnotationsResponse.getDefaultInstance()) return this; if (annotationsBuilder_ == null) { if (!other.annotations_.isEmpty()) { if (annotations_.isEmpty()) { annotations_ = other.annotations_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureAnnotationsIsMutable(); annotations_.addAll(other.annotations_); } onChanged(); } } else { if (!other.annotations_.isEmpty()) { if (annotationsBuilder_.isEmpty()) { annotationsBuilder_.dispose(); annotationsBuilder_ = null; annotations_ = other.annotations_; bitField0_ = (bitField0_ & ~0x00000001); annotationsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getAnnotationsFieldBuilder() : null; } else { annotationsBuilder_.addAllMessages(other.annotations_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.visionai.v1.Annotation m = input.readMessage( com.google.cloud.visionai.v1.Annotation.parser(), extensionRegistry); if (annotationsBuilder_ == null) { ensureAnnotationsIsMutable(); annotations_.add(m); } else { annotationsBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.visionai.v1.Annotation> annotations_ = java.util.Collections.emptyList(); private void ensureAnnotationsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { annotations_ = new java.util.ArrayList<com.google.cloud.visionai.v1.Annotation>(annotations_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.visionai.v1.Annotation, com.google.cloud.visionai.v1.Annotation.Builder, 
com.google.cloud.visionai.v1.AnnotationOrBuilder> annotationsBuilder_; /** * * * <pre> * The annotations from the specified asset. * </pre> * * <code>repeated .google.cloud.visionai.v1.Annotation annotations = 1;</code> */ public java.util.List<com.google.cloud.visionai.v1.Annotation> getAnnotationsList() { if (annotationsBuilder_ == null) { return java.util.Collections.unmodifiableList(annotations_); } else { return annotationsBuilder_.getMessageList(); } } /** * * * <pre> * The annotations from the specified asset. * </pre> * * <code>repeated .google.cloud.visionai.v1.Annotation annotations = 1;</code> */ public int getAnnotationsCount() { if (annotationsBuilder_ == null) { return annotations_.size(); } else { return annotationsBuilder_.getCount(); } } /** * * * <pre> * The annotations from the specified asset. * </pre> * * <code>repeated .google.cloud.visionai.v1.Annotation annotations = 1;</code> */ public com.google.cloud.visionai.v1.Annotation getAnnotations(int index) { if (annotationsBuilder_ == null) { return annotations_.get(index); } else { return annotationsBuilder_.getMessage(index); } } /** * * * <pre> * The annotations from the specified asset. * </pre> * * <code>repeated .google.cloud.visionai.v1.Annotation annotations = 1;</code> */ public Builder setAnnotations(int index, com.google.cloud.visionai.v1.Annotation value) { if (annotationsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAnnotationsIsMutable(); annotations_.set(index, value); onChanged(); } else { annotationsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The annotations from the specified asset. 
* </pre> * * <code>repeated .google.cloud.visionai.v1.Annotation annotations = 1;</code> */ public Builder setAnnotations( int index, com.google.cloud.visionai.v1.Annotation.Builder builderForValue) { if (annotationsBuilder_ == null) { ensureAnnotationsIsMutable(); annotations_.set(index, builderForValue.build()); onChanged(); } else { annotationsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The annotations from the specified asset. * </pre> * * <code>repeated .google.cloud.visionai.v1.Annotation annotations = 1;</code> */ public Builder addAnnotations(com.google.cloud.visionai.v1.Annotation value) { if (annotationsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAnnotationsIsMutable(); annotations_.add(value); onChanged(); } else { annotationsBuilder_.addMessage(value); } return this; } /** * * * <pre> * The annotations from the specified asset. * </pre> * * <code>repeated .google.cloud.visionai.v1.Annotation annotations = 1;</code> */ public Builder addAnnotations(int index, com.google.cloud.visionai.v1.Annotation value) { if (annotationsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAnnotationsIsMutable(); annotations_.add(index, value); onChanged(); } else { annotationsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The annotations from the specified asset. * </pre> * * <code>repeated .google.cloud.visionai.v1.Annotation annotations = 1;</code> */ public Builder addAnnotations(com.google.cloud.visionai.v1.Annotation.Builder builderForValue) { if (annotationsBuilder_ == null) { ensureAnnotationsIsMutable(); annotations_.add(builderForValue.build()); onChanged(); } else { annotationsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The annotations from the specified asset. 
* </pre> * * <code>repeated .google.cloud.visionai.v1.Annotation annotations = 1;</code> */ public Builder addAnnotations( int index, com.google.cloud.visionai.v1.Annotation.Builder builderForValue) { if (annotationsBuilder_ == null) { ensureAnnotationsIsMutable(); annotations_.add(index, builderForValue.build()); onChanged(); } else { annotationsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The annotations from the specified asset. * </pre> * * <code>repeated .google.cloud.visionai.v1.Annotation annotations = 1;</code> */ public Builder addAllAnnotations( java.lang.Iterable<? extends com.google.cloud.visionai.v1.Annotation> values) { if (annotationsBuilder_ == null) { ensureAnnotationsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, annotations_); onChanged(); } else { annotationsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The annotations from the specified asset. * </pre> * * <code>repeated .google.cloud.visionai.v1.Annotation annotations = 1;</code> */ public Builder clearAnnotations() { if (annotationsBuilder_ == null) { annotations_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { annotationsBuilder_.clear(); } return this; } /** * * * <pre> * The annotations from the specified asset. * </pre> * * <code>repeated .google.cloud.visionai.v1.Annotation annotations = 1;</code> */ public Builder removeAnnotations(int index) { if (annotationsBuilder_ == null) { ensureAnnotationsIsMutable(); annotations_.remove(index); onChanged(); } else { annotationsBuilder_.remove(index); } return this; } /** * * * <pre> * The annotations from the specified asset. 
* </pre> * * <code>repeated .google.cloud.visionai.v1.Annotation annotations = 1;</code> */ public com.google.cloud.visionai.v1.Annotation.Builder getAnnotationsBuilder(int index) { return getAnnotationsFieldBuilder().getBuilder(index); } /** * * * <pre> * The annotations from the specified asset. * </pre> * * <code>repeated .google.cloud.visionai.v1.Annotation annotations = 1;</code> */ public com.google.cloud.visionai.v1.AnnotationOrBuilder getAnnotationsOrBuilder(int index) { if (annotationsBuilder_ == null) { return annotations_.get(index); } else { return annotationsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The annotations from the specified asset. * </pre> * * <code>repeated .google.cloud.visionai.v1.Annotation annotations = 1;</code> */ public java.util.List<? extends com.google.cloud.visionai.v1.AnnotationOrBuilder> getAnnotationsOrBuilderList() { if (annotationsBuilder_ != null) { return annotationsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(annotations_); } } /** * * * <pre> * The annotations from the specified asset. * </pre> * * <code>repeated .google.cloud.visionai.v1.Annotation annotations = 1;</code> */ public com.google.cloud.visionai.v1.Annotation.Builder addAnnotationsBuilder() { return getAnnotationsFieldBuilder() .addBuilder(com.google.cloud.visionai.v1.Annotation.getDefaultInstance()); } /** * * * <pre> * The annotations from the specified asset. * </pre> * * <code>repeated .google.cloud.visionai.v1.Annotation annotations = 1;</code> */ public com.google.cloud.visionai.v1.Annotation.Builder addAnnotationsBuilder(int index) { return getAnnotationsFieldBuilder() .addBuilder(index, com.google.cloud.visionai.v1.Annotation.getDefaultInstance()); } /** * * * <pre> * The annotations from the specified asset. 
* </pre> * * <code>repeated .google.cloud.visionai.v1.Annotation annotations = 1;</code> */ public java.util.List<com.google.cloud.visionai.v1.Annotation.Builder> getAnnotationsBuilderList() { return getAnnotationsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.visionai.v1.Annotation, com.google.cloud.visionai.v1.Annotation.Builder, com.google.cloud.visionai.v1.AnnotationOrBuilder> getAnnotationsFieldBuilder() { if (annotationsBuilder_ == null) { annotationsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.visionai.v1.Annotation, com.google.cloud.visionai.v1.Annotation.Builder, com.google.cloud.visionai.v1.AnnotationOrBuilder>( annotations_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); annotations_ = null; } return annotationsBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.visionai.v1.ListAnnotationsResponse) } // @@protoc_insertion_point(class_scope:google.cloud.visionai.v1.ListAnnotationsResponse) private static final com.google.cloud.visionai.v1.ListAnnotationsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.visionai.v1.ListAnnotationsResponse(); } public static com.google.cloud.visionai.v1.ListAnnotationsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListAnnotationsResponse> PARSER = new com.google.protobuf.AbstractParser<ListAnnotationsResponse>() { @java.lang.Override public ListAnnotationsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static 
com.google.protobuf.Parser<ListAnnotationsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListAnnotationsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.visionai.v1.ListAnnotationsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
// --- NOTE(review): dataset-concatenation residue, not Java source. The three lines below are the
// repo_id / size / file_path columns of the next embedded file; commented out so they no longer
// break parsing, original text preserved verbatim:
// apache/harmony
// 36,708
// classlib/modules/swing/src/test/api/java.injected/javax/swing/table/DefaultTableModelTest.java
// NOTE(review): second, unrelated file embedded in this blob — Apache Harmony's unit tests for
// javax.swing.table.DefaultTableModel. The source was collapsed onto a few very long lines; the
// class is cut off below (testMoveRow opens but never closes here), so the code is left
// byte-identical and only review comments are added. TestTableModelListener and
// BasicSwingTableTestCase are project-local test helpers not visible in this chunk.
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * @author Anton Avtamonov */ package javax.swing.table; import java.util.Arrays; import java.util.Vector; import javax.swing.event.TableModelEvent; @SuppressWarnings("unchecked") public class DefaultTableModelTest extends BasicSwingTableTestCase { private DefaultTableModel model; public DefaultTableModelTest(final String name) { super(name); } @Override protected void setUp() throws Exception { model = new DefaultTableModel(); } @Override protected void tearDown() throws Exception { model = null; } public void testDefaultTableModel() throws Exception { assertNotNull(model.dataVector); assertNotNull(model.columnIdentifiers); assertEquals(0, model.dataVector.size()); assertEquals(0, model.columnIdentifiers.size()); model = new DefaultTableModel(3, 4); assertEquals(4, model.columnIdentifiers.size()); assertNull(model.columnIdentifiers.get(0)); assertNull(model.columnIdentifiers.get(3)); assertEquals(3, model.dataVector.size()); assertTrue(model.dataVector.get(0) instanceof Vector); assertTrue(model.dataVector.get(2) instanceof Vector); assertEquals(4, ((Vector) model.dataVector.get(0)).size()); assertEquals(4, ((Vector) model.dataVector.get(2)).size()); assertNull(((Vector)
model.dataVector.get(0)).get(0)); assertNull(((Vector) model.dataVector.get(2)).get(3)); model = new DefaultTableModel(new Object[] { "col1", "col2", "col3" }, 4); assertEquals(3, model.columnIdentifiers.size()); assertEquals("col2", model.columnIdentifiers.get(1)); assertEquals(4, model.dataVector.size()); Vector columnNames = new Vector(Arrays.asList(new Object[] { "col1", "col2", "col3" })); model = new DefaultTableModel(columnNames, 4); assertTrue(columnNames == model.columnIdentifiers); assertEquals(3, model.columnIdentifiers.size()); assertEquals("col2", model.columnIdentifiers.get(1)); assertEquals(4, model.dataVector.size()); Vector dataVector = new Vector(); dataVector .add(new Vector(Arrays.asList(new Object[] { "data11", "data12", "data13" }))); dataVector .add(new Vector(Arrays.asList(new Object[] { "data21", "data22", "data23" }))); model = new DefaultTableModel(dataVector, new Vector(Arrays.asList(new Object[] { "col1", "col2", "col3" }))); assertTrue(dataVector == model.dataVector); assertEquals(3, model.columnIdentifiers.size()); assertEquals(2, model.dataVector.size()); model = new DefaultTableModel(null, new Vector(Arrays.asList(new Object[] { "col1", "col2", "col3" }))); assertNotNull(model.dataVector); assertEquals(0, model.dataVector.size()); model = new DefaultTableModel(dataVector, null); assertNotNull(model.columnIdentifiers); assertEquals(0, model.columnIdentifiers.size()); assertEquals(2, model.dataVector.size()); assertEquals(0, ((Vector) model.dataVector.get(0)).size()); model = new DefaultTableModel(new Object[][] { { "data11", "data12", "data13" }, { "data21", "data22", "data23" } }, new Object[] { "col1", "col2", "col3" }); assertEquals(3, model.columnIdentifiers.size()); assertEquals(2, model.dataVector.size()); } public void testGetSetDataVector() throws Exception { Vector dataVector = new Vector(); dataVector .add(new Vector(Arrays.asList(new Object[] { "data11", "data12", "data13" }))); dataVector .add(new Vector(Arrays.asList(new
Object[] { "data21", "data22", "data23" }))); model = new DefaultTableModel(dataVector, new Vector(Arrays.asList(new Object[] { "col1", "col2", "col3" }))); assertTrue(dataVector == model.getDataVector()); TestTableModelListener listener = new TestTableModelListener(); model.addTableModelListener(listener); dataVector = new Vector(); dataVector.add(new Vector(Arrays.asList(new Object[] { "value11", "value12" }))); dataVector.add(new Vector(Arrays .asList(new Object[] { "value21", "value22", "value23" }))); dataVector.add(new Vector(Arrays.asList(new Object[] { "value31" }))); model.setDataVector(dataVector, new Vector(Arrays .asList(new Object[] { "col1", "col2" }))); assertTrue(dataVector == model.getDataVector()); assertTrue(dataVector == model.dataVector); assertEquals(2, model.columnIdentifiers.size()); assertEquals(2, ((Vector) model.getDataVector().get(0)).size()); assertEquals(2, ((Vector) model.getDataVector().get(1)).size()); assertEquals(2, ((Vector) model.getDataVector().get(2)).size()); assertEquals("value22", ((Vector) model.getDataVector().get(1)).get(1)); assertNull(((Vector) model.getDataVector().get(2)).get(1)); assertTrue(listener.eventOccured()); assertEquals(model, listener.getEvent().getSource()); assertEquals(TableModelEvent.HEADER_ROW, listener.getEvent().getFirstRow()); assertEquals(TableModelEvent.HEADER_ROW, listener.getEvent().getLastRow()); assertEquals(TableModelEvent.ALL_COLUMNS, listener.getEvent().getColumn()); assertEquals(TableModelEvent.UPDATE, listener.getEvent().getType()); listener.reset(); model.addTableModelListener(listener); Object[][] dataArray = new Object[][] { new Object[] { "value11", "value12" }, new Object[] { "value21", "value22", "value23" }, new Object[] { "value31" } }; model.setDataVector(dataArray, new Object[] { "a" }); assertEquals(2, model.columnIdentifiers.size()); assertEquals(2, ((Vector) model.dataVector.get(0)).size()); assertEquals(2, ((Vector) model.dataVector.get(1)).size());
// NOTE(review): the assertions below check that setDataVector sizes every row to the 2 declared
// columns — cells a short row lacks read back as null.
assertEquals(2, ((Vector) model.dataVector.get(2)).size()); assertEquals("value22", ((Vector) model.dataVector.get(1)).get(1)); assertNull(((Vector) model.dataVector.get(2)).get(1)); assertTrue(listener.eventOccured()); assertEquals(model, listener.getEvent().getSource()); assertEquals(TableModelEvent.HEADER_ROW, listener.getEvent().getFirstRow()); assertEquals(TableModelEvent.HEADER_ROW, listener.getEvent().getLastRow()); assertEquals(TableModelEvent.ALL_COLUMNS, listener.getEvent().getColumn()); assertEquals(TableModelEvent.UPDATE, listener.getEvent().getType()); } public void testNewDataAvailable() throws Exception { TestTableModelListener listener = new TestTableModelListener(); model.addTableModelListener(listener); TableModelEvent event = new TableModelEvent(model); model.newDataAvailable(event); assertTrue(listener.eventOccured()); assertEquals(event, listener.getEvent()); } public void testNewRowsAdded() throws Exception { TestTableModelListener listener = new TestTableModelListener(); model.addTableModelListener(listener); model.columnIdentifiers = new Vector(Arrays.asList(new Object[] { "col1", "col2" })); model.dataVector = new Vector(); model.dataVector.add(new Vector(Arrays.asList(new Object[] { "value11", "value12" }))); model.dataVector.add(new Vector(Arrays.asList(new Object[] { "value21", "value22", "value23" }))); model.dataVector.add(new Vector(Arrays.asList(new Object[] { "value31" }))); TableModelEvent event = new TableModelEvent(model, 1, 2, TableModelEvent.HEADER_ROW, TableModelEvent.DELETE); model.newRowsAdded(event); assertEquals(2, model.columnIdentifiers.size()); assertEquals(2, ((Vector) model.getDataVector().get(0)).size()); assertEquals(2, ((Vector) model.getDataVector().get(1)).size()); assertEquals(2, ((Vector) model.getDataVector().get(2)).size()); assertTrue(listener.eventOccured()); assertEquals(event, listener.getEvent()); } public void testRowsRemoved() throws Exception { TestTableModelListener listener = new
TestTableModelListener(); model.addTableModelListener(listener); TableModelEvent event = new TableModelEvent(model); model.rowsRemoved(event); assertTrue(listener.eventOccured()); assertEquals(event, listener.getEvent()); } public void testSetNumRowsSetGetRowCount() throws Exception { assertEquals(0, model.getRowCount()); TestTableModelListener listener = new TestTableModelListener(); model.addTableModelListener(listener); model.columnIdentifiers = new Vector(Arrays.asList(new Object[] { "col1", "col2" })); model.dataVector = new Vector(); model.dataVector.add(new Vector(Arrays.asList(new Object[] { "value11", "value12" }))); model.dataVector.add(new Vector(Arrays.asList(new Object[] { "value21", "value22", "value23" }))); model.dataVector.add(new Vector(Arrays.asList(new Object[] { "value31" }))); assertEquals(3, model.getRowCount()); listener.reset(); model.setRowCount(5); assertEquals(5, model.getDataVector().size()); assertEquals(5, model.getRowCount()); assertTrue(listener.eventOccured()); assertEquals(model, listener.getEvent().getSource()); assertEquals(3, listener.getEvent().getFirstRow()); assertEquals(4, listener.getEvent().getLastRow()); assertEquals(TableModelEvent.ALL_COLUMNS, listener.getEvent().getColumn()); assertEquals(TableModelEvent.INSERT, listener.getEvent().getType()); assertEquals(3, ((Vector) model.dataVector.get(1)).size()); assertEquals(1, ((Vector) model.dataVector.get(2)).size()); assertEquals(2, ((Vector) model.dataVector.get(3)).size()); assertEquals(2, ((Vector) model.dataVector.get(4)).size()); listener.reset(); model.setRowCount(5); assertEquals(5, model.getDataVector().size()); assertFalse(listener.eventOccured()); listener.reset(); model.setNumRows(1); assertEquals(1, model.getDataVector().size()); assertEquals(1, model.getRowCount()); assertTrue(listener.eventOccured()); assertEquals(model, listener.getEvent().getSource()); assertEquals(1, listener.getEvent().getFirstRow()); assertEquals(4, listener.getEvent().getLastRow());
// NOTE(review): shrinking from 5 rows to 1 via the deprecated setNumRows is asserted above/below
// to be reported as a single DELETE event covering rows 1..4 (setNumRows is the obsolete alias of
// setRowCount — confirm against the DefaultTableModel javadoc).
assertEquals(TableModelEvent.ALL_COLUMNS, listener.getEvent().getColumn()); assertEquals(TableModelEvent.DELETE, listener.getEvent().getType()); } public void testAddRow() throws Exception { TestTableModelListener listener = new TestTableModelListener(); model.addTableModelListener(listener); model.columnIdentifiers = new Vector(Arrays.asList(new Object[] { "col1", "col2" })); model.dataVector = new Vector(); model.dataVector.add(new Vector(Arrays.asList(new Object[] { "value11", "value12" }))); model.dataVector.add(new Vector(Arrays.asList(new Object[] { "value21", "value22", "value23" }))); model.dataVector.add(new Vector(Arrays.asList(new Object[] { "value31" }))); listener.reset(); model.addRow((Vector) null); assertEquals(4, model.getDataVector().size()); assertEquals(3, ((Vector) model.dataVector.get(1)).size()); assertEquals(1, ((Vector) model.dataVector.get(2)).size()); assertEquals(2, ((Vector) model.dataVector.get(3)).size()); assertNull(((Vector) model.dataVector.get(3)).get(0)); assertNull(((Vector) model.dataVector.get(3)).get(1)); assertTrue(listener.eventOccured()); assertEquals(model, listener.getEvent().getSource()); assertEquals(3, listener.getEvent().getFirstRow()); assertEquals(3, listener.getEvent().getLastRow()); assertEquals(TableModelEvent.ALL_COLUMNS, listener.getEvent().getColumn()); assertEquals(TableModelEvent.INSERT, listener.getEvent().getType()); listener.reset(); model.addRow(new Object[] { "a" }); assertEquals(5, model.getDataVector().size()); assertEquals(3, ((Vector) model.dataVector.get(1)).size()); assertEquals(1, ((Vector) model.dataVector.get(2)).size()); assertEquals(2, ((Vector) model.dataVector.get(3)).size()); assertEquals(2, ((Vector) model.dataVector.get(4)).size()); assertEquals("a", ((Vector) model.dataVector.get(4)).get(0)); assertNull(((Vector) model.dataVector.get(4)).get(1)); } public void testInsertRow() throws Exception { TestTableModelListener listener = new TestTableModelListener();
// NOTE(review): testInsertRow asserts that inserting a null Vector yields a row padded with nulls
// to the column count, later rows shift down one index, and an out-of-range index raises
// ArrayIndexOutOfBoundsException.
model.addTableModelListener(listener); model.columnIdentifiers = new Vector(Arrays.asList(new Object[] { "col1", "col2" })); model.dataVector = new Vector(); model.dataVector.add(new Vector(Arrays.asList(new Object[] { "value11", "value12" }))); model.dataVector.add(new Vector(Arrays.asList(new Object[] { "value21", "value22", "value23" }))); model.dataVector.add(new Vector(Arrays.asList(new Object[] { "value31" }))); listener.reset(); model.insertRow(1, (Vector) null); assertEquals(4, model.getDataVector().size()); assertEquals(2, ((Vector) model.dataVector.get(0)).size()); assertEquals(2, ((Vector) model.dataVector.get(1)).size()); assertEquals(3, ((Vector) model.dataVector.get(2)).size()); assertEquals(1, ((Vector) model.dataVector.get(3)).size()); assertNull(((Vector) model.dataVector.get(1)).get(0)); assertNull(((Vector) model.dataVector.get(1)).get(1)); assertTrue(listener.eventOccured()); assertEquals(model, listener.getEvent().getSource()); assertEquals(1, listener.getEvent().getFirstRow()); assertEquals(1, listener.getEvent().getLastRow()); assertEquals(TableModelEvent.ALL_COLUMNS, listener.getEvent().getColumn()); assertEquals(TableModelEvent.INSERT, listener.getEvent().getType()); listener.reset(); model.insertRow(0, new Object[] { "a", "b", "c" }); assertEquals(5, model.getDataVector().size()); assertEquals(2, ((Vector) model.dataVector.get(0)).size()); assertEquals(2, ((Vector) model.dataVector.get(1)).size()); assertEquals(2, ((Vector) model.dataVector.get(2)).size()); assertEquals(3, ((Vector) model.dataVector.get(3)).size()); assertEquals(1, ((Vector) model.dataVector.get(4)).size()); assertEquals("a", ((Vector) model.dataVector.get(0)).get(0)); assertEquals("b", ((Vector) model.dataVector.get(0)).get(1)); testExceptionalCase(new ArrayIndexOutOfBoundsExceptionalCase() { @Override public void exceptionalAction() throws Exception { model.insertRow(6, new Object[] { "a", "b", "c" }); } }); } public void testMoveRow() throws Exception {
TestTableModelListener listener = new TestTableModelListener(); model.addTableModelListener(listener); model.columnIdentifiers = new Vector(Arrays.asList(new Object[] { "col1", "col2" })); model.dataVector = new Vector(); model.dataVector.add(new Vector(Arrays.asList(new Object[] { "value11" }))); model.dataVector.add(new Vector(Arrays.asList(new Object[] { "value21", "value22" }))); model.dataVector.add(new Vector(Arrays.asList(new Object[] { "value31", "value32", "value33" }))); model.dataVector.add(new Vector(Arrays.asList(new Object[] { "value41", "value42", "value43", "value44" }))); model.dataVector.add(new Vector(Arrays.asList(new Object[] { "value51", "value52", "value53", "value54", "value55" }))); listener.reset(); model.moveRow(0, 1, 1); assertEquals(5, model.getDataVector().size()); assertEquals(3, ((Vector) model.dataVector.get(0)).size()); assertEquals(1, ((Vector) model.dataVector.get(1)).size()); assertEquals(2, ((Vector) model.dataVector.get(2)).size()); assertEquals(4, ((Vector) model.dataVector.get(3)).size()); assertEquals(5, ((Vector) model.dataVector.get(4)).size()); assertTrue(listener.eventOccured()); assertEquals(model, listener.getEvent().getSource()); assertEquals(0, listener.getEvent().getFirstRow()); assertEquals(2, listener.getEvent().getLastRow()); assertEquals(TableModelEvent.ALL_COLUMNS, listener.getEvent().getColumn()); assertEquals(TableModelEvent.UPDATE, listener.getEvent().getType()); listener.reset(); model.moveRow(1, 1, 2); assertEquals(5, model.getDataVector().size()); assertEquals(3, ((Vector) model.dataVector.get(0)).size()); assertEquals(2, ((Vector) model.dataVector.get(1)).size()); assertEquals(1, ((Vector) model.dataVector.get(2)).size()); assertEquals(4, ((Vector) model.dataVector.get(3)).size()); assertEquals(5, ((Vector) model.dataVector.get(4)).size()); assertTrue(listener.eventOccured()); assertEquals(model, listener.getEvent().getSource()); assertEquals(1, listener.getEvent().getFirstRow()); assertEquals(2, 
listener.getEvent().getLastRow()); assertEquals(TableModelEvent.ALL_COLUMNS, listener.getEvent().getColumn()); assertEquals(TableModelEvent.UPDATE, listener.getEvent().getType()); listener.reset(); model.moveRow(2, 2, 0); assertEquals(5, model.getDataVector().size()); assertEquals(1, ((Vector) model.dataVector.get(0)).size()); assertEquals(3, ((Vector) model.dataVector.get(1)).size()); assertEquals(2, ((Vector) model.dataVector.get(2)).size()); assertEquals(4, ((Vector) model.dataVector.get(3)).size()); assertEquals(5, ((Vector) model.dataVector.get(4)).size()); assertTrue(listener.eventOccured()); assertEquals(model, listener.getEvent().getSource()); assertEquals(0, listener.getEvent().getFirstRow()); assertEquals(2, listener.getEvent().getLastRow()); assertEquals(TableModelEvent.ALL_COLUMNS, listener.getEvent().getColumn()); assertEquals(TableModelEvent.UPDATE, listener.getEvent().getType()); listener.reset(); model.moveRow(0, 3, 1); assertEquals(5, model.getDataVector().size()); assertEquals(5, ((Vector) model.dataVector.get(0)).size()); assertEquals(1, ((Vector) model.dataVector.get(1)).size()); assertEquals(3, ((Vector) model.dataVector.get(2)).size()); assertEquals(2, ((Vector) model.dataVector.get(3)).size()); assertEquals(4, ((Vector) model.dataVector.get(4)).size()); assertTrue(listener.eventOccured()); assertEquals(model, listener.getEvent().getSource()); assertEquals(0, listener.getEvent().getFirstRow()); assertEquals(4, listener.getEvent().getLastRow()); assertEquals(TableModelEvent.ALL_COLUMNS, listener.getEvent().getColumn()); assertEquals(TableModelEvent.UPDATE, listener.getEvent().getType()); listener.reset(); model.moveRow(2, 4, 1); assertEquals(5, model.getDataVector().size()); assertEquals(5, ((Vector) model.dataVector.get(0)).size()); assertEquals(3, ((Vector) model.dataVector.get(1)).size()); assertEquals(2, ((Vector) model.dataVector.get(2)).size()); assertEquals(4, ((Vector) model.dataVector.get(3)).size()); assertEquals(1, ((Vector) 
model.dataVector.get(4)).size()); assertTrue(listener.eventOccured()); assertEquals(model, listener.getEvent().getSource()); assertEquals(1, listener.getEvent().getFirstRow()); assertEquals(4, listener.getEvent().getLastRow()); assertEquals(TableModelEvent.ALL_COLUMNS, listener.getEvent().getColumn()); assertEquals(TableModelEvent.UPDATE, listener.getEvent().getType()); testExceptionalCase(new ArrayIndexOutOfBoundsExceptionalCase() { @Override public void exceptionalAction() throws Exception { model.moveRow(-1, 2, 0); } }); testExceptionalCase(new ArrayIndexOutOfBoundsExceptionalCase() { @Override public void exceptionalAction() throws Exception { model.moveRow(0, 3, 2); } }); testExceptionalCase(new ArrayIndexOutOfBoundsExceptionalCase() { @Override public void exceptionalAction() throws Exception { model.moveRow(1, 2, 4); } }); testExceptionalCase(new ArrayIndexOutOfBoundsExceptionalCase() { @Override public void exceptionalAction() throws Exception { model.moveRow(1, 7, 0); } }); } public void testRemoveRow() throws Exception { TestTableModelListener listener = new TestTableModelListener(); model.addTableModelListener(listener); model.columnIdentifiers = new Vector(Arrays.asList(new Object[] { "col1", "col2" })); model.dataVector = new Vector(); model.dataVector.add(new Vector(Arrays.asList(new Object[] { "value11", "value12" }))); model.dataVector.add(new Vector(Arrays.asList(new Object[] { "value21", "value22", "value23" }))); model.dataVector.add(new Vector(Arrays.asList(new Object[] { "value31" }))); listener.reset(); model.removeRow(1); assertEquals(2, model.getDataVector().size()); assertEquals(2, ((Vector) model.dataVector.get(0)).size()); assertEquals(1, ((Vector) model.dataVector.get(1)).size()); assertTrue(listener.eventOccured()); assertEquals(model, listener.getEvent().getSource()); assertEquals(1, listener.getEvent().getFirstRow()); assertEquals(1, listener.getEvent().getLastRow()); assertEquals(TableModelEvent.ALL_COLUMNS, 
listener.getEvent().getColumn()); assertEquals(TableModelEvent.DELETE, listener.getEvent().getType()); testExceptionalCase(new ArrayIndexOutOfBoundsExceptionalCase() { @Override public void exceptionalAction() throws Exception { model.removeRow(-1); } }); testExceptionalCase(new ArrayIndexOutOfBoundsExceptionalCase() { @Override public void exceptionalAction() throws Exception { model.removeRow(3); } }); } public void testSetColumnIdentifiers() throws Exception { TestTableModelListener listener = new TestTableModelListener(); model.addTableModelListener(listener); model.columnIdentifiers = new Vector(Arrays.asList(new Object[] { "col1", "col2" })); model.dataVector = new Vector(); model.dataVector.add(new Vector(Arrays.asList(new Object[] { "value11", "value12" }))); model.dataVector.add(new Vector(Arrays.asList(new Object[] { "value21", "value22", "value23" }))); model.dataVector.add(new Vector(Arrays.asList(new Object[] { "value31" }))); listener.reset(); model.setColumnIdentifiers(new Vector(Arrays.asList(new Object[] { "col1", "col2" }))); assertEquals(2, model.columnIdentifiers.size()); assertEquals(3, model.getDataVector().size()); assertEquals(2, ((Vector) model.dataVector.get(0)).size()); assertEquals(2, ((Vector) model.dataVector.get(1)).size()); assertEquals(2, ((Vector) model.dataVector.get(2)).size()); assertTrue(listener.eventOccured()); assertEquals(model, listener.getEvent().getSource()); assertEquals(TableModelEvent.HEADER_ROW, listener.getEvent().getFirstRow()); assertEquals(TableModelEvent.HEADER_ROW, listener.getEvent().getLastRow()); assertEquals(TableModelEvent.ALL_COLUMNS, listener.getEvent().getColumn()); assertEquals(TableModelEvent.UPDATE, listener.getEvent().getType()); listener.reset(); model.setColumnIdentifiers(new Vector(Arrays.asList(new Object[] { "col1", "col2", "col3" }))); assertEquals(3, model.columnIdentifiers.size()); assertEquals(3, model.getDataVector().size()); assertEquals(3, ((Vector) model.dataVector.get(0)).size()); 
assertEquals(3, ((Vector) model.dataVector.get(1)).size()); assertEquals(3, ((Vector) model.dataVector.get(2)).size()); assertTrue(listener.eventOccured()); assertEquals(model, listener.getEvent().getSource()); assertEquals(TableModelEvent.HEADER_ROW, listener.getEvent().getFirstRow()); assertEquals(TableModelEvent.HEADER_ROW, listener.getEvent().getLastRow()); assertEquals(TableModelEvent.ALL_COLUMNS, listener.getEvent().getColumn()); assertEquals(TableModelEvent.UPDATE, listener.getEvent().getType()); listener.reset(); model.setColumnIdentifiers(new Object[] { "col1" }); assertEquals(1, model.columnIdentifiers.size()); assertEquals(3, model.getDataVector().size()); assertEquals(1, ((Vector) model.dataVector.get(0)).size()); assertEquals(1, ((Vector) model.dataVector.get(1)).size()); assertEquals(1, ((Vector) model.dataVector.get(2)).size()); assertTrue(listener.eventOccured()); assertEquals(model, listener.getEvent().getSource()); assertEquals(TableModelEvent.HEADER_ROW, listener.getEvent().getFirstRow()); assertEquals(TableModelEvent.HEADER_ROW, listener.getEvent().getLastRow()); assertEquals(TableModelEvent.ALL_COLUMNS, listener.getEvent().getColumn()); assertEquals(TableModelEvent.UPDATE, listener.getEvent().getType()); listener.reset(); model.setColumnIdentifiers(new Object[] { "col1" }); assertEquals(1, model.columnIdentifiers.size()); assertEquals(3, model.getDataVector().size()); assertEquals(1, ((Vector) model.dataVector.get(0)).size()); assertEquals(1, ((Vector) model.dataVector.get(1)).size()); assertEquals(1, ((Vector) model.dataVector.get(2)).size()); assertTrue(listener.eventOccured()); assertEquals(model, listener.getEvent().getSource()); assertEquals(TableModelEvent.HEADER_ROW, listener.getEvent().getFirstRow()); assertEquals(TableModelEvent.HEADER_ROW, listener.getEvent().getLastRow()); assertEquals(TableModelEvent.ALL_COLUMNS, listener.getEvent().getColumn()); assertEquals(TableModelEvent.UPDATE, listener.getEvent().getType()); } public void 
testGetSetColumnCount() throws Exception { assertEquals(0, model.getColumnCount()); TestTableModelListener listener = new TestTableModelListener(); model.addTableModelListener(listener); model.columnIdentifiers = new Vector(Arrays.asList(new Object[] { "col1", "col2" })); model.dataVector = new Vector(); model.dataVector.add(new Vector(Arrays.asList(new Object[] { "value11", "value12" }))); model.dataVector.add(new Vector(Arrays.asList(new Object[] { "value21", "value22", "value23" }))); model.dataVector.add(new Vector(Arrays.asList(new Object[] { "value31" }))); listener.reset(); model.setColumnCount(2); assertEquals(2, model.columnIdentifiers.size()); assertEquals(2, model.getColumnCount()); assertEquals(3, model.getDataVector().size()); assertEquals(2, ((Vector) model.dataVector.get(0)).size()); assertEquals(2, ((Vector) model.dataVector.get(1)).size()); assertEquals(2, ((Vector) model.dataVector.get(2)).size()); assertTrue(listener.eventOccured()); assertEquals(model, listener.getEvent().getSource()); assertEquals(TableModelEvent.HEADER_ROW, listener.getEvent().getFirstRow()); assertEquals(TableModelEvent.HEADER_ROW, listener.getEvent().getLastRow()); assertEquals(TableModelEvent.ALL_COLUMNS, listener.getEvent().getColumn()); assertEquals(TableModelEvent.UPDATE, listener.getEvent().getType()); listener.reset(); model.setColumnCount(3); assertEquals(3, model.columnIdentifiers.size()); assertEquals(3, model.getColumnCount()); assertEquals(3, model.getDataVector().size()); assertEquals(3, ((Vector) model.dataVector.get(0)).size()); assertEquals(3, ((Vector) model.dataVector.get(1)).size()); assertEquals(3, ((Vector) model.dataVector.get(2)).size()); assertTrue(listener.eventOccured()); assertEquals(model, listener.getEvent().getSource()); assertEquals(TableModelEvent.HEADER_ROW, listener.getEvent().getFirstRow()); assertEquals(TableModelEvent.HEADER_ROW, listener.getEvent().getLastRow()); assertEquals(TableModelEvent.ALL_COLUMNS, listener.getEvent().getColumn()); 
assertEquals(TableModelEvent.UPDATE, listener.getEvent().getType()); listener.reset(); model.setColumnCount(1); assertEquals(1, model.columnIdentifiers.size()); assertEquals(1, model.getColumnCount()); assertEquals(3, model.getDataVector().size()); assertEquals(1, ((Vector) model.dataVector.get(0)).size()); assertEquals(1, ((Vector) model.dataVector.get(1)).size()); assertEquals(1, ((Vector) model.dataVector.get(2)).size()); assertTrue(listener.eventOccured()); assertEquals(model, listener.getEvent().getSource()); assertEquals(TableModelEvent.HEADER_ROW, listener.getEvent().getFirstRow()); assertEquals(TableModelEvent.HEADER_ROW, listener.getEvent().getLastRow()); assertEquals(TableModelEvent.ALL_COLUMNS, listener.getEvent().getColumn()); assertEquals(TableModelEvent.UPDATE, listener.getEvent().getType()); } public void testAddColumn() throws Exception { TestTableModelListener listener = new TestTableModelListener(); model.addTableModelListener(listener); model.columnIdentifiers = new Vector(Arrays.asList(new Object[] { "col1", "col2" })); model.dataVector = new Vector(); model.dataVector.add(new Vector(Arrays.asList(new Object[] { "value11", "value12" }))); model.dataVector.add(new Vector(Arrays.asList(new Object[] { "value21", "value22", "value23" }))); model.dataVector.add(new Vector(Arrays.asList(new Object[] { "value31" }))); listener.reset(); model.addColumn(null, new Object[] { "val13", "val23" }); assertEquals(3, model.columnIdentifiers.size()); assertEquals(3, model.getDataVector().size()); assertEquals(3, ((Vector) model.dataVector.get(0)).size()); assertEquals(3, ((Vector) model.dataVector.get(1)).size()); assertEquals(3, ((Vector) model.dataVector.get(2)).size()); assertNull(model.columnIdentifiers.get(2)); assertEquals("val13", ((Vector) model.dataVector.get(0)).get(2)); assertEquals("val23", ((Vector) model.dataVector.get(1)).get(2)); assertNull(((Vector) model.dataVector.get(2)).get(2)); assertTrue(listener.eventOccured()); assertEquals(model, 
listener.getEvent().getSource()); assertEquals(TableModelEvent.HEADER_ROW, listener.getEvent().getFirstRow()); assertEquals(TableModelEvent.HEADER_ROW, listener.getEvent().getLastRow()); assertEquals(TableModelEvent.ALL_COLUMNS, listener.getEvent().getColumn()); assertEquals(TableModelEvent.UPDATE, listener.getEvent().getType()); listener.reset(); model.addColumn("col4"); assertEquals(4, model.columnIdentifiers.size()); assertEquals(3, model.getDataVector().size()); assertEquals(4, ((Vector) model.dataVector.get(0)).size()); assertEquals(4, ((Vector) model.dataVector.get(1)).size()); assertEquals(4, ((Vector) model.dataVector.get(2)).size()); assertEquals("col4", model.columnIdentifiers.get(3)); assertTrue(listener.eventOccured()); } public void testGetColumnName() throws Exception { assertEquals("A", model.getColumnName(0)); assertEquals("B", model.getColumnName(1)); model.setColumnCount(2); assertEquals("A", model.getColumnName(0)); assertEquals("B", model.getColumnName(1)); model.setColumnIdentifiers(new Object[] { "col1", null, "col3", new Integer(4) }); assertEquals("col1", model.getColumnName(0)); assertEquals("B", model.getColumnName(1)); assertEquals("col3", model.getColumnName(2)); assertEquals("4", model.getColumnName(3)); assertEquals("E", model.getColumnName(4)); testExceptionalCase(new ArrayIndexOutOfBoundsExceptionalCase() { @Override public void exceptionalAction() throws Exception { model.getColumnName(-1); } }); } public void testIsCellEditable() throws Exception { assertTrue(model.isCellEditable(2, 4)); } public void testGetSetValueAt() throws Exception { TestTableModelListener listener = new TestTableModelListener(); model.addTableModelListener(listener); model.columnIdentifiers = new Vector(Arrays.asList(new Object[] { "col1", "col2" })); model.dataVector = new Vector(); model.dataVector.add(new Vector(Arrays.asList(new Object[] { "value11", "value12" }))); model.dataVector.add(new Vector(Arrays.asList(new Object[] { "value21", "value22", 
"value23" }))); model.dataVector.add(new Vector(Arrays.asList(new Object[] { "value31" }))); assertEquals("value22", model.getValueAt(1, 1)); assertEquals("value31", model.getValueAt(2, 0)); testExceptionalCase(new ArrayIndexOutOfBoundsExceptionalCase() { @Override public void exceptionalAction() throws Exception { model.getValueAt(2, 1); } }); testExceptionalCase(new ArrayIndexOutOfBoundsExceptionalCase() { @Override public void exceptionalAction() throws Exception { model.setValueAt("value32", 2, 1); } }); listener.reset(); model.setValueAt(new Integer(3), 1, 0); assertEquals(new Integer(3), model.getValueAt(1, 0)); assertTrue(listener.eventOccured()); assertEquals(model, listener.getEvent().getSource()); assertEquals(1, listener.getEvent().getFirstRow()); assertEquals(1, listener.getEvent().getLastRow()); assertEquals(0, listener.getEvent().getColumn()); assertEquals(TableModelEvent.UPDATE, listener.getEvent().getType()); } public void testConvertToVector() throws Exception { Object[] array = new Object[] { "1", new Integer(6), new String[] { "31", "32" } }; assertEquals(new Vector(Arrays.asList(array)), DefaultTableModel.convertToVector(array)); Object[][] arrayOfArray = new Object[][] { new Object[] { "1" }, new Object[] { new Integer(6), new Character('c') }, new String[] { "31", "32" } }; Vector expected = new Vector(); expected.add(Arrays.asList(new Object[] { "1" })); expected.add(Arrays.asList(new Object[] { new Integer(6), new Character('c') })); expected.add(Arrays.asList(new String[] { "31", "32" })); assertEquals(expected, DefaultTableModel.convertToVector(arrayOfArray)); } }
googleapis/google-cloud-java
36,501
java-appengine-admin/proto-google-cloud-appengine-admin-v1/src/main/java/com/google/appengine/v1/UpdateApplicationRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/appengine/v1/appengine.proto // Protobuf Java Version: 3.25.8 package com.google.appengine.v1; /** * * * <pre> * Request message for `Applications.UpdateApplication`. * </pre> * * Protobuf type {@code google.appengine.v1.UpdateApplicationRequest} */ public final class UpdateApplicationRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.appengine.v1.UpdateApplicationRequest) UpdateApplicationRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateApplicationRequest.newBuilder() to construct. 
private UpdateApplicationRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateApplicationRequest() { name_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateApplicationRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.appengine.v1.AppengineProto .internal_static_google_appengine_v1_UpdateApplicationRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.appengine.v1.AppengineProto .internal_static_google_appengine_v1_UpdateApplicationRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.appengine.v1.UpdateApplicationRequest.class, com.google.appengine.v1.UpdateApplicationRequest.Builder.class); } private int bitField0_; public static final int NAME_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object name_ = ""; /** * * * <pre> * Name of the Application resource to update. Example: `apps/myapp`. * </pre> * * <code>string name = 1;</code> * * @return The name. */ @java.lang.Override public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } } /** * * * <pre> * Name of the Application resource to update. Example: `apps/myapp`. * </pre> * * <code>string name = 1;</code> * * @return The bytes for name. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int APPLICATION_FIELD_NUMBER = 2; private com.google.appengine.v1.Application application_; /** * * * <pre> * An Application containing the updated resource. * </pre> * * <code>.google.appengine.v1.Application application = 2;</code> * * @return Whether the application field is set. */ @java.lang.Override public boolean hasApplication() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * An Application containing the updated resource. * </pre> * * <code>.google.appengine.v1.Application application = 2;</code> * * @return The application. */ @java.lang.Override public com.google.appengine.v1.Application getApplication() { return application_ == null ? com.google.appengine.v1.Application.getDefaultInstance() : application_; } /** * * * <pre> * An Application containing the updated resource. * </pre> * * <code>.google.appengine.v1.Application application = 2;</code> */ @java.lang.Override public com.google.appengine.v1.ApplicationOrBuilder getApplicationOrBuilder() { return application_ == null ? com.google.appengine.v1.Application.getDefaultInstance() : application_; } public static final int UPDATE_MASK_FIELD_NUMBER = 3; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Required. Standard field mask for the set of fields to be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3;</code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. Standard field mask for the set of fields to be updated. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 3;</code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Required. Standard field mask for the set of fields to be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3;</code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getApplication()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(3, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getApplication()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final 
java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.appengine.v1.UpdateApplicationRequest)) { return super.equals(obj); } com.google.appengine.v1.UpdateApplicationRequest other = (com.google.appengine.v1.UpdateApplicationRequest) obj; if (!getName().equals(other.getName())) return false; if (hasApplication() != other.hasApplication()) return false; if (hasApplication()) { if (!getApplication().equals(other.getApplication())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); if (hasApplication()) { hash = (37 * hash) + APPLICATION_FIELD_NUMBER; hash = (53 * hash) + getApplication().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.appengine.v1.UpdateApplicationRequest parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.appengine.v1.UpdateApplicationRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.appengine.v1.UpdateApplicationRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
com.google.appengine.v1.UpdateApplicationRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.appengine.v1.UpdateApplicationRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.appengine.v1.UpdateApplicationRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.appengine.v1.UpdateApplicationRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.appengine.v1.UpdateApplicationRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.appengine.v1.UpdateApplicationRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.appengine.v1.UpdateApplicationRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.appengine.v1.UpdateApplicationRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } 
// Parses directly from a CodedInputStream, resolving extensions via the registry.
public static com.google.appengine.v1.UpdateApplicationRequest parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

// Creates a fresh builder seeded from the shared default instance.
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

// Creates a builder pre-populated with the fields of {@code prototype}.
public static Builder newBuilder(com.google.appengine.v1.UpdateApplicationRequest prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  // The default instance yields an empty builder; any other message is copied in.
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}

/**
 *
 *
 * <pre>
 * Request message for `Applications.UpdateApplication`.
* </pre> * * Protobuf type {@code google.appengine.v1.UpdateApplicationRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.appengine.v1.UpdateApplicationRequest) com.google.appengine.v1.UpdateApplicationRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.appengine.v1.AppengineProto .internal_static_google_appengine_v1_UpdateApplicationRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.appengine.v1.AppengineProto .internal_static_google_appengine_v1_UpdateApplicationRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.appengine.v1.UpdateApplicationRequest.class, com.google.appengine.v1.UpdateApplicationRequest.Builder.class); } // Construct using com.google.appengine.v1.UpdateApplicationRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getApplicationFieldBuilder(); getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; name_ = ""; application_ = null; if (applicationBuilder_ != null) { applicationBuilder_.dispose(); applicationBuilder_ = null; } updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.appengine.v1.AppengineProto .internal_static_google_appengine_v1_UpdateApplicationRequest_descriptor; } @java.lang.Override public 
com.google.appengine.v1.UpdateApplicationRequest getDefaultInstanceForType() { return com.google.appengine.v1.UpdateApplicationRequest.getDefaultInstance(); } @java.lang.Override public com.google.appengine.v1.UpdateApplicationRequest build() { com.google.appengine.v1.UpdateApplicationRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.appengine.v1.UpdateApplicationRequest buildPartial() { com.google.appengine.v1.UpdateApplicationRequest result = new com.google.appengine.v1.UpdateApplicationRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.appengine.v1.UpdateApplicationRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.name_ = name_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.application_ = applicationBuilder_ == null ? application_ : applicationBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000004) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? 
updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.appengine.v1.UpdateApplicationRequest) { return mergeFrom((com.google.appengine.v1.UpdateApplicationRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.appengine.v1.UpdateApplicationRequest other) { if (other == com.google.appengine.v1.UpdateApplicationRequest.getDefaultInstance()) return this; if (!other.getName().isEmpty()) { name_ = other.name_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasApplication()) { mergeApplication(other.getApplication()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { name_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getApplicationFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 case 26: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object name_ = ""; /** * * * <pre> * Name of the Application resource to update. Example: `apps/myapp`. * </pre> * * <code>string name = 1;</code> * * @return The name. */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Name of the Application resource to update. Example: `apps/myapp`. * </pre> * * <code>string name = 1;</code> * * @return The bytes for name. */ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Name of the Application resource to update. Example: `apps/myapp`. 
* </pre> * * <code>string name = 1;</code> * * @param value The name to set. * @return This builder for chaining. */ public Builder setName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } name_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Name of the Application resource to update. Example: `apps/myapp`. * </pre> * * <code>string name = 1;</code> * * @return This builder for chaining. */ public Builder clearName() { name_ = getDefaultInstance().getName(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Name of the Application resource to update. Example: `apps/myapp`. * </pre> * * <code>string name = 1;</code> * * @param value The bytes for name to set. * @return This builder for chaining. */ public Builder setNameBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); name_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private com.google.appengine.v1.Application application_; private com.google.protobuf.SingleFieldBuilderV3< com.google.appengine.v1.Application, com.google.appengine.v1.Application.Builder, com.google.appengine.v1.ApplicationOrBuilder> applicationBuilder_; /** * * * <pre> * An Application containing the updated resource. * </pre> * * <code>.google.appengine.v1.Application application = 2;</code> * * @return Whether the application field is set. */ public boolean hasApplication() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * An Application containing the updated resource. * </pre> * * <code>.google.appengine.v1.Application application = 2;</code> * * @return The application. */ public com.google.appengine.v1.Application getApplication() { if (applicationBuilder_ == null) { return application_ == null ? 
com.google.appengine.v1.Application.getDefaultInstance() : application_; } else { return applicationBuilder_.getMessage(); } } /** * * * <pre> * An Application containing the updated resource. * </pre> * * <code>.google.appengine.v1.Application application = 2;</code> */ public Builder setApplication(com.google.appengine.v1.Application value) { if (applicationBuilder_ == null) { if (value == null) { throw new NullPointerException(); } application_ = value; } else { applicationBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * An Application containing the updated resource. * </pre> * * <code>.google.appengine.v1.Application application = 2;</code> */ public Builder setApplication(com.google.appengine.v1.Application.Builder builderForValue) { if (applicationBuilder_ == null) { application_ = builderForValue.build(); } else { applicationBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * An Application containing the updated resource. * </pre> * * <code>.google.appengine.v1.Application application = 2;</code> */ public Builder mergeApplication(com.google.appengine.v1.Application value) { if (applicationBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && application_ != null && application_ != com.google.appengine.v1.Application.getDefaultInstance()) { getApplicationBuilder().mergeFrom(value); } else { application_ = value; } } else { applicationBuilder_.mergeFrom(value); } if (application_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * An Application containing the updated resource. 
* </pre> * * <code>.google.appengine.v1.Application application = 2;</code> */ public Builder clearApplication() { bitField0_ = (bitField0_ & ~0x00000002); application_ = null; if (applicationBuilder_ != null) { applicationBuilder_.dispose(); applicationBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * An Application containing the updated resource. * </pre> * * <code>.google.appengine.v1.Application application = 2;</code> */ public com.google.appengine.v1.Application.Builder getApplicationBuilder() { bitField0_ |= 0x00000002; onChanged(); return getApplicationFieldBuilder().getBuilder(); } /** * * * <pre> * An Application containing the updated resource. * </pre> * * <code>.google.appengine.v1.Application application = 2;</code> */ public com.google.appengine.v1.ApplicationOrBuilder getApplicationOrBuilder() { if (applicationBuilder_ != null) { return applicationBuilder_.getMessageOrBuilder(); } else { return application_ == null ? com.google.appengine.v1.Application.getDefaultInstance() : application_; } } /** * * * <pre> * An Application containing the updated resource. * </pre> * * <code>.google.appengine.v1.Application application = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.appengine.v1.Application, com.google.appengine.v1.Application.Builder, com.google.appengine.v1.ApplicationOrBuilder> getApplicationFieldBuilder() { if (applicationBuilder_ == null) { applicationBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.appengine.v1.Application, com.google.appengine.v1.Application.Builder, com.google.appengine.v1.ApplicationOrBuilder>( getApplication(), getParentForChildren(), isClean()); application_ = null; } return applicationBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Required. 
Standard field mask for the set of fields to be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3;</code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000004) != 0); } /** * * * <pre> * Required. Standard field mask for the set of fields to be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3;</code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Required. Standard field mask for the set of fields to be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3;</code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Required. Standard field mask for the set of fields to be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3;</code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Required. Standard field mask for the set of fields to be updated. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 3;</code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000004) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000004; onChanged(); } return this; } /** * * * <pre> * Required. Standard field mask for the set of fields to be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3;</code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000004); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. Standard field mask for the set of fields to be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3;</code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000004; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Required. Standard field mask for the set of fields to be updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3;</code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Required. Standard field mask for the set of fields to be updated. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 3;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.appengine.v1.UpdateApplicationRequest) } // @@protoc_insertion_point(class_scope:google.appengine.v1.UpdateApplicationRequest) private static final com.google.appengine.v1.UpdateApplicationRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.appengine.v1.UpdateApplicationRequest(); } public static com.google.appengine.v1.UpdateApplicationRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateApplicationRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateApplicationRequest>() { @java.lang.Override public UpdateApplicationRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch 
(com.google.protobuf.UninitializedMessageException e) {
  throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
  // Wrap plain I/O failures as a protobuf parse exception, attaching whatever
  // was decoded before the failure so callers can inspect the partial message.
  throw new com.google.protobuf.InvalidProtocolBufferException(e)
      .setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};

// Shared parser instance used by all parseFrom/parseDelimitedFrom overloads.
public static com.google.protobuf.Parser<UpdateApplicationRequest> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<UpdateApplicationRequest> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.appengine.v1.UpdateApplicationRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
// NOTE(review): The three lines below are dataset/concatenation metadata (repo id,
// file size, and the path of the next file), not Java source. A second generated
// file, AccountDefenderAssessment.java, begins immediately after them; the two
// files should be split back apart and this metadata removed.
// googleapis/google-cloud-java
// 36,560
// java-recaptchaenterprise/proto-google-cloud-recaptchaenterprise-v1/src/main/java/com/google/recaptchaenterprise/v1/AccountDefenderAssessment.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/recaptchaenterprise/v1/recaptchaenterprise.proto // Protobuf Java Version: 3.25.8 package com.google.recaptchaenterprise.v1; /** * * * <pre> * Account defender risk assessment. * </pre> * * Protobuf type {@code google.cloud.recaptchaenterprise.v1.AccountDefenderAssessment} */ public final class AccountDefenderAssessment extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.recaptchaenterprise.v1.AccountDefenderAssessment) AccountDefenderAssessmentOrBuilder { private static final long serialVersionUID = 0L; // Use AccountDefenderAssessment.newBuilder() to construct. 
private AccountDefenderAssessment(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private AccountDefenderAssessment() { labels_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new AccountDefenderAssessment(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.recaptchaenterprise.v1.RecaptchaEnterpriseProto .internal_static_google_cloud_recaptchaenterprise_v1_AccountDefenderAssessment_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.recaptchaenterprise.v1.RecaptchaEnterpriseProto .internal_static_google_cloud_recaptchaenterprise_v1_AccountDefenderAssessment_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.recaptchaenterprise.v1.AccountDefenderAssessment.class, com.google.recaptchaenterprise.v1.AccountDefenderAssessment.Builder.class); } /** * * * <pre> * Labels returned by account defender for this request. * </pre> * * Protobuf enum {@code * google.cloud.recaptchaenterprise.v1.AccountDefenderAssessment.AccountDefenderLabel} */ public enum AccountDefenderLabel implements com.google.protobuf.ProtocolMessageEnum { /** * * * <pre> * Default unspecified type. * </pre> * * <code>ACCOUNT_DEFENDER_LABEL_UNSPECIFIED = 0;</code> */ ACCOUNT_DEFENDER_LABEL_UNSPECIFIED(0), /** * * * <pre> * The request matches a known good profile for the user. * </pre> * * <code>PROFILE_MATCH = 1;</code> */ PROFILE_MATCH(1), /** * * * <pre> * The request is potentially a suspicious login event and must be further * verified either through multi-factor authentication or another system. * </pre> * * <code>SUSPICIOUS_LOGIN_ACTIVITY = 2;</code> */ SUSPICIOUS_LOGIN_ACTIVITY(2), /** * * * <pre> * The request matched a profile that previously had suspicious account * creation behavior. 
This can mean that this is a fake account. * </pre> * * <code>SUSPICIOUS_ACCOUNT_CREATION = 3;</code> */ SUSPICIOUS_ACCOUNT_CREATION(3), /** * * * <pre> * The account in the request has a high number of related accounts. It does * not necessarily imply that the account is bad but can require further * investigation. * </pre> * * <code>RELATED_ACCOUNTS_NUMBER_HIGH = 4;</code> */ RELATED_ACCOUNTS_NUMBER_HIGH(4), UNRECOGNIZED(-1), ; /** * * * <pre> * Default unspecified type. * </pre> * * <code>ACCOUNT_DEFENDER_LABEL_UNSPECIFIED = 0;</code> */ public static final int ACCOUNT_DEFENDER_LABEL_UNSPECIFIED_VALUE = 0; /** * * * <pre> * The request matches a known good profile for the user. * </pre> * * <code>PROFILE_MATCH = 1;</code> */ public static final int PROFILE_MATCH_VALUE = 1; /** * * * <pre> * The request is potentially a suspicious login event and must be further * verified either through multi-factor authentication or another system. * </pre> * * <code>SUSPICIOUS_LOGIN_ACTIVITY = 2;</code> */ public static final int SUSPICIOUS_LOGIN_ACTIVITY_VALUE = 2; /** * * * <pre> * The request matched a profile that previously had suspicious account * creation behavior. This can mean that this is a fake account. * </pre> * * <code>SUSPICIOUS_ACCOUNT_CREATION = 3;</code> */ public static final int SUSPICIOUS_ACCOUNT_CREATION_VALUE = 3; /** * * * <pre> * The account in the request has a high number of related accounts. It does * not necessarily imply that the account is bad but can require further * investigation. * </pre> * * <code>RELATED_ACCOUNTS_NUMBER_HIGH = 4;</code> */ public static final int RELATED_ACCOUNTS_NUMBER_HIGH_VALUE = 4; public final int getNumber() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalArgumentException( "Can't get the number of an unknown enum value."); } return value; } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. 
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static AccountDefenderLabel valueOf(int value) {
  return forNumber(value);
}

/**
 * Maps a numeric wire value to its enum constant.
 *
 * @param value The numeric wire value of the corresponding enum entry.
 * @return The enum associated with the given numeric wire value, or {@code null}
 *     if the value is unknown to this version of the generated code.
 */
public static AccountDefenderLabel forNumber(int value) {
  switch (value) {
    case 0:
      return ACCOUNT_DEFENDER_LABEL_UNSPECIFIED;
    case 1:
      return PROFILE_MATCH;
    case 2:
      return SUSPICIOUS_LOGIN_ACTIVITY;
    case 3:
      return SUSPICIOUS_ACCOUNT_CREATION;
    case 4:
      return RELATED_ACCOUNTS_NUMBER_HIGH;
    default:
      return null;
  }
}

public static com.google.protobuf.Internal.EnumLiteMap<AccountDefenderLabel>
    internalGetValueMap() {
  return internalValueMap;
}

// Number-to-constant lookup exposed through internalGetValueMap(); delegates to
// forNumber, so unknown numbers resolve to null here.
private static final com.google.protobuf.Internal.EnumLiteMap<AccountDefenderLabel>
    internalValueMap =
        new com.google.protobuf.Internal.EnumLiteMap<AccountDefenderLabel>() {
          public AccountDefenderLabel findValueByNumber(int number) {
            return AccountDefenderLabel.forNumber(number);
          }
        };

// UNRECOGNIZED has no descriptor entry, so asking for one is a programming error.
public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
  if (this == UNRECOGNIZED) {
    throw new java.lang.IllegalStateException(
        "Can't get the descriptor of an unrecognized enum value.");
  }
  return getDescriptor().getValues().get(ordinal());
}

public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
  return getDescriptor();
}

public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
  // This enum is the first (index 0) enum type declared in AccountDefenderAssessment.
  return com.google.recaptchaenterprise.v1.AccountDefenderAssessment.getDescriptor()
      .getEnumTypes()
      .get(0);
}

private static final AccountDefenderLabel[] VALUES = values();

// Resolves a descriptor back to a constant; an index of -1 marks an unrecognized value.
public static AccountDefenderLabel valueOf(
    com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
  if (desc.getType() != getDescriptor()) {
    throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
  }
  if (desc.getIndex() == -1) {
    return UNRECOGNIZED;
  }
  return VALUES[desc.getIndex()];
}
private final int value; private AccountDefenderLabel(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:google.cloud.recaptchaenterprise.v1.AccountDefenderAssessment.AccountDefenderLabel) } public static final int LABELS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<java.lang.Integer> labels_; private static final com.google.protobuf.Internal.ListAdapter.Converter< java.lang.Integer, com.google.recaptchaenterprise.v1.AccountDefenderAssessment.AccountDefenderLabel> labels_converter_ = new com.google.protobuf.Internal.ListAdapter.Converter< java.lang.Integer, com.google.recaptchaenterprise.v1.AccountDefenderAssessment.AccountDefenderLabel>() { public com.google.recaptchaenterprise.v1.AccountDefenderAssessment.AccountDefenderLabel convert(java.lang.Integer from) { com.google.recaptchaenterprise.v1.AccountDefenderAssessment.AccountDefenderLabel result = com.google.recaptchaenterprise.v1.AccountDefenderAssessment .AccountDefenderLabel.forNumber(from); return result == null ? com.google.recaptchaenterprise.v1.AccountDefenderAssessment.AccountDefenderLabel .UNRECOGNIZED : result; } }; /** * * * <pre> * Output only. Labels for this request. * </pre> * * <code> * repeated .google.cloud.recaptchaenterprise.v1.AccountDefenderAssessment.AccountDefenderLabel labels = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return A list containing the labels. */ @java.lang.Override public java.util.List< com.google.recaptchaenterprise.v1.AccountDefenderAssessment.AccountDefenderLabel> getLabelsList() { return new com.google.protobuf.Internal.ListAdapter< java.lang.Integer, com.google.recaptchaenterprise.v1.AccountDefenderAssessment.AccountDefenderLabel>( labels_, labels_converter_); } /** * * * <pre> * Output only. Labels for this request. 
* </pre> * * <code> * repeated .google.cloud.recaptchaenterprise.v1.AccountDefenderAssessment.AccountDefenderLabel labels = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The count of labels. */ @java.lang.Override public int getLabelsCount() { return labels_.size(); } /** * * * <pre> * Output only. Labels for this request. * </pre> * * <code> * repeated .google.cloud.recaptchaenterprise.v1.AccountDefenderAssessment.AccountDefenderLabel labels = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @param index The index of the element to return. * @return The labels at the given index. */ @java.lang.Override public com.google.recaptchaenterprise.v1.AccountDefenderAssessment.AccountDefenderLabel getLabels( int index) { return labels_converter_.convert(labels_.get(index)); } /** * * * <pre> * Output only. Labels for this request. * </pre> * * <code> * repeated .google.cloud.recaptchaenterprise.v1.AccountDefenderAssessment.AccountDefenderLabel labels = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return A list containing the enum numeric values on the wire for labels. */ @java.lang.Override public java.util.List<java.lang.Integer> getLabelsValueList() { return labels_; } /** * * * <pre> * Output only. Labels for this request. * </pre> * * <code> * repeated .google.cloud.recaptchaenterprise.v1.AccountDefenderAssessment.AccountDefenderLabel labels = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @param index The index of the value to return. * @return The enum numeric value on the wire of labels at the given index. 
*/ @java.lang.Override public int getLabelsValue(int index) { return labels_.get(index); } private int labelsMemoizedSerializedSize; private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (getLabelsList().size() > 0) { output.writeUInt32NoTag(10); output.writeUInt32NoTag(labelsMemoizedSerializedSize); } for (int i = 0; i < labels_.size(); i++) { output.writeEnumNoTag(labels_.get(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; { int dataSize = 0; for (int i = 0; i < labels_.size(); i++) { dataSize += com.google.protobuf.CodedOutputStream.computeEnumSizeNoTag(labels_.get(i)); } size += dataSize; if (!getLabelsList().isEmpty()) { size += 1; size += com.google.protobuf.CodedOutputStream.computeUInt32SizeNoTag(dataSize); } labelsMemoizedSerializedSize = dataSize; } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.recaptchaenterprise.v1.AccountDefenderAssessment)) { return super.equals(obj); } com.google.recaptchaenterprise.v1.AccountDefenderAssessment other = (com.google.recaptchaenterprise.v1.AccountDefenderAssessment) obj; if (!labels_.equals(other.labels_)) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getLabelsCount() > 0) { 
hash = (37 * hash) + LABELS_FIELD_NUMBER; hash = (53 * hash) + labels_.hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.recaptchaenterprise.v1.AccountDefenderAssessment parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.recaptchaenterprise.v1.AccountDefenderAssessment parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.recaptchaenterprise.v1.AccountDefenderAssessment parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.recaptchaenterprise.v1.AccountDefenderAssessment parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.recaptchaenterprise.v1.AccountDefenderAssessment parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.recaptchaenterprise.v1.AccountDefenderAssessment parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.recaptchaenterprise.v1.AccountDefenderAssessment parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.recaptchaenterprise.v1.AccountDefenderAssessment parseFrom( java.io.InputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.recaptchaenterprise.v1.AccountDefenderAssessment parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.recaptchaenterprise.v1.AccountDefenderAssessment parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.recaptchaenterprise.v1.AccountDefenderAssessment parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.recaptchaenterprise.v1.AccountDefenderAssessment parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.recaptchaenterprise.v1.AccountDefenderAssessment prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Account defender risk assessment. * </pre> * * Protobuf type {@code google.cloud.recaptchaenterprise.v1.AccountDefenderAssessment} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.recaptchaenterprise.v1.AccountDefenderAssessment) com.google.recaptchaenterprise.v1.AccountDefenderAssessmentOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.recaptchaenterprise.v1.RecaptchaEnterpriseProto .internal_static_google_cloud_recaptchaenterprise_v1_AccountDefenderAssessment_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.recaptchaenterprise.v1.RecaptchaEnterpriseProto .internal_static_google_cloud_recaptchaenterprise_v1_AccountDefenderAssessment_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.recaptchaenterprise.v1.AccountDefenderAssessment.class, com.google.recaptchaenterprise.v1.AccountDefenderAssessment.Builder.class); } // Construct using com.google.recaptchaenterprise.v1.AccountDefenderAssessment.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; labels_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.recaptchaenterprise.v1.RecaptchaEnterpriseProto 
.internal_static_google_cloud_recaptchaenterprise_v1_AccountDefenderAssessment_descriptor; } @java.lang.Override public com.google.recaptchaenterprise.v1.AccountDefenderAssessment getDefaultInstanceForType() { return com.google.recaptchaenterprise.v1.AccountDefenderAssessment.getDefaultInstance(); } @java.lang.Override public com.google.recaptchaenterprise.v1.AccountDefenderAssessment build() { com.google.recaptchaenterprise.v1.AccountDefenderAssessment result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.recaptchaenterprise.v1.AccountDefenderAssessment buildPartial() { com.google.recaptchaenterprise.v1.AccountDefenderAssessment result = new com.google.recaptchaenterprise.v1.AccountDefenderAssessment(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.recaptchaenterprise.v1.AccountDefenderAssessment result) { if (((bitField0_ & 0x00000001) != 0)) { labels_ = java.util.Collections.unmodifiableList(labels_); bitField0_ = (bitField0_ & ~0x00000001); } result.labels_ = labels_; } private void buildPartial0(com.google.recaptchaenterprise.v1.AccountDefenderAssessment result) { int from_bitField0_ = bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object 
value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.recaptchaenterprise.v1.AccountDefenderAssessment) { return mergeFrom((com.google.recaptchaenterprise.v1.AccountDefenderAssessment) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.recaptchaenterprise.v1.AccountDefenderAssessment other) { if (other == com.google.recaptchaenterprise.v1.AccountDefenderAssessment.getDefaultInstance()) return this; if (!other.labels_.isEmpty()) { if (labels_.isEmpty()) { labels_ = other.labels_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureLabelsIsMutable(); labels_.addAll(other.labels_); } onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { int tmpRaw = input.readEnum(); ensureLabelsIsMutable(); labels_.add(tmpRaw); break; } // case 8 case 10: { int length = input.readRawVarint32(); int oldLimit = input.pushLimit(length); while (input.getBytesUntilLimit() > 0) { int tmpRaw = input.readEnum(); ensureLabelsIsMutable(); labels_.add(tmpRaw); } input.popLimit(oldLimit); break; } // case 10 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // 
while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<java.lang.Integer> labels_ = java.util.Collections.emptyList(); private void ensureLabelsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { labels_ = new java.util.ArrayList<java.lang.Integer>(labels_); bitField0_ |= 0x00000001; } } /** * * * <pre> * Output only. Labels for this request. * </pre> * * <code> * repeated .google.cloud.recaptchaenterprise.v1.AccountDefenderAssessment.AccountDefenderLabel labels = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return A list containing the labels. */ public java.util.List< com.google.recaptchaenterprise.v1.AccountDefenderAssessment.AccountDefenderLabel> getLabelsList() { return new com.google.protobuf.Internal.ListAdapter< java.lang.Integer, com.google.recaptchaenterprise.v1.AccountDefenderAssessment.AccountDefenderLabel>( labels_, labels_converter_); } /** * * * <pre> * Output only. Labels for this request. * </pre> * * <code> * repeated .google.cloud.recaptchaenterprise.v1.AccountDefenderAssessment.AccountDefenderLabel labels = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The count of labels. */ public int getLabelsCount() { return labels_.size(); } /** * * * <pre> * Output only. Labels for this request. * </pre> * * <code> * repeated .google.cloud.recaptchaenterprise.v1.AccountDefenderAssessment.AccountDefenderLabel labels = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @param index The index of the element to return. * @return The labels at the given index. */ public com.google.recaptchaenterprise.v1.AccountDefenderAssessment.AccountDefenderLabel getLabels(int index) { return labels_converter_.convert(labels_.get(index)); } /** * * * <pre> * Output only. Labels for this request. 
* </pre> * * <code> * repeated .google.cloud.recaptchaenterprise.v1.AccountDefenderAssessment.AccountDefenderLabel labels = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @param index The index to set the value at. * @param value The labels to set. * @return This builder for chaining. */ public Builder setLabels( int index, com.google.recaptchaenterprise.v1.AccountDefenderAssessment.AccountDefenderLabel value) { if (value == null) { throw new NullPointerException(); } ensureLabelsIsMutable(); labels_.set(index, value.getNumber()); onChanged(); return this; } /** * * * <pre> * Output only. Labels for this request. * </pre> * * <code> * repeated .google.cloud.recaptchaenterprise.v1.AccountDefenderAssessment.AccountDefenderLabel labels = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @param value The labels to add. * @return This builder for chaining. */ public Builder addLabels( com.google.recaptchaenterprise.v1.AccountDefenderAssessment.AccountDefenderLabel value) { if (value == null) { throw new NullPointerException(); } ensureLabelsIsMutable(); labels_.add(value.getNumber()); onChanged(); return this; } /** * * * <pre> * Output only. Labels for this request. * </pre> * * <code> * repeated .google.cloud.recaptchaenterprise.v1.AccountDefenderAssessment.AccountDefenderLabel labels = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @param values The labels to add. * @return This builder for chaining. */ public Builder addAllLabels( java.lang.Iterable< ? extends com.google.recaptchaenterprise.v1.AccountDefenderAssessment .AccountDefenderLabel> values) { ensureLabelsIsMutable(); for (com.google.recaptchaenterprise.v1.AccountDefenderAssessment.AccountDefenderLabel value : values) { labels_.add(value.getNumber()); } onChanged(); return this; } /** * * * <pre> * Output only. Labels for this request. 
* </pre> * * <code> * repeated .google.cloud.recaptchaenterprise.v1.AccountDefenderAssessment.AccountDefenderLabel labels = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return This builder for chaining. */ public Builder clearLabels() { labels_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Output only. Labels for this request. * </pre> * * <code> * repeated .google.cloud.recaptchaenterprise.v1.AccountDefenderAssessment.AccountDefenderLabel labels = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return A list containing the enum numeric values on the wire for labels. */ public java.util.List<java.lang.Integer> getLabelsValueList() { return java.util.Collections.unmodifiableList(labels_); } /** * * * <pre> * Output only. Labels for this request. * </pre> * * <code> * repeated .google.cloud.recaptchaenterprise.v1.AccountDefenderAssessment.AccountDefenderLabel labels = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @param index The index of the value to return. * @return The enum numeric value on the wire of labels at the given index. */ public int getLabelsValue(int index) { return labels_.get(index); } /** * * * <pre> * Output only. Labels for this request. * </pre> * * <code> * repeated .google.cloud.recaptchaenterprise.v1.AccountDefenderAssessment.AccountDefenderLabel labels = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @param index The index to set the value at. * @param value The enum numeric value on the wire for labels to set. * @return This builder for chaining. */ public Builder setLabelsValue(int index, int value) { ensureLabelsIsMutable(); labels_.set(index, value); onChanged(); return this; } /** * * * <pre> * Output only. Labels for this request. 
* </pre> * * <code> * repeated .google.cloud.recaptchaenterprise.v1.AccountDefenderAssessment.AccountDefenderLabel labels = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @param value The enum numeric value on the wire for labels to add. * @return This builder for chaining. */ public Builder addLabelsValue(int value) { ensureLabelsIsMutable(); labels_.add(value); onChanged(); return this; } /** * * * <pre> * Output only. Labels for this request. * </pre> * * <code> * repeated .google.cloud.recaptchaenterprise.v1.AccountDefenderAssessment.AccountDefenderLabel labels = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @param values The enum numeric values on the wire for labels to add. * @return This builder for chaining. */ public Builder addAllLabelsValue(java.lang.Iterable<java.lang.Integer> values) { ensureLabelsIsMutable(); for (int value : values) { labels_.add(value); } onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.recaptchaenterprise.v1.AccountDefenderAssessment) } // @@protoc_insertion_point(class_scope:google.cloud.recaptchaenterprise.v1.AccountDefenderAssessment) private static final com.google.recaptchaenterprise.v1.AccountDefenderAssessment DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.recaptchaenterprise.v1.AccountDefenderAssessment(); } public static com.google.recaptchaenterprise.v1.AccountDefenderAssessment getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<AccountDefenderAssessment> PARSER = new com.google.protobuf.AbstractParser<AccountDefenderAssessment>() { @java.lang.Override public 
AccountDefenderAssessment parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<AccountDefenderAssessment> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<AccountDefenderAssessment> getParserForType() { return PARSER; } @java.lang.Override public com.google.recaptchaenterprise.v1.AccountDefenderAssessment getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
oracle/graal
36,848
substratevm/src/com.oracle.svm.hosted/src/com/oracle/svm/hosted/c/info/InfoTreeBuilder.java
/* * Copyright (c) 2013, 2017, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. 
*/ package com.oracle.svm.hosted.c.info; import java.lang.annotation.Annotation; import java.lang.reflect.Modifier; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.TreeMap; import org.graalvm.nativeimage.c.constant.CConstant; import org.graalvm.nativeimage.c.constant.CEnum; import org.graalvm.nativeimage.c.constant.CEnumConstant; import org.graalvm.nativeimage.c.constant.CEnumLookup; import org.graalvm.nativeimage.c.constant.CEnumValue; import org.graalvm.nativeimage.c.struct.CBitfield; import org.graalvm.nativeimage.c.struct.CField; import org.graalvm.nativeimage.c.struct.CFieldAddress; import org.graalvm.nativeimage.c.struct.CFieldOffset; import org.graalvm.nativeimage.c.struct.CPointerTo; import org.graalvm.nativeimage.c.struct.CStruct; import org.graalvm.nativeimage.c.struct.RawField; import org.graalvm.nativeimage.c.struct.RawFieldAddress; import org.graalvm.nativeimage.c.struct.RawFieldOffset; import org.graalvm.nativeimage.c.struct.RawPointerTo; import org.graalvm.nativeimage.c.struct.RawStructure; import org.graalvm.nativeimage.c.struct.UniqueLocationIdentity; import org.graalvm.nativeimage.c.type.CTypedef; import org.graalvm.word.PointerBase; import com.oracle.graal.pointsto.infrastructure.WrappedElement; import com.oracle.graal.pointsto.infrastructure.WrappedJavaType; import com.oracle.graal.pointsto.meta.AnalysisMethod; import com.oracle.graal.pointsto.meta.AnalysisType; import com.oracle.graal.pointsto.util.GraalAccess; import com.oracle.svm.core.annotate.TargetElement; import com.oracle.svm.core.c.struct.PinnedObjectField; import com.oracle.svm.core.util.VMError; import com.oracle.svm.hosted.c.BuiltinDirectives; import com.oracle.svm.hosted.c.NativeCodeContext; import com.oracle.svm.hosted.c.NativeLibraries; import com.oracle.svm.hosted.c.info.AccessorInfo.AccessorKind; import com.oracle.svm.hosted.c.info.SizableInfo.ElementKind; import 
com.oracle.svm.hosted.cenum.CEnumCallWrapperMethod; import com.oracle.svm.hosted.substitute.AnnotationSubstitutionProcessor; import com.oracle.svm.util.ClassUtil; import jdk.graal.compiler.bytecode.BridgeMethodUtils; import jdk.graal.compiler.phases.util.Providers; import jdk.vm.ci.meta.JavaConstant; import jdk.vm.ci.meta.JavaKind; import jdk.vm.ci.meta.JavaType; import jdk.vm.ci.meta.MetaAccessProvider; import jdk.vm.ci.meta.ResolvedJavaField; import jdk.vm.ci.meta.ResolvedJavaMethod; import jdk.vm.ci.meta.ResolvedJavaType; public class InfoTreeBuilder { private final Providers originalProviders; private final NativeLibraries nativeLibs; private final NativeCodeContext codeCtx; private final NativeCodeInfo nativeCodeInfo; public InfoTreeBuilder(NativeLibraries nativeLibs, NativeCodeContext codeCtx) { this.nativeLibs = nativeLibs; this.codeCtx = codeCtx; boolean isBuiltin = codeCtx.getDirectives() instanceof BuiltinDirectives; String name; if (codeCtx.getDirectives() != null) { name = ClassUtil.getUnqualifiedName(codeCtx.getDirectives().getClass()); } else { StringBuilder nameBuilder = new StringBuilder(); String sep = ""; for (String headerFile : codeCtx.getDirectives().getHeaderFiles()) { nameBuilder.append(sep).append(headerFile); sep = "_"; } name = nameBuilder.toString(); } this.nativeCodeInfo = new NativeCodeInfo(name, codeCtx.getDirectives(), isBuiltin); originalProviders = GraalAccess.getOriginalProviders(); } public NativeCodeInfo construct() { for (ResolvedJavaMethod method : codeCtx.getConstantAccessors()) { createConstantInfo(method); } for (ResolvedJavaType type : codeCtx.getStructTypes()) { createStructInfo(type); } for (ResolvedJavaType type : codeCtx.getRawStructTypes()) { createRawStructInfo(type); } for (ResolvedJavaType type : codeCtx.getCPointerToTypes()) { createCPointerToInfo(type); } for (ResolvedJavaType type : codeCtx.getRawPointerToTypes()) { createRawPointerToInfo(type); } for (ResolvedJavaType type : codeCtx.getEnumTypes()) { 
createEnumInfo(type); } return nativeCodeInfo; } private MetaAccessProvider getMetaAccess() { return nativeLibs.getMetaAccess(); } protected void createConstantInfo(ResolvedJavaMethod method) { int actualParamCount = getParameterCount(method); if (actualParamCount != 0) { nativeLibs.addError("Wrong number of parameters: expected 0; found " + actualParamCount, method); return; } ResolvedJavaType returnType = AccessorInfo.getReturnType(method); if (returnType.getJavaKind() == JavaKind.Void || (returnType.getJavaKind() == JavaKind.Object && !nativeLibs.isString(returnType) && !nativeLibs.isByteArray(returnType) && !nativeLibs.isWordBase(returnType))) { nativeLibs.addError("Wrong return type: expected a primitive type, String, byte[], or a Word type; found " + returnType.toJavaName(true), method); return; } String constantName = getConstantName(method); ElementKind elementKind = elementKind(returnType, false); ConstantInfo constantInfo = new ConstantInfo(constantName, elementKind, method); nativeCodeInfo.adoptChild(constantInfo); nativeLibs.registerElementInfo(method, constantInfo); } private void createCPointerToInfo(ResolvedJavaType type) { if (!validInterfaceDefinition(type, CPointerTo.class)) { return; } List<AccessorInfo> accessorInfos = new ArrayList<>(); for (ResolvedJavaMethod method : type.getDeclaredMethods(false)) { AccessorKind accessorKind = returnsDeclaringClass(method) ? AccessorKind.ADDRESS : getAccessorKind(method); boolean isIndexed = getParameterCount(method) > (accessorKind == AccessorKind.SETTER ? 
1 : 0); AccessorInfo accessorInfo = new AccessorInfo(method, accessorKind, isIndexed, false, false); if (accessorValid(accessorInfo)) { accessorInfos.add(accessorInfo); nativeLibs.registerElementInfo(method, accessorInfo); } } String typeName = getCPointerToTypeName(type); String typedefName = getTypedefName(type); PointerToInfo pointerToInfo = new PointerToInfo(typeName, typedefName, elementKind(accessorInfos), type); pointerToInfo.adoptChildren(accessorInfos); nativeCodeInfo.adoptChild(pointerToInfo); nativeLibs.registerElementInfo(type, pointerToInfo); } private void createRawPointerToInfo(ResolvedJavaType type) { if (!validInterfaceDefinition(type, RawPointerTo.class)) { return; } List<AccessorInfo> accessorInfos = new ArrayList<>(); for (ResolvedJavaMethod method : type.getDeclaredMethods(false)) { AccessorKind accessorKind = returnsDeclaringClass(method) ? AccessorKind.ADDRESS : getAccessorKind(method); boolean isIndexed = getParameterCount(method) > (accessorKind == AccessorKind.SETTER ? 1 : 0); AccessorInfo accessorInfo = new AccessorInfo(method, accessorKind, isIndexed, false, false); if (accessorValid(accessorInfo)) { accessorInfos.add(accessorInfo); nativeLibs.registerElementInfo(method, accessorInfo); } } String typeName = getRawPointerToTypeName(type); RawPointerToInfo pointerToInfo = new RawPointerToInfo(typeName, elementKind(accessorInfos), type); pointerToInfo.adoptChildren(accessorInfos); nativeCodeInfo.adoptChild(pointerToInfo); nativeLibs.registerElementInfo(type, pointerToInfo); } private static int getParameterCount(ResolvedJavaMethod method) { return method.getSignature().getParameterCount(false); } private static boolean returnsDeclaringClass(ResolvedJavaMethod accessor) { return AccessorInfo.getReturnType(accessor).equals(accessor.getDeclaringClass()); } private static AccessorKind getAccessorKind(ResolvedJavaMethod accessor) { return accessor.getSignature().getReturnKind() == JavaKind.Void ? 
AccessorKind.SETTER : AccessorKind.GETTER; } public static String getTypedefName(ResolvedJavaType type) { CTypedef typedefAnnotation = type.getAnnotation(CTypedef.class); return typedefAnnotation != null ? typedefAnnotation.name() : null; } private void createStructInfo(ResolvedJavaType type) { if (!validInterfaceDefinition(type, CStruct.class)) { return; } Map<String, List<AccessorInfo>> fieldAccessorInfos = new TreeMap<>(); Map<String, List<AccessorInfo>> bitfieldAccessorInfos = new TreeMap<>(); List<AccessorInfo> structAccessorInfos = new ArrayList<>(); for (ResolvedJavaMethod method : type.getDeclaredMethods(false)) { if (!AnnotationSubstitutionProcessor.isIncluded(method.getAnnotation(TargetElement.class), ((AnalysisType) method.getDeclaringClass()).getJavaClass(), method)) { continue; } CField fieldAnnotation = getMethodAnnotation(method, CField.class); CFieldAddress fieldAddressAnnotation = getMethodAnnotation(method, CFieldAddress.class); CFieldOffset fieldOffsetAnnotation = getMethodAnnotation(method, CFieldOffset.class); CBitfield bitfieldAnnotation = getMethodAnnotation(method, CBitfield.class); final AccessorInfo accessorInfo; final String fieldName; if (fieldAnnotation != null) { accessorInfo = new AccessorInfo(method, getAccessorKind(method), false, hasLocationIdentityParameter(method), hasUniqueLocationIdentity(method)); fieldName = getStructFieldName(accessorInfo, fieldAnnotation.value()); } else if (bitfieldAnnotation != null) { accessorInfo = new AccessorInfo(method, getAccessorKind(method), false, hasLocationIdentityParameter(method), false); fieldName = getStructFieldName(accessorInfo, bitfieldAnnotation.value()); } else if (fieldAddressAnnotation != null) { accessorInfo = new AccessorInfo(method, AccessorKind.ADDRESS, false, false, false); fieldName = getStructFieldName(accessorInfo, fieldAddressAnnotation.value()); } else if (fieldOffsetAnnotation != null) { accessorInfo = new AccessorInfo(method, AccessorKind.OFFSET, false, false, false); 
fieldName = getStructFieldName(accessorInfo, fieldOffsetAnnotation.value()); } else if (returnsDeclaringClass(method)) { accessorInfo = new AccessorInfo(method, AccessorKind.ADDRESS, getParameterCount(method) > 0, false, false); fieldName = null; } else { nativeLibs.addError("Unexpected method without annotation", method); continue; } if (accessorValid(accessorInfo)) { if (fieldName == null) { structAccessorInfos.add(accessorInfo); } else { Map<String, List<AccessorInfo>> map = bitfieldAnnotation != null ? bitfieldAccessorInfos : fieldAccessorInfos; List<AccessorInfo> accessorInfos = map.computeIfAbsent(fieldName, _ -> new ArrayList<>()); accessorInfos.add(accessorInfo); } nativeLibs.registerElementInfo(method, accessorInfo); } } StructInfo structInfo = StructInfo.create(getStructName(type), type); structInfo.adoptChildren(structAccessorInfos); for (Map.Entry<String, List<AccessorInfo>> entry : fieldAccessorInfos.entrySet()) { StructFieldInfo fieldInfo = new StructFieldInfo(entry.getKey(), elementKind(entry.getValue())); fieldInfo.adoptChildren(entry.getValue()); structInfo.adoptChild(fieldInfo); } for (Map.Entry<String, List<AccessorInfo>> entry : bitfieldAccessorInfos.entrySet()) { if (fieldAccessorInfos.containsKey(entry.getKey())) { nativeLibs.addError("Bitfield and regular field accessor methods cannot be mixed", entry.getValue(), fieldAccessorInfos.get(entry.getKey())); } else if (elementKind(entry.getValue()) != ElementKind.INTEGER) { nativeLibs.addError("Bitfield accessor method must have integer kind", entry.getValue()); } StructBitfieldInfo bitfieldInfo = new StructBitfieldInfo(entry.getKey()); bitfieldInfo.adoptChildren(entry.getValue()); structInfo.adoptChild(bitfieldInfo); } nativeCodeInfo.adoptChild(structInfo); nativeLibs.registerElementInfo(type, structInfo); } private void createRawStructInfo(ResolvedJavaType type) { if (!validInterfaceDefinition(type, RawStructure.class)) { return; } Map<String, List<AccessorInfo>> fieldAccessorInfos = new 
TreeMap<>(); List<AccessorInfo> structAccessorInfos = new ArrayList<>(); for (ResolvedJavaMethod method : type.getDeclaredMethods(false)) { final AccessorInfo accessorInfo; final String fieldName; RawField fieldAnnotation = getMethodAnnotation(method, RawField.class); RawFieldAddress fieldAddressAnnotation = getMethodAnnotation(method, RawFieldAddress.class); RawFieldOffset fieldOffsetAnnotation = getMethodAnnotation(method, RawFieldOffset.class); if (fieldAnnotation != null) { accessorInfo = new AccessorInfo(method, getAccessorKind(method), false, hasLocationIdentityParameter(method), hasUniqueLocationIdentity(method)); fieldName = getStructFieldName(accessorInfo, ""); } else if (fieldAddressAnnotation != null) { accessorInfo = new AccessorInfo(method, AccessorKind.ADDRESS, false, false, false); fieldName = getStructFieldName(accessorInfo, ""); } else if (fieldOffsetAnnotation != null) { accessorInfo = new AccessorInfo(method, AccessorKind.OFFSET, false, false, false); fieldName = getStructFieldName(accessorInfo, ""); } else if (returnsDeclaringClass(method)) { accessorInfo = new AccessorInfo(method, AccessorKind.ADDRESS, getParameterCount(method) > 0, false, false); fieldName = null; } else { nativeLibs.addError("Unexpected method without annotation", method); continue; } if (accessorValid(accessorInfo)) { if (fieldName == null) { structAccessorInfos.add(accessorInfo); } else { List<AccessorInfo> accessorInfos = fieldAccessorInfos.computeIfAbsent(fieldName, _ -> new ArrayList<>()); accessorInfos.add(accessorInfo); } nativeLibs.registerElementInfo(method, accessorInfo); } } StructInfo structInfo = StructInfo.create(getStructName(type), type); structInfo.adoptChildren(structAccessorInfos); for (Map.Entry<String, List<AccessorInfo>> entry : fieldAccessorInfos.entrySet()) { StructFieldInfo fieldInfo = new StructFieldInfo(entry.getKey(), elementKind(entry.getValue())); fieldInfo.adoptChildren(entry.getValue()); structInfo.adoptChild(fieldInfo); 
verifyRawStructFieldAccessors(fieldInfo); }
    nativeCodeInfo.adoptChild(structInfo);
    nativeLibs.registerElementInfo(type, structInfo);
}

/**
 * Checks that a @RawStructure field is both readable and writable: it needs a GETTER
 * and a SETTER, or an ADDRESS/OFFSET accessor (which counts as both). Violations are
 * reported via {@code nativeLibs.addError}.
 */
private void verifyRawStructFieldAccessors(StructFieldInfo fieldInfo) {
    boolean hasGetter = false;
    boolean hasSetter = false;
    for (ElementInfo child : fieldInfo.getChildren()) {
        if (child instanceof AccessorInfo) {
            AccessorKind kind = ((AccessorInfo) child).getAccessorKind();
            if (kind == AccessorKind.GETTER) {
                hasGetter = true;
            } else if (kind == AccessorKind.SETTER) {
                hasSetter = true;
            } else if (kind == AccessorKind.ADDRESS || kind == AccessorKind.OFFSET) {
                // An address or offset accessor gives callers both read and write access.
                hasGetter = true;
                hasSetter = true;
            } else {
                throw VMError.shouldNotReachHere("Unexpected accessor kind: " + kind);
            }
        }
    }
    if (!hasSetter) {
        nativeLibs.addError(String.format("%s.%s does not have a setter. @RawStructure fields need both a getter and a setter.", fieldInfo.getParent().getName(), fieldInfo.getName()));
    }
    if (!hasGetter) {
        nativeLibs.addError(String.format("%s.%s does not have a getter. @RawStructure fields need both a getter and a setter.", fieldInfo.getParent().getName(), fieldInfo.getName()));
    }
}

/** True if the method's last parameter has the LocationIdentity type known to nativeLibs. */
private boolean hasLocationIdentityParameter(ResolvedJavaMethod method) {
    int parameterCount = getParameterCount(method);
    if (parameterCount == 0) {
        return false;
    }
    JavaType lastParam = AccessorInfo.getParameterType(method, parameterCount - 1);
    return nativeLibs.getLocationIdentityType().equals(lastParam);
}

/** True if the method carries the @UniqueLocationIdentity annotation. */
private static boolean hasUniqueLocationIdentity(ResolvedJavaMethod method) {
    return getMethodAnnotation(method, UniqueLocationIdentity.class) != null;
}

/**
 * Derives the overall element kind of a field from its getter/setter accessors.
 * Accessors that are neither getter nor setter are skipped; if getters and setters
 * disagree on the kind, an error is reported against both conflicting methods.
 */
private ElementKind elementKind(Collection<AccessorInfo> accessorInfos) {
    ElementKind overallKind = ElementKind.UNKNOWN;
    AccessorInfo overallKindAccessor = null;
    for (AccessorInfo accessorInfo : accessorInfos) {
        ResolvedJavaType type;
        switch (accessorInfo.getAccessorKind()) {
            case GETTER -> type = accessorInfo.getReturnType();
            case SETTER -> type = accessorInfo.getValueParameterType();
            default -> {
                continue;
            }
        }
ResolvedJavaMethod method = accessorInfo.getAnnotatedElement();
        ElementKind newKind = elementKind(type, isPinnedObjectFieldAccessor(method));
        if (overallKind == ElementKind.UNKNOWN) {
            // First classifiable accessor determines the field's kind.
            overallKind = newKind;
            overallKindAccessor = accessorInfo;
        } else if (overallKind != newKind) {
            nativeLibs.addError("Accessor methods mix integer, floating point, and pointer kinds", overallKindAccessor.getAnnotatedElement(), method);
        }
    }
    return overallKind;
}

/**
 * Maps a Java type to its element kind: integral primitives map to INTEGER,
 * floating-point primitives to FLOAT; object types are further classified via
 * nativeLibs predicates into INTEGER, OBJECT (pinned), STRING, BYTEARRAY, or POINTER.
 */
private ElementKind elementKind(ResolvedJavaType type, boolean isPinnedObject) {
    switch (type.getJavaKind()) {
        case Boolean, Byte, Char, Short, Int, Long -> {
            return ElementKind.INTEGER;
        }
        case Float, Double -> {
            return ElementKind.FLOAT;
        }
        case Object -> {
            if (nativeLibs.isIntegerType(type)) {
                return ElementKind.INTEGER;
            } else if (isPinnedObject) {
                return ElementKind.OBJECT;
            } else if (nativeLibs.isString(type)) {
                return ElementKind.STRING;
            } else if (nativeLibs.isByteArray(type)) {
                return ElementKind.BYTEARRAY;
            } else {
                return ElementKind.POINTER;
            }
        }
        default -> {
            return ElementKind.UNKNOWN;
        }
    }
}

/** True if the method carries the @PinnedObjectField annotation. */
private static boolean isPinnedObjectFieldAccessor(ResolvedJavaMethod method) {
    return getMethodAnnotation(method, PinnedObjectField.class) != null;
}

/**
 * Validates an accessor's signature against its accessor kind: parameter count,
 * index parameter type, and location-identity parameter. Returns false after
 * reporting an error when the accessor is invalid.
 */
private boolean accessorValid(AccessorInfo accessorInfo) {
    ResolvedJavaMethod method = accessorInfo.getAnnotatedElement();
    int expectedParamCount = accessorInfo.parameterCount(false);
    int actualParamCount = getParameterCount(method);
    if (actualParamCount != expectedParamCount) {
        nativeLibs.addError("Wrong number of parameters: expected " + expectedParamCount + "; found " + actualParamCount, method);
        return false;
    }
    if (accessorInfo.isIndexed()) {
        ResolvedJavaType paramType = accessorInfo.getParameterType(accessorInfo.indexParameterNumber(false));
        if (!nativeLibs.isIntegerType(paramType)) {
            nativeLibs.addError("Wrong type of index parameter 0: expected an integer type; found " + paramType.toJavaName(true), method);
            return false;
        }
    }
    if (accessorInfo.hasLocationIdentityParameter() && 
accessorInfo.hasUniqueLocationIdentity()) { nativeLibs.addError("Method cannot have annotation @" + UniqueLocationIdentity.class.getSimpleName() + " and a LocationIdentity parameter", method); return false; } if (accessorInfo.hasLocationIdentityParameter()) { ResolvedJavaType paramType = accessorInfo.getParameterType(accessorInfo.locationIdentityParameterNumber(false)); if (!nativeLibs.getLocationIdentityType().equals(paramType)) { nativeLibs.addError("Wrong type of locationIdentity parameter: expected " + nativeLibs.getLocationIdentityType().toJavaName(true) + "; found " + paramType.toJavaName(true), method); return false; } } ResolvedJavaType returnType = AccessorInfo.getReturnType(method); if (!checkObjectType(returnType, method)) { return false; } switch (accessorInfo.getAccessorKind()) { case ADDRESS -> { if (!nativeLibs.isPointerBase(returnType) || nativeLibs.isIntegerType(returnType)) { nativeLibs.addError("Wrong return type: expected a pointer type; found " + returnType.toJavaName(true), method); return false; } } case OFFSET -> { if (!nativeLibs.isIntegerType(returnType)) { nativeLibs.addError("Wrong return type: expected an integer type; found " + returnType.toJavaName(true), method); return false; } } case SETTER -> { if (!checkObjectType(accessorInfo.getValueParameterType(), method)) { return false; } } } return true; } private boolean checkObjectType(ResolvedJavaType type, ResolvedJavaMethod method) { if (type.getJavaKind() == JavaKind.Void) { return true; } else if (type.getJavaKind() == JavaKind.Object && !nativeLibs.isWordBase(type)) { if (!isPinnedObjectFieldAccessor(method)) { nativeLibs.addError("Wrong type: expected a primitive type or a Word type; found " + type.toJavaName(true) + ". 
Use the annotation @" + PinnedObjectField.class.getSimpleName() + " if you know what you are doing.", method); return false; } } else if (isPinnedObjectFieldAccessor(method)) { nativeLibs.addError("Wrong type: expected an object type; found " + type.toJavaName(true) + ". The annotation @" + PinnedObjectField.class.getSimpleName() + " may only be used for object fields.", method); return false; } return true; } private boolean validInterfaceDefinition(ResolvedJavaType type, Class<? extends Annotation> annotationClass) { assert type.getAnnotation(annotationClass) != null; if (!type.isInterface() || !nativeLibs.isPointerBase(type)) { nativeLibs.addError("Annotation @" + ClassUtil.getUnqualifiedName(annotationClass) + " can only be used on an interface that extends " + PointerBase.class.getSimpleName(), type); return false; } return true; } private static String removePrefix(String name, String prefix) { assert !prefix.isEmpty(); String result = name; if (result.startsWith(prefix)) { result = result.substring(prefix.length()); if (result.startsWith("_")) { result = result.substring("_".length()); } } return result; } private static String getConstantName(ResolvedJavaMethod method) { CConstant constantAnnotation = getMethodAnnotation(method, CConstant.class); String name = constantAnnotation.value(); if (name.isEmpty()) { name = method.getName(); /* Remove "get" prefix for automatically inferred names. 
*/ name = removePrefix(name, "get"); } return name; } private String getCPointerToTypeName(ResolvedJavaType type) { CPointerTo pointerToAnnotation = type.getAnnotation(CPointerTo.class); Class<?> pointerToType = pointerToAnnotation.value(); String nameOfCType = pointerToAnnotation.nameOfCType(); CStruct pointerToCStructAnnotation; CPointerTo pointerToCPointerAnnotation; do { pointerToCStructAnnotation = pointerToType.getAnnotation(CStruct.class); pointerToCPointerAnnotation = pointerToType.getAnnotation(CPointerTo.class); if (pointerToCStructAnnotation != null || pointerToCPointerAnnotation != null) { break; } pointerToType = pointerToType.getInterfaces().length == 1 ? pointerToType.getInterfaces()[0] : null; } while (pointerToType != null); int n = (!nameOfCType.isEmpty() ? 1 : 0) + (pointerToCStructAnnotation != null ? 1 : 0) + (pointerToCPointerAnnotation != null ? 1 : 0); if (n != 1) { nativeLibs.addError("Exactly one of " + // "1) literal C type name, " + // "2) class annotated with @" + CStruct.class.getSimpleName() + ", or " + // "3) class annotated with @" + CPointerTo.class.getSimpleName() + " must be specified in @" + CPointerTo.class.getSimpleName() + " annotation", type); return "__error"; } if (pointerToCStructAnnotation != null) { return getStructName(getMetaAccess().lookupJavaType(pointerToType)) + "*"; } else if (pointerToCPointerAnnotation != null) { return getCPointerToTypeName(getMetaAccess().lookupJavaType(pointerToType)) + "*"; } else { return nameOfCType; } } private String getRawPointerToTypeName(ResolvedJavaType type) { RawPointerTo pointerToAnnotation = type.getAnnotation(RawPointerTo.class); Class<?> pointerToType = pointerToAnnotation.value(); RawStructure pointerToRawStructAnnotation; RawPointerTo pointerToRawPointerAnnotation; do { pointerToRawStructAnnotation = pointerToType.getAnnotation(RawStructure.class); pointerToRawPointerAnnotation = pointerToType.getAnnotation(RawPointerTo.class); if (pointerToRawStructAnnotation != null || 
pointerToRawPointerAnnotation != null) { break; } pointerToType = pointerToType.getInterfaces().length == 1 ? pointerToType.getInterfaces()[0] : null; } while (pointerToType != null); int n = (pointerToRawStructAnnotation != null ? 1 : 0) + (pointerToRawPointerAnnotation != null ? 1 : 0); if (n != 1) { nativeLibs.addError("Exactly one of " + // "1) class annotated with @" + RawStructure.class.getSimpleName() + ", or " + // "2) class annotated with @" + RawPointerTo.class.getSimpleName() + " must be specified in @" + RawPointerTo.class.getSimpleName() + " annotation", type); return "__error"; } if (pointerToRawStructAnnotation != null) { return getStructName(getMetaAccess().lookupJavaType(pointerToType)) + "*"; } else { assert pointerToRawPointerAnnotation != null; return getRawPointerToTypeName(getMetaAccess().lookupJavaType(pointerToType)) + "*"; } } private static String getStructName(ResolvedJavaType type) { CStruct structAnnotation = type.getAnnotation(CStruct.class); if (structAnnotation == null) { RawStructure rsanno = type.getAnnotation(RawStructure.class); assert rsanno != null : "Unexpected struct type " + type; return getSimpleJavaName(type); } String name = structAnnotation.value(); if (name.isEmpty()) { name = getSimpleJavaName(type); } if (structAnnotation.addStructKeyword()) { name = "struct " + name; } return name; } private static String getSimpleJavaName(ResolvedJavaType type) { String name = type.toJavaName(false); int innerClassSeparator = name.lastIndexOf('$'); if (innerClassSeparator >= 0) { name = name.substring(innerClassSeparator + 1); } return name; } private static String getStructFieldName(AccessorInfo info, String annotationValue) { if (!annotationValue.isEmpty()) { return annotationValue; } else { return removePrefix(info.getAnnotatedElement().getName(), info.getAccessorPrefix()); } } private void createEnumInfo(ResolvedJavaType type) { if (!nativeLibs.isEnum(type)) { nativeLibs.addError("Annotation @" + CEnum.class.getSimpleName() + " 
can only be used on an Java enumeration", type); return; } CEnum annotation = type.getAnnotation(CEnum.class); String name = annotation.value(); if (name.isEmpty()) { name = "int"; } else if (annotation.addEnumKeyword()) { name = "enum " + name; } EnumInfo enumInfo = new EnumInfo(name, type); /* Use the wrapped type to avoid registering all CEnum annotated classes as reachable. */ ResolvedJavaType wrappedType = ((WrappedJavaType) type).getWrapped(); for (ResolvedJavaField field : wrappedType.getStaticFields()) { assert Modifier.isStatic(field.getModifiers()); if (Modifier.isFinal(field.getModifiers()) && field.getType().equals(wrappedType)) { createEnumConstantInfo(enumInfo, field); } } for (ResolvedJavaMethod method : type.getDeclaredMethods(false)) { AnalysisMethod analysisMethod = (AnalysisMethod) method; CEnumValue cEnumValue = getMethodAnnotation(method, CEnumValue.class); if (cEnumValue != null) { addCEnumValueMethod(enumInfo, analysisMethod); } CEnumLookup cEnumLookup = getMethodAnnotation(method, CEnumLookup.class); if (cEnumLookup != null) { addCEnumLookupMethod(enumInfo, analysisMethod); } } nativeCodeInfo.adoptChild(enumInfo); nativeLibs.registerElementInfo(type, enumInfo); } private void createEnumConstantInfo(EnumInfo enumInfo, ResolvedJavaField field) { JavaConstant enumValue = originalProviders.getConstantReflection().readFieldValue(field, null); ResolvedJavaType originalType = originalProviders.getMetaAccess().lookupJavaType(enumValue); assert enumValue.isNonNull() && originalType.equals(((WrappedElement) enumInfo.getAnnotatedElement()).getWrapped()); CEnumConstant fieldAnnotation = field.getAnnotation(CEnumConstant.class); String name = ""; boolean includeInLookup = true; if (fieldAnnotation != null) { name = fieldAnnotation.value(); includeInLookup = fieldAnnotation.includeInLookup(); } if (name.isEmpty()) { name = field.getName(); } Enum<?> value = originalProviders.getSnippetReflection().asObject(Enum.class, enumValue); EnumConstantInfo 
constantInfo = new EnumConstantInfo(name, field, includeInLookup, value); enumInfo.adoptChild(constantInfo); } private void addCEnumValueMethod(EnumInfo enumInfo, AnalysisMethod method) { /* Check the modifiers of the original method. The synthetic method is not native. */ ResolvedJavaMethod originalMethod = originalMethod(method); if (!Modifier.isNative(originalMethod.getModifiers()) || Modifier.isStatic(originalMethod.getModifiers())) { nativeLibs.addError("Method annotated with @" + CEnumValue.class.getSimpleName() + " must be a non-static native method", method); return; } else if (getParameterCount(method) != 0) { nativeLibs.addError("Method annotated with @" + CEnumValue.class.getSimpleName() + " cannot have parameters", method); return; } else if (!nativeLibs.isIntegerType(AccessorInfo.getReturnType(method))) { nativeLibs.addError("Method annotated with @" + CEnumValue.class.getSimpleName() + " must return an integer type", method); return; } enumInfo.addCEnumValueMethod(method); } private void addCEnumLookupMethod(EnumInfo enumInfo, AnalysisMethod method) { /* Check the modifiers of the original method. The synthetic method is not native. 
*/ ResolvedJavaMethod originalMethod = originalMethod(method); if (!Modifier.isNative(originalMethod.getModifiers()) || !Modifier.isStatic(originalMethod.getModifiers())) { nativeLibs.addError("Method annotated with @" + CEnumLookup.class.getSimpleName() + " must be a static native method", method); return; } else if (getParameterCount(method) != 1 || elementKind(AccessorInfo.getParameterType(method, 0), false) != ElementKind.INTEGER) { nativeLibs.addError("Method annotated with @" + CEnumLookup.class.getSimpleName() + " must have exactly one integer parameter", method); return; } else if (!returnsDeclaringClass(method)) { nativeLibs.addError("Return type of method annotated with @" + CEnumLookup.class.getSimpleName() + " must be the annotation type", method); return; } enumInfo.addCEnumLookupMethod(method); } private static ResolvedJavaMethod originalMethod(AnalysisMethod method) { assert method.getWrapped() instanceof CEnumCallWrapperMethod; CEnumCallWrapperMethod wrapperMethod = (CEnumCallWrapperMethod) method.getWrapped(); return wrapperMethod.getOriginal(); } private static <T extends Annotation> T getMethodAnnotation(ResolvedJavaMethod method, Class<T> annotationClass) { /* * The Eclipse Java compiler does not emit annotations for bridge methods that are emitted * when overwriting a method with covariant return types. As a workaround, we look up the * original method and use the annotations of the original method. */ return BridgeMethodUtils.getAnnotation(annotationClass, method); } }
/*
 * --- Dataset artifact: the three metadata lines below marked the start of a second,
 * unrelated file concatenated into this chunk (repo apache/derby, 36,467 bytes):
 * java/org.apache.derby.tests/org/apache/derbyTesting/functionTests/tests/jdbcapi/CacheSessionDataTest.java
 */
/* Derby - Class org.apache.derbyTesting.functionTests.tests.jdbcapi.CacheSessionDataTest Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.derbyTesting.functionTests.tests.jdbcapi; import java.sql.CallableStatement; import java.sql.Connection; import java.sql.DatabaseMetaData; import java.sql.DriverManager; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.sql.Types; import java.util.Arrays; import junit.framework.Test; import org.apache.derby.client.am.ClientConnection; import org.apache.derbyTesting.junit.BaseJDBCTestCase; import org.apache.derbyTesting.junit.BaseTestSuite; import org.apache.derbyTesting.junit.CleanDatabaseTestSetup; import org.apache.derbyTesting.junit.JDBC; import org.apache.derbyTesting.junit.TestConfiguration; /** * Utility class for representing isolation levels. Provides a convenient way * to track the JDBC constant, the JDBC constant name and the SQL name of an * isolation level. By overriding equals and toString it becomes convenient to * use this class in JUnit's assertEquals methods. 
*/ final class IsoLevel { private final int isoLevel_; private final String jdbcName_; private final String sqlName_; /** * Constructs an IsoLevel object from a ResultSet. The ResultSet must be * equivalent to 'SELECT * FROM ISOLATION_NAMES'. Calls next() on the * ResultSet, so the caller must position the ResultSet on the row before * the row that is to be used to create the IsoLevel object. * @param rs ResultSet holding isolation level descriptions * @throws java.sql.SQLException */ public IsoLevel(ResultSet rs) throws SQLException { rs.next(); isoLevel_ = rs.getInt("ISOLEVEL"); jdbcName_ = rs.getString("JDBCNAME"); sqlName_ = rs.getString("SQLNAME"); } public int getIsoLevel() { return isoLevel_; } public String getJdbcName() { return jdbcName_; } public String getSqlName() { return sqlName_; } public String toString() { return "(" + jdbcName_ + ", " + sqlName_ + ")"; } public boolean equals(Object that) { if (this == that) { return true; } if (that instanceof IsoLevel) { return (isoLevel_ == ((IsoLevel) that).isoLevel_); } return false; } public int hashCode() { return isoLevel_; } } /** * This is a test for DERBY-3192 (https://issues.apache.org/jira/browse/DERBY-3192) * which tries to avoid unecessary roundtrips by piggybacking session * information on the messages going back to the client. The goal is that * whenever a user requests session information from the client driver, the * correct information should already be available and no special roundtrip * be required. * So far the test only checks caching of the isolation level, but other * session attributes can be added later. The test attempts to "fool" * the caching mechanism by * modifying the isolation level without going through the client's * Connection.setTransactionIsolation method. * The effect of modifying the isolation level in and * out of XA transactions is covered by the XA tests and not tested here. 
*/ public class CacheSessionDataTest extends BaseJDBCTestCase { public CacheSessionDataTest(String name) { super(name); } /** * Adds both the embedded and client-server versions of the baseSuite to * the Test. An empty BaseTestSuite is returned unless we have JDBC3 support, because all test cases call verifyCachedIsolation() which in turn makes use of getTransactionIsolationJDBC() (GET_TRANSACTION_ISOLATION_JDBC) which uses DriverManager to access the default connection. * @return the resulting Test object */ public static Test suite() { BaseTestSuite suite = new BaseTestSuite("CacheSessionDataTest"); if (JDBC.vmSupportsJDBC3()) { suite.addTest(baseSuite("CacheSessionDataTest:embedded")); suite.addTest(TestConfiguration.clientServerDecorator( baseSuite("CacheSessionDataTest:client"))); } return suite; } /** * Creates a new BaseTestSuite with all the tests, and wraps it in a CleanDatabaseSetup with a custom decorator. * @param name BaseTestSuite name * @return wrapped TestSuite */ private static Test baseSuite(String name) { BaseTestSuite suite = new BaseTestSuite(name); suite.addTestSuite(CacheSessionDataTest.class); return new CleanDatabaseTestSetup(suite) { /** * Creates the tables, stored procedures, and functions * shared by all test cases. 
* @throws SQLException */ protected void decorateSQL(Statement s) throws SQLException { s.execute("CREATE TABLE ISOLATION_NAMES(ISOLEVEL INT, JDBCNAME " + "VARCHAR(30), SQLNAME VARCHAR(2))"); PreparedStatement insert = s.getConnection().prepareStatement( "INSERT INTO ISOLATION_NAMES VALUES (?, ?, ?)"); insert.setInt(1, Connection.TRANSACTION_NONE); insert.setString(2, "TRANSACTION_NONE"); insert.setNull(3, Types.VARCHAR); insert.execute(); insert.setInt(1, Connection.TRANSACTION_READ_UNCOMMITTED); insert.setString(2, "TRANSACTION_READ_UNCOMMITTED"); insert.setString(3, "UR"); insert.execute(); insert.setInt(1, Connection.TRANSACTION_READ_COMMITTED); insert.setString(2, "TRANSACTION_READ_COMMITTED"); insert.setString(3, "CS"); insert.execute(); insert.setInt(1, Connection.TRANSACTION_REPEATABLE_READ); insert.setString(2, "TRANSACTION_REPEATABLE_READ"); insert.setString(3, "RS"); insert.execute(); insert.setInt(1, Connection.TRANSACTION_SERIALIZABLE); insert.setString(2, "TRANSACTION_SERIALIZABLE"); insert.setString(3, "RR"); insert.execute(); insert.close(); s.execute("CREATE TABLE BIG(C1 VARCHAR(32672), " + "C2 VARCHAR(32672), C3 VARCHAR(32672), C4 VARCHAR(32672))"); s.execute("CREATE PROCEDURE INSERTDATA1(IN A INT) LANGUAGE JAVA " + "PARAMETER STYLE JAVA EXTERNAL NAME " + "'org.apache.derbyTesting.functionTests.util." 
+ "ProcedureTest.bigTestData'"); CallableStatement cs = s.getConnection().prepareCall("CALL INSERTDATA1(?)"); cs.setInt(1,9); for (int i = 0; i < 10; ++i) { cs.execute(); } ResultSet x = s.executeQuery("SELECT COUNT(*) FROM BIG"); x.next(); println("BIG has "+x.getInt(1)+" rows"); // Create procedures s.execute("CREATE PROCEDURE SET_ISOLATION_JDBC" + " (ISO INT) NO SQL LANGUAGE JAVA PARAMETER STYLE " + "JAVA EXTERNAL NAME '" + CacheSessionDataTest.class.getName() + ".setIsolationJDBC'"); s.execute("CREATE PROCEDURE SET_ISOLATION_SQL " + "(SQLNAME VARCHAR(2)) MODIFIES SQL DATA LANGUAGE JAVA PARAMETER STYLE " + "JAVA EXTERNAL NAME '" + CacheSessionDataTest.class.getName() + ".setIsolationSQL'"); // Create functions s.execute("CREATE FUNCTION GET_TRANSACTION_ISOLATION_JDBC " + "() RETURNS INT NO SQL LANGUAGE JAVA " + "PARAMETER STYLE JAVA EXTERNAL NAME '" + CacheSessionDataTest.class.getName() + ".getTransactionIsolationJDBC'"); s.execute("CREATE FUNCTION GET_CYCLE_ISOLATION_JDBC " + "() RETURNS INT NO SQL LANGUAGE JAVA " + "PARAMETER STYLE JAVA EXTERNAL NAME '" + CacheSessionDataTest.class.getName() + ".getCycleIsolationJDBC'"); s.execute("CREATE FUNCTION GET_CYCLE_ISOLATION_SQL " + "() RETURNS VARCHAR(2) READS SQL DATA LANGUAGE JAVA " + "PARAMETER STYLE JAVA EXTERNAL NAME '" + CacheSessionDataTest.class.getName() + ".getCycleIsolationSQL'"); // Schema testing s.execute("CREATE SCHEMA FOO"); String unicodeschema = "\u00bbMY\u20ac\u00ab"; s.execute("CREATE SCHEMA \"" + unicodeschema + "\""); s.execute("CREATE PROCEDURE APP.SET_SCHEMA (SCHEMANAME " + "VARCHAR(128)) MODIFIES SQL DATA LANGUAGE JAVA " + "PARAMETER STYLE JAVA EXTERNAL NAME '" + CacheSessionDataTest.class.getName() + ".setSchema'"); s.execute("CREATE FUNCTION APP.GET_SCHEMA_TRANSITION " + "(SCHEMANAME VARCHAR(128)) RETURNS VARCHAR(128) READS " + "SQL DATA LANGUAGE JAVA PARAMETER STYLE JAVA EXTERNAL " + "NAME '" + CacheSessionDataTest.class.getName() + ".getSchemaTransition'"); s.execute("CREATE TABLE 
APP.LARGE(X VARCHAR(32000), " + "SCHEMANAME VARCHAR(128), Y VARCHAR(32000))"); char[] carray = new char[32000]; Arrays.fill(carray, 'x'); String xs = new String(carray); Arrays.fill(carray, 'y'); String ys = new String(carray); s.execute("INSERT INTO APP.LARGE (SELECT '" + xs + "', " + "SCHEMANAME, " + " '" + ys + "' FROM SYS.SYSSCHEMAS)"); } }; } // End baseSuite /** * Turns off auto commit on the default connection and verifies that the * isolation level is read committed. Initailizes the array 'isoLevels' * with the 4 standard isolation levels if this has not already been done. * @throws java.sql.SQLException */ public void setUp() throws SQLException { Connection c = getConnection(); c.setAutoCommit(false); assertEquals(Connection.TRANSACTION_READ_COMMITTED, c.getTransactionIsolation()); if (isoLevels == null) { Statement s = createStatement(); ResultSet rs = s.executeQuery( "SELECT * FROM ISOLATION_NAMES WHERE ISOLEVEL > 0 " + "ORDER BY ISOLEVEL"); isoLevels = new IsoLevel[4]; for (int i = 0; i < 4; ++i) { isoLevels[i] = new IsoLevel(rs); println(isoLevels[i].toString()); } assertFalse(rs.next()); rs.close(); s.close(); } assertNotNull(isoLevels[0]); assertNotNull(isoLevels[1]); assertNotNull(isoLevels[2]); assertNotNull(isoLevels[3]); } /** * Removes all tables in schema APP which has the prefix 'T', before calling * super.tearDown(). * @throws java.lang.Exception */ public void tearDown() throws Exception { DatabaseMetaData meta = getConnection().getMetaData(); ResultSet tables = meta.getTables(null, "APP", "T%", null); Statement s = createStatement(); while (tables.next()) { s.execute("DROP TABLE " + tables.getString("TABLE_NAME")); } tables.close(); s.close(); commit(); super.tearDown(); } /** * Implementation of the stored procedure SET_ISOLATION_JDBC. * Sets the the isolation level given as argument on the default connection * using Connection.setTransactionIasolation. 
* @param isolation JDBC isolation level constant representing the * new isolation level * @throws java.sql.SQLException */ public static void setIsolationJDBC(int isolation) throws SQLException { Connection c = DriverManager.getConnection("jdbc:default:connection"); c.setTransactionIsolation(isolation); } /** * Implementation of the SQL function SET_ISOLATION_SQL. * Sets the the isolation level given as argument on the default connection * using SQL. * @param sqlName SQL string representing the new isolation level * @throws java.sql.SQLException */ public static void setIsolationSQL(String sqlName) throws SQLException { Connection c = DriverManager.getConnection("jdbc:default:connection"); Statement s = c.createStatement(); s.execute("SET ISOLATION " + sqlName); s.close(); } /** * Implementation of the SQL function GET_TRANSACTION_ISOLATION_JDBC. * Returns the isolation level reported by the default EmbedConnection * on the server. Used to verify that the isolation level reported by * the client is correct. * @return JDBC isolation level constant reported by the embedded driver * @throws java.sql.SQLException */ public static int getTransactionIsolationJDBC() throws SQLException { return DriverManager.getConnection("jdbc:default:connection"). getTransactionIsolation(); } /** * Implementation of the SQL function GET_CYCLE_ISOLATION_JDBC. * Cycles the isolation level on the default Connection. * @return the new JDBC isolation level constant * @throws java.sql.SQLException */ public static int getCycleIsolationJDBC() throws SQLException { Connection c = DriverManager.getConnection("jdbc:default:connection"); c.setTransactionIsolation(cycleIsolation().getIsoLevel()); println("getCycleIsolationJDBC() -> "+c.getTransactionIsolation()); return c.getTransactionIsolation(); } /** * Implementation of the SQL function GET_CYCLE_ISOLATION_SQL. * Cycles the isolation level on the default Connection. 
* @return the SQL name of the new isolation level * @throws java.sql.SQLException */ public static String getCycleIsolationSQL() throws SQLException { Connection c = DriverManager.getConnection("jdbc:default:connection"); Statement s = c.createStatement(); s.execute("SET ISOLATION "+cycleIsolation().getSqlName()); ResultSet rs = s.executeQuery("VALUES CURRENT ISOLATION"); rs.next(); String sqlName = rs.getString(1); rs.close(); s.close(); println("getCycleIsolationSQL() -> "+sqlName); return sqlName; } /** * Implementation of the SQL procedure SET_SCHEMA. * Sets a different schema on the default Connection. * @param schemaName name of the new schema * @throws java.sql.SQLException */ public static void setSchema(String schemaName) throws SQLException { Connection c = DriverManager.getConnection("jdbc:default:connection"); Statement s = c.createStatement(); s.execute("SET SCHEMA " + schemaName); s.close(); } /** * Implementation of the SQL function GET_SCHEMA_TRANSITION. * Sets the current schema to the name given as argument and returns the * schema transition. * @param nextSchema schema to transition to * @return a string of the form oldSchema-&gt;newSchema * @throws java.sql.SQLException */ public static String getSchemaTransition(String nextSchema) throws SQLException { Connection c = DriverManager.getConnection("jdbc:default:connection"); Statement s = c.createStatement(); ResultSet rs = s.executeQuery("VALUES CURRENT SCHEMA"); rs.next(); String prevSchema = rs.getString(1); rs.close(); s.execute("SET SCHEMA \"" + nextSchema + "\""); s.close(); return (prevSchema + "->" + nextSchema); } // Utilities private static IsoLevel[] isoLevels; private static int isolationIndex = -1; /** * Utility that cycles through the legal isolation levels in the following * order: read uncommitted -&gt; read committed -&gt; repeatable read -&gt; * serializable -&gt; read uncommitted -&gt; ... * @return IsoLevel object representing the isolation level. 
*/ private static IsoLevel cycleIsolation() { ++isolationIndex; isolationIndex %= 4; return isoLevels[isolationIndex]; } /** * Utility that verifies that the isolation level reported by the client * is the same as evaluating 'VALUES CURRENT ISOLATION' and getting the * isolation level from the EmbedConnection on the server. * @param c Connection to check * @throws java.sql.SQLException */ private void verifyCachedIsolation(Connection c) throws SQLException { final int clientInt = c.getTransactionIsolation(); Statement s = createStatement(); final IsoLevel serverSql = new IsoLevel(s.executeQuery( "SELECT * FROM ISOLATION_NAMES " + "WHERE SQLNAME = (VALUES CURRENT ISOLATION)")); final IsoLevel serverJdbc = new IsoLevel(s.executeQuery( "SELECT * FROM ISOLATION_NAMES " + "WHERE ISOLEVEL = GET_TRANSACTION_ISOLATION_JDBC()")); final IsoLevel client = new IsoLevel(s.executeQuery("SELECT * FROM " + "ISOLATION_NAMES WHERE ISOLEVEL = "+clientInt)); s.getResultSet().close(); s.close(); assertEquals(serverSql, client); assertEquals(serverJdbc, client); } private void verifyCachedSchema(Connection c) throws SQLException { if (usingDerbyNetClient()) { String cached = ((ClientConnection) c).getCurrentSchemaName(); Statement s = c.createStatement(); ResultSet rs = s.executeQuery("VALUES CURRENT SCHEMA"); rs.next(); String reported = rs.getString(1); assertEquals(reported, cached); } else { println("Cannot verify cached schema for "+c.getClass()); } } // Test cases (fixtures) // Change the isolation level using SQL public void testChangeIsoLevelStatementSQL() throws SQLException { Connection c = getConnection(); Statement s = createStatement(); for (int i = 0; i < 4; ++i) { s.execute("SET ISOLATION "+isoLevels[i].getSqlName()); verifyCachedIsolation(c); } s.close(); } public void testChangeIsoLevelPreparedStatementSQL() throws SQLException { Connection c = getConnection(); for (int i = 0; i < 4; ++i) { PreparedStatement ps = prepareStatement("SET ISOLATION " + 
isoLevels[i].getSqlName()); ps.execute(); verifyCachedIsolation(c); ps.close(); } } // Change the isolation level using a function public void testChangeIsoLevelFunctionJDBC() throws SQLException { Connection c = getConnection(); c.setAutoCommit(true); Statement s = createStatement(); s.execute("CREATE TABLE T1(ISOLEVEL INT)"); for (int i = 0; i < 4; ++i) { s.execute("INSERT INTO T1 VALUES GET_CYCLE_ISOLATION_JDBC()"); verifyCachedIsolation(c); } s.close(); } public void testChangeIsoLevelFunctionSQL() throws SQLException { Connection c = getConnection(); c.setAutoCommit(true); Statement s = createStatement(); s.execute("CREATE TABLE T1(SQLNAME VARCHAR(2))"); for (int i = 0; i < 4; ++i) { s.executeUpdate("INSERT INTO T1 VALUES GET_CYCLE_ISOLATION_SQL()"); verifyCachedIsolation(c); } s.close(); } public void testChangeIsoLevelPreparedFunctionJDBC() throws SQLException { Connection c = getConnection(); c.setAutoCommit(true); Statement s = createStatement(); s.execute("CREATE TABLE T1(ISOLEVEL INT)"); PreparedStatement ps = prepareStatement("INSERT INTO T1 VALUES " + "GET_CYCLE_ISOLATION_JDBC()"); for (int i = 0; i < 4; ++i) { ps.executeUpdate(); verifyCachedIsolation(c); } ps.close(); } public void testChangeIsoLevelPreparedFunctionSQL() throws SQLException { Connection c = getConnection(); c.setAutoCommit(true); Statement s = createStatement(); s.execute("CREATE TABLE T1(SQLNAME VARCHAR(2))"); PreparedStatement ps = prepareStatement("INSERT INTO T1 VALUES " + "GET_CYCLE_ISOLATION_SQL()"); for (int i = 0; i < 4; ++i) { ps.executeUpdate(); verifyCachedIsolation(c); } ps.close(); } // Change isolation level from a stored procedure public void testChangeIsoLevelProcedureJDBC() throws SQLException { Connection c = getConnection(); Statement s = createStatement(); for (int i = 0; i < 4; ++i) { s.execute("CALL SET_ISOLATION_JDBC(" + isoLevels[i].getIsoLevel() + ")"); verifyCachedIsolation(c); } s.close(); } public void testChangeIsoLevelProcedureSQL() throws SQLException { 
Connection c = getConnection(); Statement s = createStatement(); for (int i = 0; i < 4; ++i) { s.execute("CALL SET_ISOLATION_SQL('" + isoLevels[i].getSqlName() + "')"); verifyCachedIsolation(c); } s.close(); } // Changing isolation level from a stored procedure invoked from a // callable statement public void testChangeIsoLevelCallableStatementJDBC() throws SQLException { Connection c = getConnection(); CallableStatement cs = prepareCall("CALL SET_ISOLATION_JDBC(?)"); for (int i = 0; i < 4; ++i) { cs.setInt(1, isoLevels[i].getIsoLevel()); cs.execute(); verifyCachedIsolation(c); } cs.close(); } public void testChangeIsoLevelCallableStatementSQL() throws SQLException { Connection c = getConnection(); CallableStatement cs = prepareCall("CALL SET_ISOLATION_SQL(?)"); for (int i = 0; i < 4; ++i) { cs.setString(1, isoLevels[i].getSqlName()); cs.execute(); verifyCachedIsolation(c); } cs.close(); } // Changing isolation level from executeBatch() public void testChangeIsoLevelSQLInBatch() throws SQLException { Statement s = createStatement(); for (int i = 0; i < isoLevels.length; ++i) { s.addBatch("SET ISOLATION " + isoLevels[i].getSqlName()); } try { s.executeBatch(); } catch (SQLException e) { SQLException prev = e; while (e != null) { prev = e; e = e.getNextException(); } throw prev; } verifyCachedIsolation(s.getConnection()); s.close(); } public void testChangeIsoLevelProcedureJdbcBatch() throws SQLException { Statement s = createStatement(); for (int i = 0; i < isoLevels.length; ++i) { s.addBatch("CALL SET_ISOLATION_JDBC(" + isoLevels[i].getIsoLevel() + ")"); } try { s.executeBatch(); } catch (SQLException e) { SQLException prev = e; while (e != null) { prev = e; e = e.getNextException(); } throw prev; } verifyCachedIsolation(s.getConnection()); s.close(); } public void testChangeIsoLevelProcedureSqlBatch() throws SQLException { Statement s = createStatement(); for (int i = 0; i < isoLevels.length; ++i) { s.addBatch("CALL SET_ISOLATION_SQL('" + isoLevels[i].getSqlName() 
+ "')"); } try { s.executeBatch(); } catch (SQLException e) { SQLException prev = e; while (e != null) { prev = e; e = e.getNextException(); } throw prev; } verifyCachedIsolation(s.getConnection()); s.close(); } public void testChangeIsoLevelProcedureJdbcCallableBatch() throws SQLException { CallableStatement cs = prepareCall("CALL SET_ISOLATION_JDBC(?)"); for (int i = 0; i < isoLevels.length; ++i) { cs.setInt(1, isoLevels[i].getIsoLevel()); cs.addBatch(); } try { cs.executeBatch(); } catch (SQLException e) { SQLException prev = e; while (e != null) { prev = e; e = e.getNextException(); } throw prev; } verifyCachedIsolation(cs.getConnection()); cs.close(); } public void testChangeIsoLevelProcedureSqlCallableBatch() throws SQLException { CallableStatement cs = prepareCall("CALL SET_ISOLATION_SQL(?)"); for (int i = 0; i < isoLevels.length; ++i) { cs.setString(1, isoLevels[i].getSqlName()); cs.addBatch(); } try { cs.executeBatch(); } catch (SQLException e) { SQLException prev = e; while (e != null) { prev = e; e = e.getNextException(); } throw prev; } verifyCachedIsolation(cs.getConnection()); cs.close(); } /** * Utility method for testing Statements that return different * types of ResultSets to check that piggybacking doesn't cause problems. * @param table table to select from * @param type type of ResultSet * @param concur concurrency of ResultSet * @throws java.sql.SQLException */ private void cursorTest(String table, int type, int concur) throws SQLException { Connection c = getConnection(); Statement s = createStatement(type, concur); ResultSet rs = s.executeQuery("SELECT * FROM "+table); verifyCachedIsolation(c); while (rs.next()) { verifyCachedIsolation(c); } rs.close(); s.close(); } /** * Utility method for testing PreparedStatements that return different * types of ResultSets to check that piggybacking doesn't cause problems. 
* * @param table table to select from * @param type type of ResultSet * @param concur concurrency of ResultSet * @throws java.sql.SQLException */ private void preparedCursorTest(String table, int type, int concur) throws SQLException { Connection c = getConnection(); PreparedStatement ps = c.prepareStatement("SELECT * FROM " + table, type, concur); ResultSet rs = ps.executeQuery(); verifyCachedIsolation(c); while (rs.next()) { verifyCachedIsolation(c); } rs.close(); ps.close(); } public void testSmallForwardOnlyReadOnly() throws SQLException { cursorTest("ISOLATION_NAMES", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); } public void testSmallScrollInsensitiveReadOnly() throws SQLException { cursorTest("ISOLATION_NAMES", ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY); } public void testSmallScrollSensitiveReadOnly() throws SQLException { cursorTest("ISOLATION_NAMES", ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_READ_ONLY); } public void testSmallForwardOnlyUpdatable() throws SQLException { cursorTest("ISOLATION_NAMES", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_UPDATABLE); } public void testSmallScrollInsensitiveUpdatable() throws SQLException { cursorTest("ISOLATION_NAMES",ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_UPDATABLE); } public void testSmallScrollSensitiveUpdatable() throws SQLException { cursorTest("ISOLATION_NAMES", ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE); } public void testSmallPreparedForwardOnlyReadOnly() throws SQLException { preparedCursorTest("ISOLATION_NAMES", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); } public void testSmallPreparedScrollSensitiveReadOnly() throws SQLException { preparedCursorTest("ISOLATION_NAMES", ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_READ_ONLY); } public void testSmallPreparedScrollInsensitiveReadOnly() throws SQLException { preparedCursorTest("ISOLATION_NAMES", ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY); } public 
void testSmallPreparedForwardOnlyUpdatable() throws SQLException { preparedCursorTest("ISOLATION_NAMES", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_UPDATABLE); } public void testSmallPreparedScrollSensitiveUpdatable() throws SQLException { preparedCursorTest("ISOLATION_NAMES", ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE); } public void testSmallPreparedScrollInsensitiveUpdatable() throws SQLException { preparedCursorTest("ISOLATION_NAMES", ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_UPDATABLE); } public void testLargeForwardOnlyReadOnly() throws SQLException { cursorTest("BIG", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); } public void testLargeScrollSensitiveReadOnly() throws SQLException { cursorTest("BIG", ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_READ_ONLY); } public void testLargeScrollInsensitiveReadOnly() throws SQLException { cursorTest("BIG", ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY); } public void testLargeForwardOnlyUpdatable() throws SQLException { cursorTest("BIG", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_UPDATABLE); } public void testLargeScrollSensitiveUpdatable() throws SQLException { cursorTest("BIG", ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE); } public void testLargeScrollInsensitiveUpdatable() throws SQLException { cursorTest("BIG", ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_UPDATABLE); } public void testLargePreparedForwardOnlyReadOnly() throws SQLException { preparedCursorTest("BIG", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); } public void testLargePreparedScrollSensitiveReadOnly() throws SQLException { preparedCursorTest("BIG", ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_READ_ONLY); } public void testLargePreparedScrollInsensitiveReadOnly() throws SQLException { preparedCursorTest("BIG", ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY); } public void testLargePreparedForwardOnlyUpdatable() throws SQLException 
{ preparedCursorTest("BIG", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_UPDATABLE); } public void testLargePreparedScrollSensitiveUpdatable() throws SQLException { preparedCursorTest("BIG", ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE); } public void testLargePreparedScrollInsensitiveUpdatable() throws SQLException { preparedCursorTest("BIG", ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_UPDATABLE); } // Test that the current schema is piggy-backed correctly public void testSetSchema() throws SQLException { Statement s = createStatement(); s.execute("SET SCHEMA FOO"); verifyCachedSchema(getConnection()); s.execute("SET SCHEMA \"\u00bbMY\u20ac\u00ab\""); verifyCachedSchema(getConnection()); } public void testPreparedSetSchema() throws SQLException { PreparedStatement ps = prepareStatement("SET SCHEMA ?"); ps.setString(1, "FOO"); ps.execute(); verifyCachedSchema(getConnection()); ps.setString(1, "\u00bbMY\u20ac\u00ab"); ps.execute(); verifyCachedSchema(getConnection()); } public void testSetSchemaProcedure() throws SQLException { Statement s = createStatement(); s.execute("CALL APP.SET_SCHEMA('FOO')"); verifyCachedSchema(getConnection()); s.execute("CALL APP.SET_SCHEMA('\"\u00bbMY\u20ac\u00ab\"')"); verifyCachedSchema(getConnection()); } public void testPreparedSetSchemaProcedure() throws SQLException { CallableStatement cs = prepareCall("CALL APP.SET_SCHEMA(?)"); cs.setString(1, "FOO"); cs.execute(); verifyCachedSchema(getConnection()); cs.setString(1, "\"\u00bbMY\u20ac\u00ab\""); cs.execute(); verifyCachedSchema(getConnection()); } public void testSetSchemaFunction() throws SQLException { Statement s = createStatement(); ResultSet rs = s.executeQuery("SELECT " + "APP.GET_SCHEMA_TRANSITION(SCHEMANAME) FROM SYS.SYSSCHEMAS"); while (rs.next()) { assertTrue(rs.getString(1).length() > 2); verifyCachedSchema(getConnection()); } } public void testPreparedSetSchemaFunction() throws SQLException { PreparedStatement ps = prepareStatement("SELECT " + 
"APP.GET_SCHEMA_TRANSITION(SCHEMANAME) FROM SYS.SYSSCHEMAS"); ResultSet rs = ps.executeQuery(); while (rs.next()) { assertTrue(rs.getString(1).length() > 2); verifyCachedSchema(getConnection()); } } public void testSetSchemaFunctionLarge() throws SQLException { Statement s = createStatement(); ResultSet rs = s.executeQuery("SELECT X, " + "APP.GET_SCHEMA_TRANSITION(SCHEMANAME), " + "Y FROM APP.LARGE"); while (rs.next()) { assertTrue(rs.getString(2).length() > 2); verifyCachedSchema(getConnection()); } } public void testPreparedSetSchemaFunctionLarge() throws SQLException { PreparedStatement ps = prepareStatement("SELECT X, " + "APP.GET_SCHEMA_TRANSITION(SCHEMANAME), " + "Y FROM APP.LARGE"); ResultSet rs = ps.executeQuery(); while (rs.next()) { assertTrue(rs.getString(2).length() > 2); verifyCachedSchema(getConnection()); } } }
googleapis/google-cloud-java
36,612
java-analyticshub/proto-google-cloud-analyticshub-v1/src/main/java/com/google/cloud/bigquery/analyticshub/v1/UpdateQueryTemplateRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/bigquery/analyticshub/v1/analyticshub.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.bigquery.analyticshub.v1; /** * * * <pre> * Message for updating a QueryTemplate. * </pre> * * Protobuf type {@code google.cloud.bigquery.analyticshub.v1.UpdateQueryTemplateRequest} */ public final class UpdateQueryTemplateRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.bigquery.analyticshub.v1.UpdateQueryTemplateRequest) UpdateQueryTemplateRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateQueryTemplateRequest.newBuilder() to construct. 
private UpdateQueryTemplateRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateQueryTemplateRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateQueryTemplateRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.bigquery.analyticshub.v1.AnalyticsHubProto .internal_static_google_cloud_bigquery_analyticshub_v1_UpdateQueryTemplateRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.bigquery.analyticshub.v1.AnalyticsHubProto .internal_static_google_cloud_bigquery_analyticshub_v1_UpdateQueryTemplateRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.bigquery.analyticshub.v1.UpdateQueryTemplateRequest.class, com.google.cloud.bigquery.analyticshub.v1.UpdateQueryTemplateRequest.Builder.class); } private int bitField0_; public static final int UPDATE_MASK_FIELD_NUMBER = 1; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Optional. Field mask specifies the fields to update in the query template * resource. The fields specified in the `updateMask` are relative to the * resource and are not a full request. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Optional. Field mask specifies the fields to update in the query template * resource. The fields specified in the `updateMask` are relative to the * resource and are not a full request. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The updateMask. 
*/ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Optional. Field mask specifies the fields to update in the query template * resource. The fields specified in the `updateMask` are relative to the * resource and are not a full request. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } public static final int QUERY_TEMPLATE_FIELD_NUMBER = 2; private com.google.cloud.bigquery.analyticshub.v1.QueryTemplate queryTemplate_; /** * * * <pre> * Required. The QueryTemplate to update. * </pre> * * <code> * .google.cloud.bigquery.analyticshub.v1.QueryTemplate query_template = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the queryTemplate field is set. */ @java.lang.Override public boolean hasQueryTemplate() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The QueryTemplate to update. * </pre> * * <code> * .google.cloud.bigquery.analyticshub.v1.QueryTemplate query_template = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The queryTemplate. */ @java.lang.Override public com.google.cloud.bigquery.analyticshub.v1.QueryTemplate getQueryTemplate() { return queryTemplate_ == null ? com.google.cloud.bigquery.analyticshub.v1.QueryTemplate.getDefaultInstance() : queryTemplate_; } /** * * * <pre> * Required. The QueryTemplate to update. 
* </pre> * * <code> * .google.cloud.bigquery.analyticshub.v1.QueryTemplate query_template = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.bigquery.analyticshub.v1.QueryTemplateOrBuilder getQueryTemplateOrBuilder() { return queryTemplate_ == null ? com.google.cloud.bigquery.analyticshub.v1.QueryTemplate.getDefaultInstance() : queryTemplate_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getUpdateMask()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getQueryTemplate()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getUpdateMask()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getQueryTemplate()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.bigquery.analyticshub.v1.UpdateQueryTemplateRequest)) { return super.equals(obj); } com.google.cloud.bigquery.analyticshub.v1.UpdateQueryTemplateRequest other = (com.google.cloud.bigquery.analyticshub.v1.UpdateQueryTemplateRequest) obj; if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (hasQueryTemplate() 
!= other.hasQueryTemplate()) return false; if (hasQueryTemplate()) { if (!getQueryTemplate().equals(other.getQueryTemplate())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } if (hasQueryTemplate()) { hash = (37 * hash) + QUERY_TEMPLATE_FIELD_NUMBER; hash = (53 * hash) + getQueryTemplate().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.bigquery.analyticshub.v1.UpdateQueryTemplateRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.bigquery.analyticshub.v1.UpdateQueryTemplateRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.bigquery.analyticshub.v1.UpdateQueryTemplateRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.bigquery.analyticshub.v1.UpdateQueryTemplateRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.bigquery.analyticshub.v1.UpdateQueryTemplateRequest parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
com.google.cloud.bigquery.analyticshub.v1.UpdateQueryTemplateRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.bigquery.analyticshub.v1.UpdateQueryTemplateRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.bigquery.analyticshub.v1.UpdateQueryTemplateRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.bigquery.analyticshub.v1.UpdateQueryTemplateRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.bigquery.analyticshub.v1.UpdateQueryTemplateRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.bigquery.analyticshub.v1.UpdateQueryTemplateRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.bigquery.analyticshub.v1.UpdateQueryTemplateRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } 
@java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.bigquery.analyticshub.v1.UpdateQueryTemplateRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Message for updating a QueryTemplate. * </pre> * * Protobuf type {@code google.cloud.bigquery.analyticshub.v1.UpdateQueryTemplateRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.analyticshub.v1.UpdateQueryTemplateRequest) com.google.cloud.bigquery.analyticshub.v1.UpdateQueryTemplateRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.bigquery.analyticshub.v1.AnalyticsHubProto .internal_static_google_cloud_bigquery_analyticshub_v1_UpdateQueryTemplateRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.bigquery.analyticshub.v1.AnalyticsHubProto .internal_static_google_cloud_bigquery_analyticshub_v1_UpdateQueryTemplateRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.bigquery.analyticshub.v1.UpdateQueryTemplateRequest.class, com.google.cloud.bigquery.analyticshub.v1.UpdateQueryTemplateRequest.Builder.class); } // Construct using // com.google.cloud.bigquery.analyticshub.v1.UpdateQueryTemplateRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } 
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getUpdateMaskFieldBuilder(); getQueryTemplateFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } queryTemplate_ = null; if (queryTemplateBuilder_ != null) { queryTemplateBuilder_.dispose(); queryTemplateBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.bigquery.analyticshub.v1.AnalyticsHubProto .internal_static_google_cloud_bigquery_analyticshub_v1_UpdateQueryTemplateRequest_descriptor; } @java.lang.Override public com.google.cloud.bigquery.analyticshub.v1.UpdateQueryTemplateRequest getDefaultInstanceForType() { return com.google.cloud.bigquery.analyticshub.v1.UpdateQueryTemplateRequest .getDefaultInstance(); } @java.lang.Override public com.google.cloud.bigquery.analyticshub.v1.UpdateQueryTemplateRequest build() { com.google.cloud.bigquery.analyticshub.v1.UpdateQueryTemplateRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.bigquery.analyticshub.v1.UpdateQueryTemplateRequest buildPartial() { com.google.cloud.bigquery.analyticshub.v1.UpdateQueryTemplateRequest result = new com.google.cloud.bigquery.analyticshub.v1.UpdateQueryTemplateRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.bigquery.analyticshub.v1.UpdateQueryTemplateRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { 
result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.queryTemplate_ = queryTemplateBuilder_ == null ? queryTemplate_ : queryTemplateBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.bigquery.analyticshub.v1.UpdateQueryTemplateRequest) { return mergeFrom( (com.google.cloud.bigquery.analyticshub.v1.UpdateQueryTemplateRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.bigquery.analyticshub.v1.UpdateQueryTemplateRequest other) { if (other == com.google.cloud.bigquery.analyticshub.v1.UpdateQueryTemplateRequest .getDefaultInstance()) return this; if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } if (other.hasQueryTemplate()) { mergeQueryTemplate(other.getQueryTemplate()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return 
this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getQueryTemplateFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Optional. Field mask specifies the fields to update in the query template * resource. The fields specified in the `updateMask` are relative to the * resource and are not a full request. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Optional. Field mask specifies the fields to update in the query template * resource. The fields specified in the `updateMask` are relative to the * resource and are not a full request. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Optional. Field mask specifies the fields to update in the query template * resource. The fields specified in the `updateMask` are relative to the * resource and are not a full request. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Optional. Field mask specifies the fields to update in the query template * resource. The fields specified in the `updateMask` are relative to the * resource and are not a full request. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Optional. Field mask specifies the fields to update in the query template * resource. The fields specified in the `updateMask` are relative to the * resource and are not a full request. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Optional. Field mask specifies the fields to update in the query template * resource. The fields specified in the `updateMask` are relative to the * resource and are not a full request. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000001); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Optional. Field mask specifies the fields to update in the query template * resource. The fields specified in the `updateMask` are relative to the * resource and are not a full request. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000001; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Optional. Field mask specifies the fields to update in the query template * resource. The fields specified in the `updateMask` are relative to the * resource and are not a full request. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Optional. Field mask specifies the fields to update in the query template * resource. The fields specified in the `updateMask` are relative to the * resource and are not a full request. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } private com.google.cloud.bigquery.analyticshub.v1.QueryTemplate queryTemplate_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.bigquery.analyticshub.v1.QueryTemplate, com.google.cloud.bigquery.analyticshub.v1.QueryTemplate.Builder, com.google.cloud.bigquery.analyticshub.v1.QueryTemplateOrBuilder> queryTemplateBuilder_; /** * * * <pre> * Required. The QueryTemplate to update. * </pre> * * <code> * .google.cloud.bigquery.analyticshub.v1.QueryTemplate query_template = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the queryTemplate field is set. */ public boolean hasQueryTemplate() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The QueryTemplate to update. 
* </pre> * * <code> * .google.cloud.bigquery.analyticshub.v1.QueryTemplate query_template = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The queryTemplate. */ public com.google.cloud.bigquery.analyticshub.v1.QueryTemplate getQueryTemplate() { if (queryTemplateBuilder_ == null) { return queryTemplate_ == null ? com.google.cloud.bigquery.analyticshub.v1.QueryTemplate.getDefaultInstance() : queryTemplate_; } else { return queryTemplateBuilder_.getMessage(); } } /** * * * <pre> * Required. The QueryTemplate to update. * </pre> * * <code> * .google.cloud.bigquery.analyticshub.v1.QueryTemplate query_template = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setQueryTemplate(com.google.cloud.bigquery.analyticshub.v1.QueryTemplate value) { if (queryTemplateBuilder_ == null) { if (value == null) { throw new NullPointerException(); } queryTemplate_ = value; } else { queryTemplateBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The QueryTemplate to update. * </pre> * * <code> * .google.cloud.bigquery.analyticshub.v1.QueryTemplate query_template = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setQueryTemplate( com.google.cloud.bigquery.analyticshub.v1.QueryTemplate.Builder builderForValue) { if (queryTemplateBuilder_ == null) { queryTemplate_ = builderForValue.build(); } else { queryTemplateBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The QueryTemplate to update. 
* </pre> * * <code> * .google.cloud.bigquery.analyticshub.v1.QueryTemplate query_template = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeQueryTemplate( com.google.cloud.bigquery.analyticshub.v1.QueryTemplate value) { if (queryTemplateBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && queryTemplate_ != null && queryTemplate_ != com.google.cloud.bigquery.analyticshub.v1.QueryTemplate.getDefaultInstance()) { getQueryTemplateBuilder().mergeFrom(value); } else { queryTemplate_ = value; } } else { queryTemplateBuilder_.mergeFrom(value); } if (queryTemplate_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. The QueryTemplate to update. * </pre> * * <code> * .google.cloud.bigquery.analyticshub.v1.QueryTemplate query_template = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearQueryTemplate() { bitField0_ = (bitField0_ & ~0x00000002); queryTemplate_ = null; if (queryTemplateBuilder_ != null) { queryTemplateBuilder_.dispose(); queryTemplateBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The QueryTemplate to update. * </pre> * * <code> * .google.cloud.bigquery.analyticshub.v1.QueryTemplate query_template = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.bigquery.analyticshub.v1.QueryTemplate.Builder getQueryTemplateBuilder() { bitField0_ |= 0x00000002; onChanged(); return getQueryTemplateFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The QueryTemplate to update. * </pre> * * <code> * .google.cloud.bigquery.analyticshub.v1.QueryTemplate query_template = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.bigquery.analyticshub.v1.QueryTemplateOrBuilder getQueryTemplateOrBuilder() { if (queryTemplateBuilder_ != null) { return queryTemplateBuilder_.getMessageOrBuilder(); } else { return queryTemplate_ == null ? 
com.google.cloud.bigquery.analyticshub.v1.QueryTemplate.getDefaultInstance() : queryTemplate_; } } /** * * * <pre> * Required. The QueryTemplate to update. * </pre> * * <code> * .google.cloud.bigquery.analyticshub.v1.QueryTemplate query_template = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.bigquery.analyticshub.v1.QueryTemplate, com.google.cloud.bigquery.analyticshub.v1.QueryTemplate.Builder, com.google.cloud.bigquery.analyticshub.v1.QueryTemplateOrBuilder> getQueryTemplateFieldBuilder() { if (queryTemplateBuilder_ == null) { queryTemplateBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.bigquery.analyticshub.v1.QueryTemplate, com.google.cloud.bigquery.analyticshub.v1.QueryTemplate.Builder, com.google.cloud.bigquery.analyticshub.v1.QueryTemplateOrBuilder>( getQueryTemplate(), getParentForChildren(), isClean()); queryTemplate_ = null; } return queryTemplateBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.analyticshub.v1.UpdateQueryTemplateRequest) } // @@protoc_insertion_point(class_scope:google.cloud.bigquery.analyticshub.v1.UpdateQueryTemplateRequest) private static final com.google.cloud.bigquery.analyticshub.v1.UpdateQueryTemplateRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.bigquery.analyticshub.v1.UpdateQueryTemplateRequest(); } public static com.google.cloud.bigquery.analyticshub.v1.UpdateQueryTemplateRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateQueryTemplateRequest> PARSER = new 
com.google.protobuf.AbstractParser<UpdateQueryTemplateRequest>() { @java.lang.Override public UpdateQueryTemplateRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateQueryTemplateRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateQueryTemplateRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.bigquery.analyticshub.v1.UpdateQueryTemplateRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
36,829
java-compute/google-cloud-compute/src/main/java/com/google/cloud/compute/v1/stub/TargetHttpProxiesStubSettings.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.compute.v1.stub; import static com.google.cloud.compute.v1.TargetHttpProxiesClient.AggregatedListPagedResponse; import static com.google.cloud.compute.v1.TargetHttpProxiesClient.ListPagedResponse; import com.google.api.core.ApiFunction; import com.google.api.core.ApiFuture; import com.google.api.core.ObsoleteApi; import com.google.api.gax.core.GaxProperties; import com.google.api.gax.core.GoogleCredentialsProvider; import com.google.api.gax.core.InstantiatingExecutorProvider; import com.google.api.gax.httpjson.GaxHttpJsonProperties; import com.google.api.gax.httpjson.HttpJsonTransportChannel; import com.google.api.gax.httpjson.InstantiatingHttpJsonChannelProvider; import com.google.api.gax.httpjson.ProtoOperationTransformers; import com.google.api.gax.longrunning.OperationSnapshot; import com.google.api.gax.longrunning.OperationTimedPollAlgorithm; import com.google.api.gax.retrying.RetrySettings; import com.google.api.gax.rpc.ApiCallContext; import com.google.api.gax.rpc.ApiClientHeaderProvider; import com.google.api.gax.rpc.ClientContext; import com.google.api.gax.rpc.OperationCallSettings; import com.google.api.gax.rpc.PageContext; import com.google.api.gax.rpc.PagedCallSettings; import com.google.api.gax.rpc.PagedListDescriptor; import com.google.api.gax.rpc.PagedListResponseFactory; import com.google.api.gax.rpc.StatusCode; import com.google.api.gax.rpc.StubSettings; 
import com.google.api.gax.rpc.TransportChannelProvider; import com.google.api.gax.rpc.UnaryCallSettings; import com.google.api.gax.rpc.UnaryCallable; import com.google.cloud.compute.v1.AggregatedListTargetHttpProxiesRequest; import com.google.cloud.compute.v1.DeleteTargetHttpProxyRequest; import com.google.cloud.compute.v1.GetTargetHttpProxyRequest; import com.google.cloud.compute.v1.InsertTargetHttpProxyRequest; import com.google.cloud.compute.v1.ListTargetHttpProxiesRequest; import com.google.cloud.compute.v1.Operation; import com.google.cloud.compute.v1.PatchTargetHttpProxyRequest; import com.google.cloud.compute.v1.SetUrlMapTargetHttpProxyRequest; import com.google.cloud.compute.v1.TargetHttpProxiesScopedList; import com.google.cloud.compute.v1.TargetHttpProxy; import com.google.cloud.compute.v1.TargetHttpProxyAggregatedList; import com.google.cloud.compute.v1.TargetHttpProxyList; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import java.io.IOException; import java.time.Duration; import java.util.List; import java.util.Map; import javax.annotation.Generated; // AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link TargetHttpProxiesStub}. * * <p>The default instance has everything set to sensible defaults: * * <ul> * <li>The default service address (compute.googleapis.com) and default port (443) are used. * <li>Credentials are acquired automatically through Application Default Credentials. * <li>Retries are configured for idempotent methods but not for non-idempotent methods. * </ul> * * <p>The builder of this class is recursive, so contained classes are themselves builders. When * build() is called, the tree of builders is called to create the complete settings object. 
* * <p>For example, to set the * [RetrySettings](https://cloud.google.com/java/docs/reference/gax/latest/com.google.api.gax.retrying.RetrySettings) * of get: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * TargetHttpProxiesStubSettings.Builder targetHttpProxiesSettingsBuilder = * TargetHttpProxiesStubSettings.newBuilder(); * targetHttpProxiesSettingsBuilder * .getSettings() * .setRetrySettings( * targetHttpProxiesSettingsBuilder * .getSettings() * .getRetrySettings() * .toBuilder() * .setInitialRetryDelayDuration(Duration.ofSeconds(1)) * .setInitialRpcTimeoutDuration(Duration.ofSeconds(5)) * .setMaxAttempts(5) * .setMaxRetryDelayDuration(Duration.ofSeconds(30)) * .setMaxRpcTimeoutDuration(Duration.ofSeconds(60)) * .setRetryDelayMultiplier(1.3) * .setRpcTimeoutMultiplier(1.5) * .setTotalTimeoutDuration(Duration.ofSeconds(300)) * .build()); * TargetHttpProxiesStubSettings targetHttpProxiesSettings = * targetHttpProxiesSettingsBuilder.build(); * }</pre> * * Please refer to the [Client Side Retry * Guide](https://github.com/googleapis/google-cloud-java/blob/main/docs/client_retries.md) for * additional support in setting retries. * * <p>To configure the RetrySettings of a Long Running Operation method, create an * OperationTimedPollAlgorithm object and update the RPC's polling algorithm. For example, to * configure the RetrySettings for delete: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * TargetHttpProxiesStubSettings.Builder targetHttpProxiesSettingsBuilder = * TargetHttpProxiesStubSettings.newBuilder(); * TimedRetryAlgorithm timedRetryAlgorithm = * OperationalTimedPollAlgorithm.create( * RetrySettings.newBuilder() * .setInitialRetryDelayDuration(Duration.ofMillis(500)) * .setRetryDelayMultiplier(1.5) * .setMaxRetryDelayDuration(Duration.ofMillis(5000)) * .setTotalTimeoutDuration(Duration.ofHours(24)) * .build()); * targetHttpProxiesSettingsBuilder * .createClusterOperationSettings() * .setPollingAlgorithm(timedRetryAlgorithm) * .build(); * }</pre> */ @Generated("by gapic-generator-java") public class TargetHttpProxiesStubSettings extends StubSettings<TargetHttpProxiesStubSettings> { /** The default scopes of the service. */ private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES = ImmutableList.<String>builder() .add("https://www.googleapis.com/auth/compute") .add("https://www.googleapis.com/auth/cloud-platform") .build(); private final PagedCallSettings< AggregatedListTargetHttpProxiesRequest, TargetHttpProxyAggregatedList, AggregatedListPagedResponse> aggregatedListSettings; private final UnaryCallSettings<DeleteTargetHttpProxyRequest, Operation> deleteSettings; private final OperationCallSettings<DeleteTargetHttpProxyRequest, Operation, Operation> deleteOperationSettings; private final UnaryCallSettings<GetTargetHttpProxyRequest, TargetHttpProxy> getSettings; private final UnaryCallSettings<InsertTargetHttpProxyRequest, Operation> insertSettings; private final OperationCallSettings<InsertTargetHttpProxyRequest, Operation, Operation> insertOperationSettings; private final PagedCallSettings< ListTargetHttpProxiesRequest, TargetHttpProxyList, ListPagedResponse> listSettings; private final UnaryCallSettings<PatchTargetHttpProxyRequest, Operation> 
patchSettings; private final OperationCallSettings<PatchTargetHttpProxyRequest, Operation, Operation> patchOperationSettings; private final UnaryCallSettings<SetUrlMapTargetHttpProxyRequest, Operation> setUrlMapSettings; private final OperationCallSettings<SetUrlMapTargetHttpProxyRequest, Operation, Operation> setUrlMapOperationSettings; private static final PagedListDescriptor< AggregatedListTargetHttpProxiesRequest, TargetHttpProxyAggregatedList, Map.Entry<String, TargetHttpProxiesScopedList>> AGGREGATED_LIST_PAGE_STR_DESC = new PagedListDescriptor< AggregatedListTargetHttpProxiesRequest, TargetHttpProxyAggregatedList, Map.Entry<String, TargetHttpProxiesScopedList>>() { @Override public String emptyToken() { return ""; } @Override public AggregatedListTargetHttpProxiesRequest injectToken( AggregatedListTargetHttpProxiesRequest payload, String token) { return AggregatedListTargetHttpProxiesRequest.newBuilder(payload) .setPageToken(token) .build(); } @Override public AggregatedListTargetHttpProxiesRequest injectPageSize( AggregatedListTargetHttpProxiesRequest payload, int pageSize) { return AggregatedListTargetHttpProxiesRequest.newBuilder(payload) .setMaxResults(pageSize) .build(); } @Override public Integer extractPageSize(AggregatedListTargetHttpProxiesRequest payload) { return payload.getMaxResults(); } @Override public String extractNextToken(TargetHttpProxyAggregatedList payload) { return payload.getNextPageToken(); } @Override public Iterable<Map.Entry<String, TargetHttpProxiesScopedList>> extractResources( TargetHttpProxyAggregatedList payload) { return payload.getItemsMap().entrySet(); } }; private static final PagedListDescriptor< ListTargetHttpProxiesRequest, TargetHttpProxyList, TargetHttpProxy> LIST_PAGE_STR_DESC = new PagedListDescriptor< ListTargetHttpProxiesRequest, TargetHttpProxyList, TargetHttpProxy>() { @Override public String emptyToken() { return ""; } @Override public ListTargetHttpProxiesRequest injectToken( ListTargetHttpProxiesRequest 
payload, String token) { return ListTargetHttpProxiesRequest.newBuilder(payload).setPageToken(token).build(); } @Override public ListTargetHttpProxiesRequest injectPageSize( ListTargetHttpProxiesRequest payload, int pageSize) { return ListTargetHttpProxiesRequest.newBuilder(payload) .setMaxResults(pageSize) .build(); } @Override public Integer extractPageSize(ListTargetHttpProxiesRequest payload) { return payload.getMaxResults(); } @Override public String extractNextToken(TargetHttpProxyList payload) { return payload.getNextPageToken(); } @Override public Iterable<TargetHttpProxy> extractResources(TargetHttpProxyList payload) { return payload.getItemsList(); } }; private static final PagedListResponseFactory< AggregatedListTargetHttpProxiesRequest, TargetHttpProxyAggregatedList, AggregatedListPagedResponse> AGGREGATED_LIST_PAGE_STR_FACT = new PagedListResponseFactory< AggregatedListTargetHttpProxiesRequest, TargetHttpProxyAggregatedList, AggregatedListPagedResponse>() { @Override public ApiFuture<AggregatedListPagedResponse> getFuturePagedResponse( UnaryCallable<AggregatedListTargetHttpProxiesRequest, TargetHttpProxyAggregatedList> callable, AggregatedListTargetHttpProxiesRequest request, ApiCallContext context, ApiFuture<TargetHttpProxyAggregatedList> futureResponse) { PageContext< AggregatedListTargetHttpProxiesRequest, TargetHttpProxyAggregatedList, Map.Entry<String, TargetHttpProxiesScopedList>> pageContext = PageContext.create(callable, AGGREGATED_LIST_PAGE_STR_DESC, request, context); return AggregatedListPagedResponse.createAsync(pageContext, futureResponse); } }; private static final PagedListResponseFactory< ListTargetHttpProxiesRequest, TargetHttpProxyList, ListPagedResponse> LIST_PAGE_STR_FACT = new PagedListResponseFactory< ListTargetHttpProxiesRequest, TargetHttpProxyList, ListPagedResponse>() { @Override public ApiFuture<ListPagedResponse> getFuturePagedResponse( UnaryCallable<ListTargetHttpProxiesRequest, TargetHttpProxyList> callable, 
ListTargetHttpProxiesRequest request, ApiCallContext context, ApiFuture<TargetHttpProxyList> futureResponse) { PageContext<ListTargetHttpProxiesRequest, TargetHttpProxyList, TargetHttpProxy> pageContext = PageContext.create(callable, LIST_PAGE_STR_DESC, request, context); return ListPagedResponse.createAsync(pageContext, futureResponse); } }; /** Returns the object with the settings used for calls to aggregatedList. */ public PagedCallSettings< AggregatedListTargetHttpProxiesRequest, TargetHttpProxyAggregatedList, AggregatedListPagedResponse> aggregatedListSettings() { return aggregatedListSettings; } /** Returns the object with the settings used for calls to delete. */ public UnaryCallSettings<DeleteTargetHttpProxyRequest, Operation> deleteSettings() { return deleteSettings; } /** Returns the object with the settings used for calls to delete. */ public OperationCallSettings<DeleteTargetHttpProxyRequest, Operation, Operation> deleteOperationSettings() { return deleteOperationSettings; } /** Returns the object with the settings used for calls to get. */ public UnaryCallSettings<GetTargetHttpProxyRequest, TargetHttpProxy> getSettings() { return getSettings; } /** Returns the object with the settings used for calls to insert. */ public UnaryCallSettings<InsertTargetHttpProxyRequest, Operation> insertSettings() { return insertSettings; } /** Returns the object with the settings used for calls to insert. */ public OperationCallSettings<InsertTargetHttpProxyRequest, Operation, Operation> insertOperationSettings() { return insertOperationSettings; } /** Returns the object with the settings used for calls to list. */ public PagedCallSettings<ListTargetHttpProxiesRequest, TargetHttpProxyList, ListPagedResponse> listSettings() { return listSettings; } /** Returns the object with the settings used for calls to patch. 
*/ public UnaryCallSettings<PatchTargetHttpProxyRequest, Operation> patchSettings() { return patchSettings; } /** Returns the object with the settings used for calls to patch. */ public OperationCallSettings<PatchTargetHttpProxyRequest, Operation, Operation> patchOperationSettings() { return patchOperationSettings; } /** Returns the object with the settings used for calls to setUrlMap. */ public UnaryCallSettings<SetUrlMapTargetHttpProxyRequest, Operation> setUrlMapSettings() { return setUrlMapSettings; } /** Returns the object with the settings used for calls to setUrlMap. */ public OperationCallSettings<SetUrlMapTargetHttpProxyRequest, Operation, Operation> setUrlMapOperationSettings() { return setUrlMapOperationSettings; } public TargetHttpProxiesStub createStub() throws IOException { if (getTransportChannelProvider() .getTransportName() .equals(HttpJsonTransportChannel.getHttpJsonTransportName())) { return HttpJsonTargetHttpProxiesStub.create(this); } throw new UnsupportedOperationException( String.format( "Transport not supported: %s", getTransportChannelProvider().getTransportName())); } /** Returns the default service name. */ @Override public String getServiceName() { return "compute"; } /** Returns a builder for the default ExecutorProvider for this service. */ public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() { return InstantiatingExecutorProvider.newBuilder(); } /** Returns the default service endpoint. */ @ObsoleteApi("Use getEndpoint() instead") public static String getDefaultEndpoint() { return "compute.googleapis.com:443"; } /** Returns the default mTLS service endpoint. */ public static String getDefaultMtlsEndpoint() { return "compute.mtls.googleapis.com:443"; } /** Returns the default service scopes. */ public static List<String> getDefaultServiceScopes() { return DEFAULT_SERVICE_SCOPES; } /** Returns a builder for the default credentials for this service. 
*/ public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() { return GoogleCredentialsProvider.newBuilder() .setScopesToApply(DEFAULT_SERVICE_SCOPES) .setUseJwtAccessWithScope(true); } /** Returns a builder for the default ChannelProvider for this service. */ public static InstantiatingHttpJsonChannelProvider.Builder defaultHttpJsonTransportProviderBuilder() { return InstantiatingHttpJsonChannelProvider.newBuilder(); } public static TransportChannelProvider defaultTransportChannelProvider() { return defaultHttpJsonTransportProviderBuilder().build(); } public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() { return ApiClientHeaderProvider.newBuilder() .setGeneratedLibToken( "gapic", GaxProperties.getLibraryVersion(TargetHttpProxiesStubSettings.class)) .setTransportToken( GaxHttpJsonProperties.getHttpJsonTokenName(), GaxHttpJsonProperties.getHttpJsonVersion()); } /** Returns a new builder for this class. */ public static Builder newBuilder() { return Builder.createDefault(); } /** Returns a new builder for this class. */ public static Builder newBuilder(ClientContext clientContext) { return new Builder(clientContext); } /** Returns a builder containing all the values of this settings class. 
*/ public Builder toBuilder() { return new Builder(this); } protected TargetHttpProxiesStubSettings(Builder settingsBuilder) throws IOException { super(settingsBuilder); aggregatedListSettings = settingsBuilder.aggregatedListSettings().build(); deleteSettings = settingsBuilder.deleteSettings().build(); deleteOperationSettings = settingsBuilder.deleteOperationSettings().build(); getSettings = settingsBuilder.getSettings().build(); insertSettings = settingsBuilder.insertSettings().build(); insertOperationSettings = settingsBuilder.insertOperationSettings().build(); listSettings = settingsBuilder.listSettings().build(); patchSettings = settingsBuilder.patchSettings().build(); patchOperationSettings = settingsBuilder.patchOperationSettings().build(); setUrlMapSettings = settingsBuilder.setUrlMapSettings().build(); setUrlMapOperationSettings = settingsBuilder.setUrlMapOperationSettings().build(); } /** Builder for TargetHttpProxiesStubSettings. */ public static class Builder extends StubSettings.Builder<TargetHttpProxiesStubSettings, Builder> { private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders; private final PagedCallSettings.Builder< AggregatedListTargetHttpProxiesRequest, TargetHttpProxyAggregatedList, AggregatedListPagedResponse> aggregatedListSettings; private final UnaryCallSettings.Builder<DeleteTargetHttpProxyRequest, Operation> deleteSettings; private final OperationCallSettings.Builder<DeleteTargetHttpProxyRequest, Operation, Operation> deleteOperationSettings; private final UnaryCallSettings.Builder<GetTargetHttpProxyRequest, TargetHttpProxy> getSettings; private final UnaryCallSettings.Builder<InsertTargetHttpProxyRequest, Operation> insertSettings; private final OperationCallSettings.Builder<InsertTargetHttpProxyRequest, Operation, Operation> insertOperationSettings; private final PagedCallSettings.Builder< ListTargetHttpProxiesRequest, TargetHttpProxyList, ListPagedResponse> listSettings; private final 
UnaryCallSettings.Builder<PatchTargetHttpProxyRequest, Operation> patchSettings; private final OperationCallSettings.Builder<PatchTargetHttpProxyRequest, Operation, Operation> patchOperationSettings; private final UnaryCallSettings.Builder<SetUrlMapTargetHttpProxyRequest, Operation> setUrlMapSettings; private final OperationCallSettings.Builder< SetUrlMapTargetHttpProxyRequest, Operation, Operation> setUrlMapOperationSettings; private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>> RETRYABLE_CODE_DEFINITIONS; static { ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions = ImmutableMap.builder(); definitions.put( "retry_policy_0_codes", ImmutableSet.copyOf( Lists.<StatusCode.Code>newArrayList( StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE))); definitions.put( "no_retry_1_codes", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList())); RETRYABLE_CODE_DEFINITIONS = definitions.build(); } private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS; static { ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder(); RetrySettings settings = null; settings = RetrySettings.newBuilder() .setInitialRetryDelayDuration(Duration.ofMillis(100L)) .setRetryDelayMultiplier(1.3) .setMaxRetryDelayDuration(Duration.ofMillis(60000L)) .setInitialRpcTimeoutDuration(Duration.ofMillis(600000L)) .setRpcTimeoutMultiplier(1.0) .setMaxRpcTimeoutDuration(Duration.ofMillis(600000L)) .setTotalTimeoutDuration(Duration.ofMillis(600000L)) .build(); definitions.put("retry_policy_0_params", settings); settings = RetrySettings.newBuilder() .setInitialRpcTimeoutDuration(Duration.ofMillis(600000L)) .setRpcTimeoutMultiplier(1.0) .setMaxRpcTimeoutDuration(Duration.ofMillis(600000L)) .setTotalTimeoutDuration(Duration.ofMillis(600000L)) .build(); definitions.put("no_retry_1_params", settings); RETRY_PARAM_DEFINITIONS = definitions.build(); } protected Builder() { this(((ClientContext) null)); } protected 
Builder(ClientContext clientContext) { super(clientContext); aggregatedListSettings = PagedCallSettings.newBuilder(AGGREGATED_LIST_PAGE_STR_FACT); deleteSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); deleteOperationSettings = OperationCallSettings.newBuilder(); getSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); insertSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); insertOperationSettings = OperationCallSettings.newBuilder(); listSettings = PagedCallSettings.newBuilder(LIST_PAGE_STR_FACT); patchSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); patchOperationSettings = OperationCallSettings.newBuilder(); setUrlMapSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); setUrlMapOperationSettings = OperationCallSettings.newBuilder(); unaryMethodSettingsBuilders = ImmutableList.<UnaryCallSettings.Builder<?, ?>>of( aggregatedListSettings, deleteSettings, getSettings, insertSettings, listSettings, patchSettings, setUrlMapSettings); initDefaults(this); } protected Builder(TargetHttpProxiesStubSettings settings) { super(settings); aggregatedListSettings = settings.aggregatedListSettings.toBuilder(); deleteSettings = settings.deleteSettings.toBuilder(); deleteOperationSettings = settings.deleteOperationSettings.toBuilder(); getSettings = settings.getSettings.toBuilder(); insertSettings = settings.insertSettings.toBuilder(); insertOperationSettings = settings.insertOperationSettings.toBuilder(); listSettings = settings.listSettings.toBuilder(); patchSettings = settings.patchSettings.toBuilder(); patchOperationSettings = settings.patchOperationSettings.toBuilder(); setUrlMapSettings = settings.setUrlMapSettings.toBuilder(); setUrlMapOperationSettings = settings.setUrlMapOperationSettings.toBuilder(); unaryMethodSettingsBuilders = ImmutableList.<UnaryCallSettings.Builder<?, ?>>of( aggregatedListSettings, deleteSettings, getSettings, insertSettings, listSettings, patchSettings, setUrlMapSettings); } private static Builder 
createDefault() { Builder builder = new Builder(((ClientContext) null)); builder.setTransportChannelProvider(defaultTransportChannelProvider()); builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build()); builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build()); builder.setMtlsEndpoint(getDefaultMtlsEndpoint()); builder.setSwitchToMtlsEndpointAllowed(true); return initDefaults(builder); } private static Builder initDefaults(Builder builder) { builder .aggregatedListSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); builder .deleteSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params")); builder .getSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); builder .insertSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params")); builder .listSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); builder .patchSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params")); builder .setUrlMapSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params")); builder .deleteOperationSettings() .setInitialCallSettings( UnaryCallSettings .<DeleteTargetHttpProxyRequest, OperationSnapshot>newUnaryCallSettingsBuilder() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params")) .build()) 
.setResponseTransformer( ProtoOperationTransformers.ResponseTransformer.create(Operation.class)) .setMetadataTransformer( ProtoOperationTransformers.MetadataTransformer.create(Operation.class)) .setPollingAlgorithm( OperationTimedPollAlgorithm.create( RetrySettings.newBuilder() .setInitialRetryDelayDuration(Duration.ofMillis(500L)) .setRetryDelayMultiplier(1.5) .setMaxRetryDelayDuration(Duration.ofMillis(20000L)) .setInitialRpcTimeoutDuration(Duration.ZERO) .setRpcTimeoutMultiplier(1.0) .setMaxRpcTimeoutDuration(Duration.ZERO) .setTotalTimeoutDuration(Duration.ofMillis(600000L)) .build())); builder .insertOperationSettings() .setInitialCallSettings( UnaryCallSettings .<InsertTargetHttpProxyRequest, OperationSnapshot>newUnaryCallSettingsBuilder() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params")) .build()) .setResponseTransformer( ProtoOperationTransformers.ResponseTransformer.create(Operation.class)) .setMetadataTransformer( ProtoOperationTransformers.MetadataTransformer.create(Operation.class)) .setPollingAlgorithm( OperationTimedPollAlgorithm.create( RetrySettings.newBuilder() .setInitialRetryDelayDuration(Duration.ofMillis(500L)) .setRetryDelayMultiplier(1.5) .setMaxRetryDelayDuration(Duration.ofMillis(20000L)) .setInitialRpcTimeoutDuration(Duration.ZERO) .setRpcTimeoutMultiplier(1.0) .setMaxRpcTimeoutDuration(Duration.ZERO) .setTotalTimeoutDuration(Duration.ofMillis(600000L)) .build())); builder .patchOperationSettings() .setInitialCallSettings( UnaryCallSettings .<PatchTargetHttpProxyRequest, OperationSnapshot>newUnaryCallSettingsBuilder() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params")) .build()) .setResponseTransformer( ProtoOperationTransformers.ResponseTransformer.create(Operation.class)) .setMetadataTransformer( 
ProtoOperationTransformers.MetadataTransformer.create(Operation.class)) .setPollingAlgorithm( OperationTimedPollAlgorithm.create( RetrySettings.newBuilder() .setInitialRetryDelayDuration(Duration.ofMillis(500L)) .setRetryDelayMultiplier(1.5) .setMaxRetryDelayDuration(Duration.ofMillis(20000L)) .setInitialRpcTimeoutDuration(Duration.ZERO) .setRpcTimeoutMultiplier(1.0) .setMaxRpcTimeoutDuration(Duration.ZERO) .setTotalTimeoutDuration(Duration.ofMillis(600000L)) .build())); builder .setUrlMapOperationSettings() .setInitialCallSettings( UnaryCallSettings .<SetUrlMapTargetHttpProxyRequest, OperationSnapshot>newUnaryCallSettingsBuilder() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params")) .build()) .setResponseTransformer( ProtoOperationTransformers.ResponseTransformer.create(Operation.class)) .setMetadataTransformer( ProtoOperationTransformers.MetadataTransformer.create(Operation.class)) .setPollingAlgorithm( OperationTimedPollAlgorithm.create( RetrySettings.newBuilder() .setInitialRetryDelayDuration(Duration.ofMillis(500L)) .setRetryDelayMultiplier(1.5) .setMaxRetryDelayDuration(Duration.ofMillis(20000L)) .setInitialRpcTimeoutDuration(Duration.ZERO) .setRpcTimeoutMultiplier(1.0) .setMaxRpcTimeoutDuration(Duration.ZERO) .setTotalTimeoutDuration(Duration.ofMillis(600000L)) .build())); return builder; } /** * Applies the given settings updater function to all of the unary API methods in this service. * * <p>Note: This method does not support applying settings to streaming methods. */ public Builder applyToAllUnaryMethods( ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) { super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater); return this; } public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() { return unaryMethodSettingsBuilders; } /** Returns the builder for the settings used for calls to aggregatedList. 
*/ public PagedCallSettings.Builder< AggregatedListTargetHttpProxiesRequest, TargetHttpProxyAggregatedList, AggregatedListPagedResponse> aggregatedListSettings() { return aggregatedListSettings; } /** Returns the builder for the settings used for calls to delete. */ public UnaryCallSettings.Builder<DeleteTargetHttpProxyRequest, Operation> deleteSettings() { return deleteSettings; } /** Returns the builder for the settings used for calls to delete. */ public OperationCallSettings.Builder<DeleteTargetHttpProxyRequest, Operation, Operation> deleteOperationSettings() { return deleteOperationSettings; } /** Returns the builder for the settings used for calls to get. */ public UnaryCallSettings.Builder<GetTargetHttpProxyRequest, TargetHttpProxy> getSettings() { return getSettings; } /** Returns the builder for the settings used for calls to insert. */ public UnaryCallSettings.Builder<InsertTargetHttpProxyRequest, Operation> insertSettings() { return insertSettings; } /** Returns the builder for the settings used for calls to insert. */ public OperationCallSettings.Builder<InsertTargetHttpProxyRequest, Operation, Operation> insertOperationSettings() { return insertOperationSettings; } /** Returns the builder for the settings used for calls to list. */ public PagedCallSettings.Builder< ListTargetHttpProxiesRequest, TargetHttpProxyList, ListPagedResponse> listSettings() { return listSettings; } /** Returns the builder for the settings used for calls to patch. */ public UnaryCallSettings.Builder<PatchTargetHttpProxyRequest, Operation> patchSettings() { return patchSettings; } /** Returns the builder for the settings used for calls to patch. */ public OperationCallSettings.Builder<PatchTargetHttpProxyRequest, Operation, Operation> patchOperationSettings() { return patchOperationSettings; } /** Returns the builder for the settings used for calls to setUrlMap. 
*/ public UnaryCallSettings.Builder<SetUrlMapTargetHttpProxyRequest, Operation> setUrlMapSettings() { return setUrlMapSettings; } /** Returns the builder for the settings used for calls to setUrlMap. */ public OperationCallSettings.Builder<SetUrlMapTargetHttpProxyRequest, Operation, Operation> setUrlMapOperationSettings() { return setUrlMapOperationSettings; } @Override public TargetHttpProxiesStubSettings build() throws IOException { return new TargetHttpProxiesStubSettings(this); } } }
apache/hadoop
36,510
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/launcher/ServiceLauncher.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.service.launcher; import java.io.File; import java.io.IOException; import java.net.URL; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import org.apache.hadoop.classification.VisibleForTesting; import org.apache.hadoop.util.Preconditions; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.audit.CommonAuditContext; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.service.Service; import org.apache.hadoop.util.ExitCodeProvider; import org.apache.hadoop.util.ExitUtil; import org.apache.hadoop.util.GenericOptionsParser; import org.apache.hadoop.util.StringUtils; /** * A class to launch any YARN service by name. * * It's designed to be subclassed for custom entry points. * * Workflow: * <ol> * <li>An instance of the class is created. 
It must be of the type * {@link Service}</li> * <li>If it implements * {@link LaunchableService#bindArgs(Configuration, List)}, * it is given the binding args off the CLI after all general configuration * arguments have been stripped.</li> * <li>Its {@link Service#init(Configuration)} and {@link Service#start()} * methods are called.</li> * <li>If it implements it, {@link LaunchableService#execute()} * is called and its return code used as the exit code.</li> * <li>Otherwise: it waits for the service to stop, assuming that the * {@link Service#start()} method spawns one or more thread * to perform work</li> * <li>If any exception is raised and provides an exit code, * that is, it implements {@link ExitCodeProvider}, * the return value of {@link ExitCodeProvider#getExitCode()}, * becomes the exit code of the command.</li> * </ol> * Error and warning messages are logged to {@code stderr}. * * @param <S> service class to cast the generated service to. */ @SuppressWarnings("UseOfSystemOutOrSystemErr") public class ServiceLauncher<S extends Service> implements LauncherExitCodes, LauncherArguments, Thread.UncaughtExceptionHandler { /** * Logger. */ private static final Logger LOG = LoggerFactory.getLogger(ServiceLauncher.class); /** * Priority for the shutdown hook: {@value}. */ protected static final int SHUTDOWN_PRIORITY = 30; /** * The name of this class. */ public static final String NAME = "ServiceLauncher"; protected static final String USAGE_NAME = "Usage: " + NAME; protected static final String USAGE_SERVICE_ARGUMENTS = "service-classname <service arguments>"; /** * Usage message. * * Text: {@value} */ public static final String USAGE_MESSAGE = USAGE_NAME + " [" + ARG_CONF_PREFIXED + " <conf file>]" + " [" + ARG_CONFCLASS_PREFIXED + " <configuration classname>]" + " " + USAGE_SERVICE_ARGUMENTS; /** * The shutdown time on an interrupt: {@value}. */ private static final int SHUTDOWN_TIME_ON_INTERRUPT = 30 * 1000; /** * The launched service. 
* * Invalid until the service has been created. */ private volatile S service; /** * Exit code of the service. * * Invalid until a service has * executed or stopped, depending on the service type. */ private int serviceExitCode; /** * Any exception raised during execution. */ private ExitUtil.ExitException serviceException; /** * The interrupt escalator for the service. */ private InterruptEscalator interruptEscalator; /** * Configuration used for the service. */ private Configuration configuration; /** * Text description of service for messages. */ private String serviceName; /** * Classname for the service to create.; empty string otherwise. */ private String serviceClassName = ""; /** * List of the standard configurations to create (and so load in properties). * The values are Hadoop, HDFS and YARN configurations. */ protected static final String[] DEFAULT_CONFIGS = { "org.apache.hadoop.conf.Configuration", "org.apache.hadoop.hdfs.HdfsConfiguration", "org.apache.hadoop.yarn.conf.YarnConfiguration" }; /** * List of classnames to load to configuration before creating a * {@link Configuration} instance. */ private List<String> confClassnames = new ArrayList<>(DEFAULT_CONFIGS.length); /** * URLs of configurations to load into the configuration instance created. */ private List<URL> confResourceUrls = new ArrayList<>(1); /** Command options. Preserved for usage statements. */ private Options commandOptions; /** * Create an instance of the launcher. * @param serviceClassName classname of the service */ public ServiceLauncher(String serviceClassName) { this(serviceClassName, serviceClassName); } /** * Create an instance of the launcher. 
* @param serviceName name of service for text messages * @param serviceClassName classname of the service */ public ServiceLauncher(String serviceName, String serviceClassName) { this.serviceClassName = serviceClassName; this.serviceName = serviceName; // set up initial list of configurations confClassnames.addAll(Arrays.asList(DEFAULT_CONFIGS)); } /** * Get the service. * * Null until * {@link #coreServiceLaunch(Configuration, Service, List, boolean, boolean)} * has completed. * @return the service */ public final S getService() { return service; } /** * Setter is to give subclasses the ability to manipulate the service. * @param s the new service */ protected void setService(S s) { this.service = s; } /** * Get the configuration constructed from the command line arguments. * @return the configuration used to create the service */ public final Configuration getConfiguration() { return configuration; } /** * The exit code from a successful service execution. * @return the exit code. */ public final int getServiceExitCode() { return serviceExitCode; } /** * Get the exit exception used to end this service. * @return an exception, which will be null until the service * has exited (and {@code System.exit} has not been called) */ public final ExitUtil.ExitException getServiceException() { return serviceException; } /** * Probe for service classname being defined. * @return true if the classname is set */ private boolean isClassnameDefined() { return serviceClassName != null && !serviceClassName.isEmpty(); } @Override public String toString() { final StringBuilder sb = new StringBuilder("\"ServiceLauncher for \""); sb.append(serviceName); if (isClassnameDefined()) { sb.append(", serviceClassName='").append(serviceClassName).append('\''); } if (service != null) { sb.append(", service=").append(service); } return sb.toString(); } /** * Launch the service and exit. 
* * <ol> * <li>Parse the command line.</li> * <li>Build the service configuration from it.</li> * <li>Start the service.</li> * <li>If it is a {@link LaunchableService}: execute it</li> * <li>Otherwise: wait for it to finish.</li> * <li>Exit passing the status code to the {@link #exit(int, String)} * method.</li> * </ol> * @param args arguments to the service. {@code arg[0]} is * assumed to be the service classname. */ public void launchServiceAndExit(List<String> args) { StringBuilder builder = new StringBuilder(); for (String arg : args) { builder.append('"').append(arg).append("\" "); } String argumentString = builder.toString(); if (LOG.isDebugEnabled()) { LOG.debug(startupShutdownMessage(serviceName, args)); LOG.debug(argumentString); } registerFailureHandling(); // set up the configs, using reflection to push in the -site.xml files loadConfigurationClasses(); Configuration conf = createConfiguration(); for (URL resourceUrl : confResourceUrls) { conf.addResource(resourceUrl); } bindCommandOptions(); ExitUtil.ExitException exitException; try { List<String> processedArgs = extractCommandOptions(conf, args); exitException = launchService(conf, processedArgs, true, true); } catch (ExitUtil.ExitException e) { exitException = e; noteException(exitException); } if (exitException.getExitCode() == LauncherExitCodes.EXIT_USAGE) { // something went wrong. Print the usage and commands System.err.println(getUsageMessage()); System.err.println("Command: " + argumentString); } System.out.flush(); System.err.flush(); exit(exitException); } /** * Set the {@link #commandOptions} field to the result of * {@link #createOptions()}; protected for subclasses and test access. */ protected void bindCommandOptions() { commandOptions = createOptions(); } /** * Record that an Exit Exception has been raised. 
* Save it to {@link #serviceException}, with its exit code in * {@link #serviceExitCode} * @param exitException exception */ void noteException(ExitUtil.ExitException exitException) { int exitCode = exitException.getExitCode(); if (exitCode != 0) { LOG.debug("Exception raised with exit code {}", exitCode, exitException); Throwable cause = exitException.getCause(); if (cause != null) { // log the nested exception in more detail LOG.warn("{}", cause.toString(), cause); } } serviceExitCode = exitCode; serviceException = exitException; } /** * Get the usage message, ideally dynamically. * @return the usage message */ protected String getUsageMessage() { String message = USAGE_MESSAGE; if (commandOptions != null) { message = USAGE_NAME + " " + commandOptions.toString() + " " + USAGE_SERVICE_ARGUMENTS; } return message; } /** * Override point: create an options instance to combine with the * standard options set. * <i>Important. Synchronize uses of {@link Option}</i> * with {@code Option.class} * @return the new options */ @SuppressWarnings("static-access") protected Options createOptions() { synchronized (Option.class) { Options options = new Options(); Option oconf = Option.builder(ARG_CONF_SHORT).argName("configuration file") .hasArg() .desc("specify an application configuration file") .longOpt(ARG_CONF) .build(); Option confclass = Option.builder(ARG_CONFCLASS_SHORT).argName("configuration classname") .hasArg() .desc("Classname of a Hadoop Configuration subclass to load") .longOpt(ARG_CONFCLASS) .build(); Option property = Option.builder("D").argName("property=value") .hasArg() .desc("use value for given property") .build(); options.addOption(oconf); options.addOption(property); options.addOption(confclass); return options; } } /** * Override point: create the base configuration for the service. * * Subclasses can override to create HDFS/YARN configurations etc. * @return the configuration to use as the service initializer. 
*/ protected Configuration createConfiguration() { return new Configuration(); } /** * Override point: Get a list of configuration classes to create. * @return the array of configs to attempt to create. If any are off the * classpath, that is logged */ @SuppressWarnings("ReturnOfCollectionOrArrayField") protected List<String> getConfigurationsToCreate() { return confClassnames; } /** * @return This creates all the configurations defined by * {@link #getConfigurationsToCreate()} , ensuring that * the resources have been pushed in. * If one cannot be loaded it is logged and the operation continues * except in the case that the class does load but it isn't actually * a subclass of {@link Configuration}. * @throws ExitUtil.ExitException if a loaded class is of the wrong type */ @VisibleForTesting public int loadConfigurationClasses() { List<String> toCreate = getConfigurationsToCreate(); int loaded = 0; for (String classname : toCreate) { try { Class<?> loadClass = getClassLoader().loadClass(classname); Object instance = loadClass.getConstructor().newInstance(); if (!(instance instanceof Configuration)) { throw new ExitUtil.ExitException(EXIT_SERVICE_CREATION_FAILURE, "Could not create " + classname + " because it is not a Configuration class/subclass"); } loaded++; } catch (ClassNotFoundException e) { // class could not be found -implies it is not on the current classpath LOG.debug("Failed to load {} because it is not on the classpath", classname); } catch (ExitUtil.ExitException e) { // rethrow throw e; } catch (Exception e) { // any other exception LOG.info("Failed to create {}", classname, e); } } return loaded; } /** * Launch a service catching all exceptions and downgrading them to exit codes * after logging. * * Sets {@link #serviceException} to this value. * @param conf configuration to use * @param processedArgs command line after the launcher-specific arguments * have been stripped out. 
* @param addShutdownHook should a shutdown hook be added to terminate
   * this service on shutdown. Tests should set this to false.
   * @param execute execute/wait for the service to stop.
   * @return an exit exception, which will have a status code of 0 if it worked
   */
  public ExitUtil.ExitException launchService(Configuration conf,
      List<String> processedArgs,
      boolean addShutdownHook,
      boolean execute) {
    // delegate to the five-argument overload with no pre-built service instance
    return launchService(conf, null, processedArgs, addShutdownHook, execute);
  }

  /**
   * Launch a service catching all exceptions and downgrading them to exit codes
   * after logging.
   *
   * Sets {@link #serviceException} to this value.
   * @param conf configuration to use
   * @param instance optional instance of the service.
   * @param processedArgs command line after the launcher-specific arguments
   * have been stripped out.
   * @param addShutdownHook should a shutdown hook be added to terminate
   * this service on shutdown. Tests should set this to false.
   * @param execute execute/wait for the service to stop.
   * @return an exit exception, which will have a status code of 0 if it worked
   */
  public ExitUtil.ExitException launchService(Configuration conf,
      S instance,
      List<String> processedArgs,
      boolean addShutdownHook,
      boolean execute) {
    ExitUtil.ExitException exitException;
    try {
      int exitCode = coreServiceLaunch(conf, instance, processedArgs,
          addShutdownHook, execute);
      if (service != null) {
        // check to see if the service failed
        Throwable failure = service.getFailureCause();
        if (failure != null) {
          // the service exited with a failure.
          // check what state it is in
          Service.STATE failureState = service.getFailureState();
          if (failureState == Service.STATE.STOPPED) {
            // the failure occurred during shutdown, not important enough
            // to bother the user as it may just scare them
            LOG.debug("Failure during shutdown: {} ", failure, failure);
          } else {
            //throw it for the catch handlers to deal with
            throw failure;
          }
        }
      }
      String name = getServiceName();
      if (exitCode == 0) {
        exitException = new ServiceLaunchException(exitCode,
            "%s succeeded", name);
      } else {
        exitException = new ServiceLaunchException(exitCode, "%s failed ", name);
      }
      // either the service succeeded, or an error raised during shutdown,
      // which we don't worry that much about
    } catch (ExitUtil.ExitException ee) {
      // exit exceptions are passed through unchanged
      exitException = ee;
    } catch (Throwable thrown) {
      // other errors need a full log.
      LOG.error("Exception raised {}",
          service != null
              ? (service.toString() + " in state " + service.getServiceState())
              : "during service instantiation",
          thrown);
      exitException = convertToExitException(thrown);
    }
    // record the outcome for later inspection, then hand it back to the caller
    noteException(exitException);
    return exitException;
  }

  /**
   * Launch the service.
   *
   * All exceptions that occur are propagated upwards.
   *
   * If the method returns a status code, it means that it got as far starting
   * the service, and if it implements {@link LaunchableService}, that the
   * method {@link LaunchableService#execute()} has completed.
   *
   * After this method returns, the service can be retrieved returned by
   * {@link #getService()}.
   *
   * @param conf configuration
   * @param instance optional instance of the service.
   * @param processedArgs arguments after the configuration parameters
   * have been stripped out.
   * @param addShutdownHook should a shutdown hook be added to terminate
   * this service on shutdown. Tests should set this to false.
* @param execute execute/wait for the service to stop
   * @throws ClassNotFoundException classname not on the classpath
   * @throws IllegalAccessException not allowed at the class
   * @throws InstantiationException not allowed to instantiate it
   * @throws InterruptedException thread interrupted
   * @throws ExitUtil.ExitException any exception defining the status code.
   * @throws Exception any other failure -if it implements
   * {@link ExitCodeProvider} then it defines the exit code for any
   * containing exception
   * @return status code.
   */
  protected int coreServiceLaunch(Configuration conf,
      S instance,
      List<String> processedArgs,
      boolean addShutdownHook,
      boolean execute) throws Exception {

    // create the service instance
    if (instance == null) {
      instantiateService(conf);
    } else {
      // service already exists, so instantiate
      configuration = conf;
      service = instance;
    }
    ServiceShutdownHook shutdownHook = null;

    // and the shutdown hook if requested
    if (addShutdownHook) {
      shutdownHook = new ServiceShutdownHook(service);
      shutdownHook.register(SHUTDOWN_PRIORITY);
    }
    String name = getServiceName();
    LOG.debug("Launched service {}", name);
    CommonAuditContext.noteEntryPoint(service);
    LaunchableService launchableService = null;

    if (service instanceof LaunchableService) {
      // it's a LaunchableService, pass in the conf and arguments before init)
      LOG.debug("Service {} implements LaunchableService", name);
      launchableService = (LaunchableService) service;
      if (launchableService.isInState(Service.STATE.INITED)) {
        LOG.warn("LaunchableService {}"
            + " initialized in constructor before CLI arguments passed in",
            name);
      }
      // bindArgs() may return a replacement configuration to use from here on
      Configuration newconf = launchableService.bindArgs(configuration,
          processedArgs);
      if (newconf != null) {
        configuration = newconf;
      }
    }

    //some class constructors init; here this is picked up on.
    if (!service.isInState(Service.STATE.INITED)) {
      service.init(configuration);
    }
    int exitCode;

    try {
      // start the service
      service.start();
      exitCode = EXIT_SUCCESS;
      if (execute && service.isInState(Service.STATE.STARTED)) {
        if (launchableService != null) {
          // assume that runnable services are meant to run from here
          try {
            exitCode = launchableService.execute();
            LOG.debug("Service {} execution returned exit code {}",
                name, exitCode);
          } finally {
            // then stop the service
            service.stop();
          }
        } else {
          //run the service until it stops or an interrupt happens
          // on a different thread.
          LOG.debug("waiting for service threads to terminate");
          service.waitForServiceToStop(0);
        }
      }
    } finally {
      // the shutdown hook is not needed once launch has completed either way
      if (shutdownHook != null) {
        shutdownHook.unregister();
      }
    }
    return exitCode;
  }

  /**
   * @return Instantiate the service defined in {@code serviceClassName}.
   *
   * Sets the {@code configuration} field
   * to the value of {@code conf},
   * and the {@code service} field to the service created.
   *
   * @param conf configuration to use
   */
  @SuppressWarnings("unchecked")
  public Service instantiateService(Configuration conf) {
    Preconditions.checkArgument(conf != null, "null conf");
    Preconditions.checkArgument(serviceClassName != null,
        "null service classname");
    Preconditions.checkArgument(!serviceClassName.isEmpty(),
        "undefined service classname");
    configuration = conf;

    // Instantiate the class. 
// this requires the service to have a public
    // zero-argument or string-argument constructor
    Object instance;
    try {
      Class<?> serviceClass = getClassLoader().loadClass(serviceClassName);
      try {
        instance = serviceClass.getConstructor().newInstance();
      } catch (NoSuchMethodException noEmptyConstructor) {
        // no simple constructor, fall back to a string
        LOG.debug("No empty constructor {}", noEmptyConstructor,
            noEmptyConstructor);
        instance = serviceClass.getConstructor(String.class)
            .newInstance(serviceClassName);
      }
    } catch (Exception e) {
      throw serviceCreationFailure(e);
    }
    if (!(instance instanceof Service)) {
      //not a service
      throw new ServiceLaunchException(
          LauncherExitCodes.EXIT_SERVICE_CREATION_FAILURE,
          "Not a service class: \"%s\"",
          serviceClassName);
    }

    // cast to the specific instance type of this ServiceLauncher
    service = (S) instance;
    return service;
  }

  /**
   * Convert an exception to an {@code ExitException}.
   *
   * This process may just be a simple pass through, otherwise a new
   * exception is created with an exit code, the text of the supplied
   * exception, and the supplied exception as an inner cause.
   *
   * <ol>
   *   <li>If is already the right type, pass it through.</li>
   *   <li>If it implements {@link ExitCodeProvider#getExitCode()},
   *   the exit code is extracted and used in the new exception.</li>
   *   <li>Otherwise, the exit code
   *   {@link LauncherExitCodes#EXIT_EXCEPTION_THROWN} is used.</li>
   * </ol>
   *
   * @param thrown the exception thrown
   * @return an {@code ExitException} with a status code
   */
  protected static ExitUtil.ExitException convertToExitException(
      Throwable thrown) {
    ExitUtil.ExitException exitException;
    // get the exception message
    String message = thrown.toString();
    int exitCode;
    if (thrown instanceof ExitCodeProvider) {
      // the exception provides a status code -extract it
      exitCode = ((ExitCodeProvider) thrown).getExitCode();
      message = thrown.getMessage();
      if (message == null) {
        // some exceptions do not have a message; fall back
        // to the string value.
        message = thrown.toString();
      }
    } else {
      // no exception code: use the default
      exitCode = EXIT_EXCEPTION_THROWN;
    }
    // construct the new exception with the original message and
    // an exit code
    exitException = new ServiceLaunchException(exitCode, thrown, message);
    return exitException;
  }

  /**
   * Generate an exception announcing a failure to create the service.
   * @param exception inner exception.
   * @return a new exception, with the exit code
   * {@link LauncherExitCodes#EXIT_SERVICE_CREATION_FAILURE}
   */
  protected ServiceLaunchException serviceCreationFailure(Exception exception) {
    return new ServiceLaunchException(EXIT_SERVICE_CREATION_FAILURE, exception);
  }

  /**
   * Override point: register this class as the handler for the control-C
   * and SIGINT interrupts.
   *
   * Subclasses can extend this with extra operations, such as
   * an exception handler:
   * <pre>
   *  Thread.setDefaultUncaughtExceptionHandler(
   *     new YarnUncaughtExceptionHandler());
   * </pre>
   */
  protected void registerFailureHandling() {
    try {
      interruptEscalator = new InterruptEscalator(this,
          SHUTDOWN_TIME_ON_INTERRUPT);
      interruptEscalator.register(IrqHandler.CONTROL_C);
      interruptEscalator.register(IrqHandler.SIGTERM);
    } catch (IllegalArgumentException e) {
      // downgrade interrupt registration to warnings
      LOG.warn("{}", e, e);
    }
    Thread.setDefaultUncaughtExceptionHandler(
        new HadoopUncaughtExceptionHandler(this));
  }

  /**
   * Handler for uncaught exceptions: terminate the service.
   * @param thread thread
   * @param exception exception
   */
  @Override
  public void uncaughtException(Thread thread, Throwable exception) {
    LOG.error("Uncaught exception in thread {} -exiting", thread, exception);
    exit(convertToExitException(exception));
  }

  /**
   * Get the service name via {@link Service#getName()}.
   *
   * If the service is not instantiated, the classname is returned instead.
* @return the service name */ public String getServiceName() { Service s = service; String name = null; if (s != null) { try { name = s.getName(); } catch (Exception ignored) { // ignored } } if (name != null) { return "service " + name; } else { return "service " + serviceName; } } /** * Print a warning message. * <p> * This tries to log to the log's warn() operation. * If the log at that level is disabled it logs to system error * @param text warning text */ protected void warn(String text) { if (LOG.isWarnEnabled()) { LOG.warn(text); } else { System.err.println(text); } } /** * Report an error. * <p> * This tries to log to {@code LOG.error()}. * <p> * If that log level is disabled disabled the message * is logged to system error along with {@code thrown.toString()} * @param message message for the user * @param thrown the exception thrown */ protected void error(String message, Throwable thrown) { String text = "Exception: " + message; if (LOG.isErrorEnabled()) { LOG.error(text, thrown); } else { System.err.println(text); if (thrown != null) { System.err.println(thrown.toString()); } } } /** * Exit the JVM. * * This is method can be overridden for testing, throwing an * exception instead. Any subclassed method MUST raise an * {@code ExitException} instance/subclass. * The service launcher code assumes that after this method is invoked, * no other code in the same method is called. * @param exitCode code to exit * @param message input message. */ protected void exit(int exitCode, String message) { ExitUtil.terminate(exitCode, message); } /** * Exit the JVM using an exception for the exit code and message, * invoking {@link ExitUtil#terminate(ExitUtil.ExitException)}. * * This is the standard way a launched service exits. * An error code of 0 means success -nothing is printed. * * If {@link ExitUtil#disableSystemExit()} has been called, this * method will throw the exception. 
* * The method <i>may</i> be subclassed for testing * @param ee exit exception * @throws ExitUtil.ExitException if ExitUtil exceptions are disabled */ protected void exit(ExitUtil.ExitException ee) { ExitUtil.terminate(ee); } /** * Override point: get the classloader to use. * @return the classloader for loading a service class. */ protected ClassLoader getClassLoader() { return this.getClass().getClassLoader(); } /** * Extract the command options and apply them to the configuration, * building an array of processed arguments to hand down to the service. * * @param conf configuration to update. * @param args main arguments. {@code args[0]}is assumed to be * the service classname and is skipped. * @return the remaining arguments * @throws ExitUtil.ExitException if JVM exiting is disabled. */ public List<String> extractCommandOptions(Configuration conf, List<String> args) { int size = args.size(); if (size <= 1) { return Collections.emptyList(); } List<String> coreArgs = args.subList(1, size); return parseCommandArgs(conf, coreArgs); } /** * Parse the command arguments, extracting the service class as the last * element of the list (after extracting all the rest). * * The field {@link #commandOptions} field must already have been set. * @param conf configuration to use * @param args command line argument list * @return the remaining arguments * @throws ServiceLaunchException if processing of arguments failed */ protected List<String> parseCommandArgs(Configuration conf, List<String> args) { Preconditions.checkNotNull(commandOptions, "Command options have not been created"); StringBuilder argString = new StringBuilder(args.size() * 32); for (String arg : args) { argString.append("\"").append(arg).append("\" "); } LOG.debug("Command line: {}", argString); try { String[] argArray = args.toArray(new String[args.size()]); // parse this the standard way. 
This will // update the configuration in the parser, and potentially // patch the user credentials GenericOptionsParser parser = createGenericOptionsParser(conf, argArray); if (!parser.isParseSuccessful()) { throw new ServiceLaunchException(EXIT_COMMAND_ARGUMENT_ERROR, E_PARSE_FAILED + " %s", argString); } CommandLine line = parser.getCommandLine(); List<String> remainingArgs = Arrays.asList(parser.getRemainingArgs()); LOG.debug("Remaining arguments {}", remainingArgs); // Scan the list of configuration files // and bail out if they don't exist if (line.hasOption(ARG_CONF)) { String[] filenames = line.getOptionValues(ARG_CONF); verifyConfigurationFilesExist(filenames); // Add URLs of files as list of URLs to load for (String filename : filenames) { File file = new File(filename); LOG.debug("Configuration files {}", file); confResourceUrls.add(file.toURI().toURL()); } } if (line.hasOption(ARG_CONFCLASS)) { // new resources to instantiate as configurations List<String> classnameList = Arrays.asList( line.getOptionValues(ARG_CONFCLASS)); LOG.debug("Configuration classes {}", classnameList); confClassnames.addAll(classnameList); } // return the remainder return remainingArgs; } catch (IOException e) { // parsing problem: convert to a command argument error with // the original text throw new ServiceLaunchException(EXIT_COMMAND_ARGUMENT_ERROR, e); } catch (RuntimeException e) { // lower level issue such as XML parse failure throw new ServiceLaunchException(EXIT_COMMAND_ARGUMENT_ERROR, e, E_PARSE_FAILED + " %s : %s", argString, e); } } /** * Override point: create a generic options parser or subclass thereof. 
* @param conf Hadoop configuration * @param argArray array of arguments * @return a generic options parser to parse the arguments * @throws IOException on any failure */ protected GenericOptionsParser createGenericOptionsParser(Configuration conf, String[] argArray) throws IOException { return new MinimalGenericOptionsParser(conf, commandOptions, argArray); } /** * Verify that all the specified filenames exist. * @param filenames a list of files * @throws ServiceLaunchException if a file is not found */ protected void verifyConfigurationFilesExist(String[] filenames) { if (filenames == null) { return; } for (String filename : filenames) { File file = new File(filename); LOG.debug("Conf file {}", file.getAbsolutePath()); if (!file.exists()) { // no configuration file throw new ServiceLaunchException(EXIT_NOT_FOUND, ARG_CONF_PREFIXED + ": configuration file not found: %s", file.getAbsolutePath()); } } } /** * @return Build a log message for starting up and shutting down. * @param classname the class of the server * @param args arguments */ protected static String startupShutdownMessage(String classname, List<String> args) { final String hostname = NetUtils.getHostname(); return StringUtils.createStartupShutdownMessage(classname, hostname, args.toArray(new String[args.size()])); } /** * Exit with a printed message. * @param status status code * @param message message message to print before exiting * @throws ExitUtil.ExitException if exceptions are disabled */ protected static void exitWithMessage(int status, String message) { ExitUtil.terminate(new ServiceLaunchException(status, message)); } /** * Exit with the usage exit code {@link #EXIT_USAGE} * and message {@link #USAGE_MESSAGE}. * @throws ExitUtil.ExitException if exceptions are disabled */ protected static void exitWithUsageMessage() { exitWithMessage(EXIT_USAGE, USAGE_MESSAGE); } /** * This is the JVM entry point for the service launcher. 
*
   * Converts the arguments to a list, then invokes {@link #serviceMain(List)}
   * @param args command line arguments.
   */
  public static void main(String[] args) {
    serviceMain(Arrays.asList(args));
  }

  /**
   * Varargs version of the entry point for testing and other in-JVM use.
   * Hands off to {@link #serviceMain(List)}
   * @param args command line arguments.
   */
  public static void serviceMain(String... args) {
    serviceMain(Arrays.asList(args));
  }

  /**
   * The real main function, which takes the arguments as a list.
   * Argument 0 MUST be the service classname
   * @param argsList the list of arguments
   */
  public static void serviceMain(List<String> argsList) {
    if (argsList.isEmpty()) {
      // no arguments: usage message
      exitWithUsageMessage();
    } else {
      ServiceLauncher<Service> launcher =
          new ServiceLauncher<>(argsList.get(0));
      launcher.launchServiceAndExit(argsList);
    }
  }

  /**
   * A generic options parser which does not parse any of the traditional
   * Hadoop options.
   */
  protected static class MinimalGenericOptionsParser
      extends GenericOptionsParser {

    public MinimalGenericOptionsParser(Configuration conf,
        Options options, String[] args) throws IOException {
      super(conf, options, args);
    }

    /** Override point: return the options unmodified -no Hadoop generics. */
    @Override
    protected Options buildGeneralOptions(Options opts) {
      return opts;
    }
  }
}
apache/rocketmq
35,477
store/src/main/java/org/apache/rocketmq/store/queue/RocksDBConsumeQueueOffsetTable.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.rocketmq.store.queue; import static org.apache.rocketmq.common.config.AbstractRocksDBStorage.CTRL_1; import io.netty.util.internal.PlatformDependent; import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.function.Consumer; import java.util.function.Function; import org.apache.rocketmq.common.MixAll; import org.apache.rocketmq.common.Pair; import org.apache.rocketmq.common.constant.LoggerName; import org.apache.rocketmq.common.topic.TopicValidator; import org.apache.rocketmq.logging.org.slf4j.Logger; import org.apache.rocketmq.logging.org.slf4j.LoggerFactory; import org.apache.rocketmq.store.ConsumeQueue; import org.apache.rocketmq.store.DefaultMessageStore; import org.apache.rocketmq.store.queue.offset.OffsetEntry; import org.apache.rocketmq.store.queue.offset.OffsetEntryType; import org.apache.rocketmq.store.rocksdb.ConsumeQueueRocksDBStorage; import org.rocksdb.ColumnFamilyHandle; import org.rocksdb.RocksDBException; import 
org.rocksdb.RocksIterator; import org.rocksdb.WriteBatch; public class RocksDBConsumeQueueOffsetTable { private static final Logger log = LoggerFactory.getLogger(LoggerName.STORE_LOGGER_NAME); private static final Logger ERROR_LOG = LoggerFactory.getLogger(LoggerName.STORE_ERROR_LOGGER_NAME); private static final Logger ROCKSDB_LOG = LoggerFactory.getLogger(LoggerName.ROCKSDB_LOGGER_NAME); private static final byte[] MAX_BYTES = "max".getBytes(StandardCharsets.UTF_8); private static final byte[] MIN_BYTES = "min".getBytes(StandardCharsets.UTF_8); /** * Rocksdb ConsumeQueue's Offset unit. Format: * * <pre> * ┌─────────────────────────┬───────────┬───────────────────────┬───────────┬───────────┬───────────┬─────────────┐ * │ Topic Bytes Array Size │ CTRL_1 │ Topic Bytes Array │ CTRL_1 │ Max(Min) │ CTRL_1 │ QueueId │ * │ (4 Bytes) │ (1 Bytes) │ (n Bytes) │ (1 Bytes) │ (3 Bytes) │ (1 Bytes) │ (4 Bytes) │ * ├─────────────────────────┴───────────┴───────────────────────┴───────────┴───────────┴───────────┴─────────────┤ * │ Key Unit │ * │ │ * </pre> * * <pre> * ┌─────────────────────────────┬────────────────────────┐ * │ CommitLog Physical Offset │ ConsumeQueue Offset │ * │ (8 Bytes) │ (8 Bytes) │ * ├─────────────────────────────┴────────────────────────┤ * │ Value Unit │ * │ │ * </pre> * ConsumeQueue's Offset unit. 
Size: CommitLog Physical Offset(8) + ConsumeQueue Offset(8) = 16 Bytes */ static final int OFFSET_PHY_OFFSET = 0; static final int OFFSET_CQ_OFFSET = 8; /** * ┌─────────────────────────┬───────────┬───────────┬───────────┬───────────┬─────────────┐ * │ Topic Bytes Array Size │ CTRL_1 │ CTRL_1 │ Max(Min) │ CTRL_1 │ QueueId │ * │ (4 Bytes) │ (1 Bytes) │ (1 Bytes) │ (3 Bytes) │ (1 Bytes) │ (4 Bytes) │ * ├─────────────────────────┴───────────┴───────────┴───────────┴───────────┴─────────────┤ */ public static final int OFFSET_KEY_LENGTH_WITHOUT_TOPIC_BYTES = 4 + 1 + 1 + 3 + 1 + 4; private static final int OFFSET_VALUE_LENGTH = 8 + 8; /** * ┌─────────────────────────┬───────────┬───────────┬───────────┬───────────┐ * │ Topic Bytes Array Size │ CTRL_1 │ CTRL_1 │ Max(Min) │ CTRL_1 │ * │ (4 Bytes) │ (1 Bytes) │ (1 Bytes) │ (3 Bytes) │ (1 Bytes) │ * ├─────────────────────────┴───────────┴───────────┴───────────┴───────────┤ */ public static final int OFFSET_KEY_LENGTH_WITHOUT_TOPIC_QUEUE_ID_BYTES = 4 + 1 + 1 + 3 + 1; /** * We use a new system topic='CHECKPOINT_TOPIC' to record the maxPhyOffset built by CQ dispatch thread. * * @see ConsumeQueueStore#getMaxPhyOffsetInConsumeQueue(), we use it to find the maxPhyOffset built by CQ dispatch thread. * If we do not record the maxPhyOffset, it may take us a long time to start traversing from the head of * RocksDBConsumeQueueOffsetTable to find it. 
*/ private static final String MAX_PHYSICAL_OFFSET_CHECKPOINT = TopicValidator.RMQ_SYS_ROCKSDB_OFFSET_TOPIC; private static final byte[] MAX_PHYSICAL_OFFSET_CHECKPOINT_BYTES = MAX_PHYSICAL_OFFSET_CHECKPOINT.getBytes(StandardCharsets.UTF_8); private static final int INNER_CHECKPOINT_TOPIC_LEN = OFFSET_KEY_LENGTH_WITHOUT_TOPIC_BYTES + MAX_PHYSICAL_OFFSET_CHECKPOINT_BYTES.length; private static final ByteBuffer INNER_CHECKPOINT_TOPIC = ByteBuffer.allocateDirect(INNER_CHECKPOINT_TOPIC_LEN); private static final byte[] MAX_PHYSICAL_OFFSET_CHECKPOINT_KEY = new byte[INNER_CHECKPOINT_TOPIC_LEN]; private final ByteBuffer maxPhyOffsetBB; static { buildOffsetKeyByteBuffer0(INNER_CHECKPOINT_TOPIC, MAX_PHYSICAL_OFFSET_CHECKPOINT_BYTES, 0, true); INNER_CHECKPOINT_TOPIC.position(0).limit(INNER_CHECKPOINT_TOPIC_LEN); INNER_CHECKPOINT_TOPIC.get(MAX_PHYSICAL_OFFSET_CHECKPOINT_KEY); } private final RocksDBConsumeQueueTable rocksDBConsumeQueueTable; private final ConsumeQueueRocksDBStorage rocksDBStorage; private final DefaultMessageStore messageStore; private ColumnFamilyHandle offsetCFH; /** * Although we have already put max(min) consumeQueueOffset and physicalOffset in rocksdb, we still hope to get them * from heap to avoid accessing rocksdb. 
*
     * @see ConsumeQueue#getMaxPhysicOffset(), maxPhysicOffset --> topicQueueMaxCqOffset
     * @see ConsumeQueue#getMinLogicOffset(), minLogicOffset --> topicQueueMinOffset
     */
    private final ConcurrentMap<String/* topic-queueId */, PhyAndCQOffset> topicQueueMinOffset;
    private final ConcurrentMap<String/* topic-queueId */, Long> topicQueueMaxCqOffset;

    public RocksDBConsumeQueueOffsetTable(RocksDBConsumeQueueTable rocksDBConsumeQueueTable,
        ConsumeQueueRocksDBStorage rocksDBStorage, DefaultMessageStore messageStore) {
        this.rocksDBConsumeQueueTable = rocksDBConsumeQueueTable;
        this.rocksDBStorage = rocksDBStorage;
        this.messageStore = messageStore;

        // heap-side caches of per-queue min/max offsets, keyed by "topic-queueId"
        this.topicQueueMinOffset = new ConcurrentHashMap<>(1024);
        this.topicQueueMaxCqOffset = new ConcurrentHashMap<>(1024);

        this.maxPhyOffsetBB = ByteBuffer.allocateDirect(8);
    }

    // Resolve the offset column-family handle and warm the max-offset heap cache.
    public void load() {
        this.offsetCFH = this.rocksDBStorage.getOffsetCFHandle();
        loadMaxConsumeQueueOffsets();
    }

    // Scan the offset CF by key prefix ("<len><CTRL_1><topic><CTRL_1>max<CTRL_1>")
    // and collect every queue id recorded for the topic.
    public Set<Integer> scanAllQueueIdInTopic(String topic) throws RocksDBException {
        Set<Integer> queueIdSet = new HashSet<>();
        byte[] topicBytes = topic.getBytes(StandardCharsets.UTF_8);
        ByteBuffer byteBuffer = ByteBuffer.allocate(OFFSET_KEY_LENGTH_WITHOUT_TOPIC_QUEUE_ID_BYTES + topicBytes.length);
        byteBuffer.putInt(topicBytes.length).put(CTRL_1).put(topicBytes).put(CTRL_1).put(MAX_BYTES).put(CTRL_1);
        byteBuffer.flip();
        byte[] prefix = byteBuffer.array();
        rocksDBStorage.iterate(offsetCFH, prefix, (keyBytes, unused) -> {
            ByteBuffer keyBuffer = ByteBuffer.wrap(keyBytes);
            // the queue id is the 4 bytes immediately after the prefix
            keyBuffer.position(prefix.length);
            int queueId = keyBuffer.getInt();
            queueIdSet.add(queueId);
        });
        return queueIdSet;
    }

    // Populate topicQueueMaxCqOffset from every MAXIMUM entry in the offset CF.
    private void loadMaxConsumeQueueOffsets() {
        Function<OffsetEntry, Boolean> predicate = entry -> entry.type == OffsetEntryType.MAXIMUM;
        Consumer<OffsetEntry> fn = entry -> {
            topicQueueMaxCqOffset.putIfAbsent(entry.topic + "-" + entry.queueId, entry.offset);
            log.info("LoadMaxConsumeQueueOffsets Max {}:{} --> {}|{}", entry.topic, entry.queueId, entry.offset,
entry.commitLogOffset); };
        try {
            forEach(predicate, fn);
        } catch (RocksDBException e) {
            log.error("Failed to maximum consume queue offset", e);
        }
    }

    /**
     * Iterate every entry in the offset column family, applying {@code fn}
     * to each entry for which {@code predicate} returns true.
     * Malformed keys and short values (e.g. the 8-byte checkpoint entry)
     * are skipped.
     *
     * @param predicate filter deciding which entries are passed to {@code fn}
     * @param fn consumer invoked for each matching entry
     * @throws RocksDBException on storage failure
     */
    public void forEach(Function<OffsetEntry, Boolean> predicate, Consumer<OffsetEntry> fn) throws RocksDBException {
        try (RocksIterator iterator = this.rocksDBStorage.seekOffsetCF()) {
            if (null == iterator) {
                return;
            }

            int keyBufferCapacity = 256;
            iterator.seekToFirst();
            ByteBuffer keyBuffer = ByteBuffer.allocateDirect(keyBufferCapacity);
            ByteBuffer valueBuffer = ByteBuffer.allocateDirect(16);
            while (iterator.isValid()) {
                // parse key buffer according to key layout
                keyBuffer.clear(); // clear position and limit before reuse
                int total = iterator.key(keyBuffer);
                if (total > keyBufferCapacity) {
                    // key did not fit: grow the buffer and retry the SAME entry
                    // (no iterator.next() here, deliberately)
                    keyBufferCapacity = total;
                    PlatformDependent.freeDirectBuffer(keyBuffer);
                    keyBuffer = ByteBuffer.allocateDirect(keyBufferCapacity);
                    continue;
                }

                if (keyBuffer.remaining() <= OFFSET_KEY_LENGTH_WITHOUT_TOPIC_BYTES) {
                    iterator.next();
                    ROCKSDB_LOG.warn("Malformed Key/Value pair");
                    continue;
                }

                int topicLength = keyBuffer.getInt();
                byte ctrl1 = keyBuffer.get();
                assert ctrl1 == CTRL_1;

                byte[] topicBytes = new byte[topicLength];
                keyBuffer.get(topicBytes);
                ctrl1 = keyBuffer.get();
                assert ctrl1 == CTRL_1;
                String topic = new String(topicBytes, StandardCharsets.UTF_8);

                // 3-byte marker distinguishes "max" entries from "min" entries
                byte[] minMax = new byte[3];
                keyBuffer.get(minMax);
                OffsetEntryType entryType;
                if (Arrays.equals(minMax, MAX_BYTES)) {
                    entryType = OffsetEntryType.MAXIMUM;
                } else {
                    entryType = OffsetEntryType.MINIMUM;
                }
                ctrl1 = keyBuffer.get();
                assert ctrl1 == CTRL_1;

                assert keyBuffer.remaining() == Integer.BYTES;
                int queueId = keyBuffer.getInt();

                // Read and parse value buffer according to value layout
                valueBuffer.clear(); // clear position and limit before reuse
                total = iterator.value(valueBuffer);
                if (total != Long.BYTES + Long.BYTES) {
                    // Skip system checkpoint topic as its value is only 8 bytes
                    iterator.next();
                    continue;
                }

                long commitLogOffset = valueBuffer.getLong();
                long consumeOffset = valueBuffer.getLong();

                OffsetEntry entry = new OffsetEntry();
                entry.topic = topic;
                entry.queueId = queueId;
                entry.type = entryType;
                entry.offset = consumeOffset;
                entry.commitLogOffset = commitLogOffset;
                if (predicate.apply(entry)) {
                    fn.accept(entry);
                }
                iterator.next();
            }

            // clean up direct buffers
            PlatformDependent.freeDirectBuffer(keyBuffer);
            PlatformDependent.freeDirectBuffer(valueBuffer);
        }
    }

    // Batch-write the max offset entries, then the checkpoint holding maxPhyOffset.
    public void putMaxPhyAndCqOffset(final Map<ByteBuffer, Pair<ByteBuffer, DispatchEntry>> tempTopicQueueMaxOffsetMap,
        final WriteBatch writeBatch, final long maxPhyOffset) throws RocksDBException {
        for (Map.Entry<ByteBuffer, Pair<ByteBuffer, DispatchEntry>> entry : tempTopicQueueMaxOffsetMap.entrySet()) {
            writeBatch.put(this.offsetCFH, entry.getKey(), entry.getValue().getObject1());
        }

        appendMaxPhyOffset(writeBatch, maxPhyOffset);
    }

    // Mirror the batched max offsets into the heap cache.
    public void putHeapMaxCqOffset(final Map<ByteBuffer, Pair<ByteBuffer, DispatchEntry>> tempTopicQueueMaxOffsetMap) {
        for (Map.Entry<ByteBuffer, Pair<ByteBuffer, DispatchEntry>> entry : tempTopicQueueMaxOffsetMap.entrySet()) {
            DispatchEntry dispatchEntry = entry.getValue().getObject2();
            String topic = new String(dispatchEntry.topic, StandardCharsets.UTF_8);
            putHeapMaxCqOffset(topic, dispatchEntry.queueId, dispatchEntry.queueOffset);
        }
    }

    /**
     * When topic is deleted, we clean up its offset info in rocksdb.
     *
     * @param topic topic being removed
     * @param queueId queue within the topic
     * @param writeBatch batch to which the deletes are appended
     * @throws RocksDBException on storage failure
     */
    public void destroyOffset(String topic, int queueId, WriteBatch writeBatch) throws RocksDBException {
        final byte[] topicBytes = topic.getBytes(StandardCharsets.UTF_8);
        final ByteBuffer minOffsetKey = buildOffsetKeyByteBuffer(topicBytes, queueId, false);
        byte[] minOffsetBytes = this.rocksDBStorage.getOffset(minOffsetKey.array());
        Long startCQOffset = (minOffsetBytes != null) ?
ByteBuffer.wrap(minOffsetBytes).getLong(OFFSET_CQ_OFFSET) : null; final ByteBuffer maxOffsetKey = buildOffsetKeyByteBuffer(topicBytes, queueId, true); byte[] maxOffsetBytes = this.rocksDBStorage.getOffset(maxOffsetKey.array()); Long endCQOffset = (maxOffsetBytes != null) ? ByteBuffer.wrap(maxOffsetBytes).getLong(OFFSET_CQ_OFFSET) : null; writeBatch.delete(this.offsetCFH, minOffsetKey.array()); writeBatch.delete(this.offsetCFH, maxOffsetKey.array()); String topicQueueId = buildTopicQueueId(topic, queueId); removeHeapMinCqOffset(topicQueueId); removeHeapMaxCqOffset(topicQueueId); log.info("RocksDB offset table delete topic: {}, queueId: {}, minOffset: {}, maxOffset: {}", topic, queueId, startCQOffset, endCQOffset); } private void appendMaxPhyOffset(final WriteBatch writeBatch, final long maxPhyOffset) throws RocksDBException { final ByteBuffer maxPhyOffsetBB = this.maxPhyOffsetBB; maxPhyOffsetBB.position(0).limit(8); maxPhyOffsetBB.putLong(maxPhyOffset); maxPhyOffsetBB.flip(); INNER_CHECKPOINT_TOPIC.position(0).limit(INNER_CHECKPOINT_TOPIC_LEN); writeBatch.put(this.offsetCFH, INNER_CHECKPOINT_TOPIC, maxPhyOffsetBB); } public long getMaxPhyOffset() throws RocksDBException { byte[] valueBytes = this.rocksDBStorage.getOffset(MAX_PHYSICAL_OFFSET_CHECKPOINT_KEY); if (valueBytes == null) { return 0; } ByteBuffer valueBB = ByteBuffer.wrap(valueBytes); return valueBB.getLong(0); } /** * Traverse the offset table to find dirty topic * * @param existTopicSet * @return */ public Map<String, Set<Integer>> iterateOffsetTable2FindDirty(final Set<String> existTopicSet) { Map<String/* topic */, Set<Integer/* queueId */>> topicQueueIdToBeDeletedMap = new HashMap<>(); try (RocksIterator iterator = rocksDBStorage.seekOffsetCF()) { if (iterator == null) { return topicQueueIdToBeDeletedMap; } for (iterator.seekToFirst(); iterator.isValid(); iterator.next()) { byte[] key = iterator.key(); byte[] value = iterator.value(); if (key == null || key.length <= 
OFFSET_KEY_LENGTH_WITHOUT_TOPIC_BYTES || value == null || value.length != OFFSET_VALUE_LENGTH) { continue; } ByteBuffer keyBB = ByteBuffer.wrap(key); int topicLen = keyBB.getInt(0); byte[] topicBytes = new byte[topicLen]; /* * "Topic Bytes Array Size" + "CTRL_1" = 4 + 1 */ keyBB.position(4 + 1); keyBB.get(topicBytes); String topic = new String(topicBytes, StandardCharsets.UTF_8); if (TopicValidator.isSystemTopic(topic)) { continue; } // LMQ topic offsets should NOT be removed if (MixAll.isLmq(topic)) { continue; } /* * "Topic Bytes Array Size" + "CTRL_1" + "Topic Bytes Array" + "CTRL_1" + "Max(min)" + "CTRL_1" * = 4 + 1 + topicLen + 1 + 3 + 1 */ int queueId = keyBB.getInt(4 + 1 + topicLen + 1 + 3 + 1); if (!existTopicSet.contains(topic)) { ByteBuffer valueBB = ByteBuffer.wrap(value); long cqOffset = valueBB.getLong(OFFSET_CQ_OFFSET); Set<Integer> topicQueueIdSet = topicQueueIdToBeDeletedMap.get(topic); if (topicQueueIdSet == null) { Set<Integer> newSet = new HashSet<>(); newSet.add(queueId); topicQueueIdToBeDeletedMap.put(topic, newSet); } else { topicQueueIdSet.add(queueId); } ERROR_LOG.info("RocksDBConsumeQueueOffsetTable has dirty cqOffset. topic: {}, queueId: {}, cqOffset: {}", topic, queueId, cqOffset); } } } catch (Exception e) { ERROR_LOG.error("iterateOffsetTable2MarkDirtyCQ Failed.", e); } return topicQueueIdToBeDeletedMap; } public Long getMaxCqOffset(String topic, int queueId) throws RocksDBException { Long maxCqOffset = getHeapMaxCqOffset(topic, queueId); if (maxCqOffset == null) { final ByteBuffer byteBuffer = getMaxPhyAndCqOffsetInKV(topic, queueId); maxCqOffset = (byteBuffer != null) ? byteBuffer.getLong(OFFSET_CQ_OFFSET) : null; String topicQueueId = buildTopicQueueId(topic, queueId); long offset = maxCqOffset != null ? 
maxCqOffset : -1L; Long prev = this.topicQueueMaxCqOffset.putIfAbsent(topicQueueId, offset); if (null == prev) { ROCKSDB_LOG.info("Max offset of {} is initialized to {} according to RocksDB", topicQueueId, offset); } if (messageStore.getMessageStoreConfig().isEnableRocksDBLog()) { ROCKSDB_LOG.warn("updateMaxOffsetInQueue. {}, {}", topicQueueId, offset); } } return maxCqOffset; } /** * truncate dirty offset in rocksdb * * @param offsetToTruncate * @throws RocksDBException */ public void truncateDirty(long offsetToTruncate) throws RocksDBException { correctMaxPyhOffset(offsetToTruncate); Function<OffsetEntry, Boolean> predicate = entry -> { if (entry.type == OffsetEntryType.MINIMUM) { return false; } // Normal entry offset MUST have the following inequality // entry commit-log offset + message-size-in-bytes <= offsetToTruncate; // otherwise, the consume queue contains dirty records to truncate; // // If the broker node is configured to use async-flush, it's possible consume queues contain // pointers to message records that is not flushed and lost during restart. return entry.commitLogOffset >= offsetToTruncate; }; Consumer<OffsetEntry> fn = entry -> { try { truncateDirtyOffset(entry.topic, entry.queueId); } catch (RocksDBException e) { log.error("Failed to truncate maximum offset of consume queue[topic={}, queue-id={}]", entry.topic, entry.queueId, e); } }; forEach(predicate, fn); } private Pair<Boolean, Long> isMinOffsetOk(final String topic, final int queueId, final long minPhyOffset) throws RocksDBException { PhyAndCQOffset phyAndCQOffset = getHeapMinOffset(topic, queueId); if (phyAndCQOffset != null) { final long phyOffset = phyAndCQOffset.getPhyOffset(); final long cqOffset = phyAndCQOffset.getCqOffset(); return (phyOffset >= minPhyOffset) ? 
new Pair<>(true, cqOffset) : new Pair<>(false, cqOffset); } ByteBuffer byteBuffer = getMinPhyAndCqOffsetInKV(topic, queueId); if (byteBuffer == null) { return new Pair<>(false, 0L); } final long phyOffset = byteBuffer.getLong(OFFSET_PHY_OFFSET); final long cqOffset = byteBuffer.getLong(OFFSET_CQ_OFFSET); if (phyOffset >= minPhyOffset) { String topicQueueId = buildTopicQueueId(topic, queueId); PhyAndCQOffset newPhyAndCQOffset = new PhyAndCQOffset(phyOffset, cqOffset); this.topicQueueMinOffset.putIfAbsent(topicQueueId, newPhyAndCQOffset); if (messageStore.getMessageStoreConfig().isEnableRocksDBLog()) { ROCKSDB_LOG.warn("updateMinOffsetInQueue. {}, {}", topicQueueId, newPhyAndCQOffset); } return new Pair<>(true, cqOffset); } return new Pair<>(false, cqOffset); } private void truncateDirtyOffset(String topic, int queueId) throws RocksDBException { final ByteBuffer byteBuffer = getMaxPhyAndCqOffsetInKV(topic, queueId); if (byteBuffer == null) { return; } long maxPhyOffset = byteBuffer.getLong(OFFSET_PHY_OFFSET); long maxCqOffset = byteBuffer.getLong(OFFSET_CQ_OFFSET); long maxPhyOffsetInCQ = getMaxPhyOffset(); if (maxPhyOffset >= maxPhyOffsetInCQ) { correctMaxCqOffset(topic, queueId, maxCqOffset, maxPhyOffsetInCQ); Long newMaxCqOffset = getHeapMaxCqOffset(topic, queueId); ROCKSDB_LOG.warn("truncateDirtyLogicFile topic: {}, queueId: {} from {} to {}", topic, queueId, maxPhyOffset, newMaxCqOffset); } } private void correctMaxPyhOffset(long maxPhyOffset) throws RocksDBException { if (!this.rocksDBStorage.hold()) { return; } try (WriteBatch writeBatch = new WriteBatch()) { long oldMaxPhyOffset = getMaxPhyOffset(); if (oldMaxPhyOffset <= maxPhyOffset) { return; } log.info("correctMaxPyhOffset, oldMaxPhyOffset={}, newMaxPhyOffset={}", oldMaxPhyOffset, maxPhyOffset); appendMaxPhyOffset(writeBatch, maxPhyOffset); this.rocksDBStorage.batchPut(writeBatch); } catch (RocksDBException e) { ERROR_LOG.error("correctMaxPyhOffset Failed.", e); throw e; } finally { 
this.rocksDBStorage.release(); } } public long getMinCqOffset(String topic, int queueId) throws RocksDBException { final long minPhyOffset = this.messageStore.getMinPhyOffset(); Pair<Boolean, Long> pair = isMinOffsetOk(topic, queueId, minPhyOffset); final long cqOffset = pair.getObject2(); if (!pair.getObject1() && correctMinCqOffset(topic, queueId, cqOffset, minPhyOffset)) { PhyAndCQOffset phyAndCQOffset = getHeapMinOffset(topic, queueId); if (phyAndCQOffset != null) { if (this.messageStore.getMessageStoreConfig().isEnableRocksDBLog()) { ROCKSDB_LOG.warn("getMinOffsetInQueue miss heap. topic: {}, queueId: {}, old: {}, new: {}", topic, queueId, cqOffset, phyAndCQOffset); } return phyAndCQOffset.getCqOffset(); } } return cqOffset; } public Long getMaxPhyOffset(String topic, int queueId) { try { ByteBuffer byteBuffer = getMaxPhyAndCqOffsetInKV(topic, queueId); if (byteBuffer != null) { return byteBuffer.getLong(OFFSET_PHY_OFFSET); } } catch (Exception e) { ERROR_LOG.info("getMaxPhyOffset error. topic: {}, queueId: {}", topic, queueId); } return null; } private ByteBuffer getMinPhyAndCqOffsetInKV(String topic, int queueId) throws RocksDBException { return getPhyAndCqOffsetInKV(topic, queueId, false); } private ByteBuffer getMaxPhyAndCqOffsetInKV(String topic, int queueId) throws RocksDBException { return getPhyAndCqOffsetInKV(topic, queueId, true); } private ByteBuffer getPhyAndCqOffsetInKV(String topic, int queueId, boolean max) throws RocksDBException { final byte[] topicBytes = topic.getBytes(StandardCharsets.UTF_8); final ByteBuffer keyBB = buildOffsetKeyByteBuffer(topicBytes, queueId, max); byte[] value = this.rocksDBStorage.getOffset(keyBB.array()); return (value != null) ? 
ByteBuffer.wrap(value) : null; } private String buildTopicQueueId(final String topic, final int queueId) { return topic + "-" + queueId; } private void putHeapMinCqOffset(final String topic, final int queueId, final long minPhyOffset, final long minCQOffset) { String topicQueueId = buildTopicQueueId(topic, queueId); PhyAndCQOffset phyAndCQOffset = new PhyAndCQOffset(minPhyOffset, minCQOffset); this.topicQueueMinOffset.put(topicQueueId, phyAndCQOffset); } private void putHeapMaxCqOffset(final String topic, final int queueId, final long maxOffset) { String topicQueueId = buildTopicQueueId(topic, queueId); Long prev = this.topicQueueMaxCqOffset.put(topicQueueId, maxOffset); if (prev != null && prev > maxOffset) { ERROR_LOG.error("Max offset of consume-queue[topic={}, queue-id={}] regressed. prev-max={}, current-max={}", topic, queueId, prev, maxOffset); } } private PhyAndCQOffset getHeapMinOffset(final String topic, final int queueId) { return this.topicQueueMinOffset.get(buildTopicQueueId(topic, queueId)); } private Long getHeapMaxCqOffset(final String topic, final int queueId) { String topicQueueId = buildTopicQueueId(topic, queueId); return this.topicQueueMaxCqOffset.get(topicQueueId); } private PhyAndCQOffset removeHeapMinCqOffset(String topicQueueId) { return this.topicQueueMinOffset.remove(topicQueueId); } private Long removeHeapMaxCqOffset(String topicQueueId) { return this.topicQueueMaxCqOffset.remove(topicQueueId); } public void updateCqOffset(final String topic, final int queueId, final long phyOffset, final long cqOffset, boolean max) throws RocksDBException { if (!this.rocksDBStorage.hold()) { return; } try (WriteBatch writeBatch = new WriteBatch()) { final byte[] topicBytes = topic.getBytes(StandardCharsets.UTF_8); final ByteBuffer offsetKey = buildOffsetKeyByteBuffer(topicBytes, queueId, max); final ByteBuffer offsetValue = buildOffsetValueByteBuffer(phyOffset, cqOffset); writeBatch.put(this.offsetCFH, offsetKey.array(), offsetValue.array()); 
this.rocksDBStorage.batchPut(writeBatch); if (max) { putHeapMaxCqOffset(topic, queueId, cqOffset); } else { putHeapMinCqOffset(topic, queueId, phyOffset, cqOffset); } } catch (RocksDBException e) { ERROR_LOG.error("updateCqOffset({}) failed.", max ? "max" : "min", e); throw e; } finally { this.rocksDBStorage.release(); if (messageStore.getMessageStoreConfig().isEnableRocksDBLog()) { ROCKSDB_LOG.warn("updateCqOffset({}). topic: {}, queueId: {}, phyOffset: {}, cqOffset: {}", max ? "max" : "min", topic, queueId, phyOffset, cqOffset); } } } private boolean correctMaxCqOffset(final String topic, final int queueId, final long maxCQOffset, final long maxPhyOffsetInCQ) throws RocksDBException { // 'getMinOffsetInQueue' may correct minCqOffset and put it into heap long minCQOffset = getMinCqOffset(topic, queueId); PhyAndCQOffset minPhyAndCQOffset = getHeapMinOffset(topic, queueId); if (minPhyAndCQOffset == null || minPhyAndCQOffset.getCqOffset() != minCQOffset || minPhyAndCQOffset.getPhyOffset() > maxPhyOffsetInCQ) { ROCKSDB_LOG.info("[BUG] correctMaxCqOffset error! topic: {}, queueId: {}, maxPhyOffsetInCQ: {}, " + "minCqOffset: {}, phyAndCQOffset: {}", topic, queueId, maxPhyOffsetInCQ, minCQOffset, minPhyAndCQOffset); throw new RocksDBException("correctMaxCqOffset error"); } PhyAndCQOffset targetPhyAndCQOffset = this.rocksDBConsumeQueueTable.binarySearchInCQ(topic, queueId, maxCQOffset, minCQOffset, maxPhyOffsetInCQ, false); long targetCQOffset = targetPhyAndCQOffset.getCqOffset(); long targetPhyOffset = targetPhyAndCQOffset.getPhyOffset(); if (targetCQOffset == -1) { if (maxCQOffset != minCQOffset) { updateCqOffset(topic, queueId, minPhyAndCQOffset.getPhyOffset(), minCQOffset, true); } if (messageStore.getMessageStoreConfig().isEnableRocksDBLog()) { ROCKSDB_LOG.warn("correct error. 
{}, {}, {}, {}, {}", topic, queueId, minCQOffset, maxCQOffset, minPhyAndCQOffset.getPhyOffset()); } return false; } else { updateCqOffset(topic, queueId, targetPhyOffset, targetCQOffset, true); return true; } } private boolean correctMinCqOffset(final String topic, final int queueId, final long minCQOffset, final long minPhyOffset) throws RocksDBException { final ByteBuffer maxBB = getMaxPhyAndCqOffsetInKV(topic, queueId); if (maxBB == null) { updateCqOffset(topic, queueId, minPhyOffset, 0L, false); return true; } final long maxPhyOffset = maxBB.getLong(OFFSET_PHY_OFFSET); final long maxCQOffset = maxBB.getLong(OFFSET_CQ_OFFSET); if (maxPhyOffset < minPhyOffset) { updateCqOffset(topic, queueId, minPhyOffset, maxCQOffset + 1, false); return true; } PhyAndCQOffset phyAndCQOffset = this.rocksDBConsumeQueueTable.binarySearchInCQ(topic, queueId, maxCQOffset, minCQOffset, minPhyOffset, true); long targetCQOffset = phyAndCQOffset.getCqOffset(); long targetPhyOffset = phyAndCQOffset.getPhyOffset(); if (targetCQOffset == -1) { if (maxCQOffset != minCQOffset) { updateCqOffset(topic, queueId, maxPhyOffset, maxCQOffset, false); } if (messageStore.getMessageStoreConfig().isEnableRocksDBLog()) { ROCKSDB_LOG.warn("correct error. 
{}, {}, {}, {}, {}", topic, queueId, minCQOffset, maxCQOffset, minPhyOffset); } return false; } else { updateCqOffset(topic, queueId, targetPhyOffset, targetCQOffset, false); return true; } } public static Pair<ByteBuffer, ByteBuffer> getOffsetByteBufferPair() { ByteBuffer offsetKey = ByteBuffer.allocateDirect(RocksDBConsumeQueueStore.MAX_KEY_LEN); ByteBuffer offsetValue = ByteBuffer.allocateDirect(OFFSET_VALUE_LENGTH); return new Pair<>(offsetKey, offsetValue); } static void buildOffsetKeyAndValueByteBuffer(final Pair<ByteBuffer, ByteBuffer> offsetBBPair, final DispatchEntry entry) { final ByteBuffer offsetKey = offsetBBPair.getObject1(); buildOffsetKeyByteBuffer(offsetKey, entry.topic, entry.queueId, true); final ByteBuffer offsetValue = offsetBBPair.getObject2(); buildOffsetValueByteBuffer(offsetValue, entry.commitLogOffset, entry.queueOffset); } private static ByteBuffer buildOffsetKeyByteBuffer(final byte[] topicBytes, final int queueId, final boolean max) { ByteBuffer byteBuffer = ByteBuffer.allocate(OFFSET_KEY_LENGTH_WITHOUT_TOPIC_BYTES + topicBytes.length); buildOffsetKeyByteBuffer0(byteBuffer, topicBytes, queueId, max); return byteBuffer; } public static void buildOffsetKeyByteBuffer(final ByteBuffer byteBuffer, final byte[] topicBytes, final int queueId, final boolean max) { byteBuffer.position(0).limit(OFFSET_KEY_LENGTH_WITHOUT_TOPIC_BYTES + topicBytes.length); buildOffsetKeyByteBuffer0(byteBuffer, topicBytes, queueId, max); } private static void buildOffsetKeyByteBuffer0(final ByteBuffer byteBuffer, final byte[] topicBytes, final int queueId, final boolean max) { byteBuffer.putInt(topicBytes.length).put(CTRL_1).put(topicBytes).put(CTRL_1); if (max) { byteBuffer.put(MAX_BYTES); } else { byteBuffer.put(MIN_BYTES); } byteBuffer.put(CTRL_1).putInt(queueId); byteBuffer.flip(); } private static void buildOffsetValueByteBuffer(final ByteBuffer byteBuffer, final long phyOffset, final long cqOffset) { byteBuffer.position(0).limit(OFFSET_VALUE_LENGTH); 
buildOffsetValueByteBuffer0(byteBuffer, phyOffset, cqOffset); } private static ByteBuffer buildOffsetValueByteBuffer(final long phyOffset, final long cqOffset) { final ByteBuffer byteBuffer = ByteBuffer.allocate(OFFSET_VALUE_LENGTH); buildOffsetValueByteBuffer0(byteBuffer, phyOffset, cqOffset); return byteBuffer; } private static void buildOffsetValueByteBuffer0(final ByteBuffer byteBuffer, final long phyOffset, final long cqOffset) { byteBuffer.putLong(phyOffset).putLong(cqOffset); byteBuffer.flip(); } static class PhyAndCQOffset { private final long phyOffset; private final long cqOffset; public PhyAndCQOffset(final long phyOffset, final long cqOffset) { this.phyOffset = phyOffset; this.cqOffset = cqOffset; } public long getPhyOffset() { return this.phyOffset; } public long getCqOffset() { return this.cqOffset; } @Override public String toString() { return "[cqOffset=" + cqOffset + ", phyOffset=" + phyOffset + "]"; } } }
google/j2objc
36,456
jre_emul/android/platform/libcore/luni/src/test/java/libcore/java/nio/channels/OldFileChannelTest.java
/* Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package libcore.java.nio.channels; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.RandomAccessFile; import java.io.UnsupportedEncodingException; import java.nio.ByteBuffer; import java.nio.MappedByteBuffer; import java.nio.channels.ClosedChannelException; import java.nio.channels.DatagramChannel; import java.nio.channels.FileChannel; import java.nio.channels.FileLock; import java.nio.channels.NonWritableChannelException; import java.nio.channels.OverlappingFileLockException; import java.nio.channels.ReadableByteChannel; import java.nio.channels.WritableByteChannel; import java.util.Arrays; import junit.framework.TestCase; public final class OldFileChannelTest extends TestCase { private static final int CAPACITY = 100; private static final String CONTENT = "MYTESTSTRING needs to be a little long"; private static final byte[] TEST_BYTES; static { try { TEST_BYTES = "test".getBytes("iso8859-1"); } catch (UnsupportedEncodingException e) { throw new Error(e); } } private static final int CONTENT_LENGTH = CONTENT.length(); private static final byte[] CONTENT_AS_BYTES = CONTENT.getBytes(); 
private static final int CONTENT_AS_BYTES_LENGTH = CONTENT_AS_BYTES.length; private FileChannel readOnlyFileChannel; private FileChannel writeOnlyFileChannel; private FileChannel readWriteFileChannel; private File fileOfReadOnlyFileChannel; private File fileOfWriteOnlyFileChannel; private File fileOfReadWriteFileChannel; // to read content from FileChannel private FileInputStream fis; private FileLock fileLock; protected void setUp() throws Exception { fileOfReadOnlyFileChannel = File.createTempFile( "File_of_readOnlyFileChannel", "tmp"); fileOfReadOnlyFileChannel.deleteOnExit(); fileOfWriteOnlyFileChannel = File.createTempFile( "File_of_writeOnlyFileChannel", "tmp"); fileOfWriteOnlyFileChannel.deleteOnExit(); fileOfReadWriteFileChannel = File.createTempFile( "File_of_readWriteFileChannel", "tmp"); fileOfReadWriteFileChannel.deleteOnExit(); fis = null; fileLock = null; readOnlyFileChannel = new FileInputStream(fileOfReadOnlyFileChannel) .getChannel(); writeOnlyFileChannel = new FileOutputStream(fileOfWriteOnlyFileChannel) .getChannel(); readWriteFileChannel = new RandomAccessFile(fileOfReadWriteFileChannel, "rw").getChannel(); } protected void tearDown() { if (null != readOnlyFileChannel) { try { readOnlyFileChannel.close(); } catch (IOException e) { // do nothing } } if (null != writeOnlyFileChannel) { try { writeOnlyFileChannel.close(); } catch (IOException e) { // do nothing } } if (null != readWriteFileChannel) { try { readWriteFileChannel.close(); } catch (IOException e) { // do nothing } } if (null != fis) { try { fis.close(); } catch (IOException e) { // do nothing } } if (null != fileLock) { try { fileLock.release(); } catch (IOException e) { // do nothing } } if (null != fileOfReadOnlyFileChannel) { fileOfReadOnlyFileChannel.delete(); } if (null != fileOfWriteOnlyFileChannel) { fileOfWriteOnlyFileChannel.delete(); } if (null != fileOfReadWriteFileChannel) { fileOfReadWriteFileChannel.delete(); } } public void test_forceZ() throws Exception { ByteBuffer 
writeBuffer = ByteBuffer.wrap(CONTENT_AS_BYTES); writeOnlyFileChannel.write(writeBuffer); writeOnlyFileChannel.force(true); byte[] readBuffer = new byte[CONTENT_AS_BYTES_LENGTH]; fis = new FileInputStream(fileOfWriteOnlyFileChannel); fis.read(readBuffer); assertTrue(Arrays.equals(CONTENT_AS_BYTES, readBuffer)); writeOnlyFileChannel.write(writeBuffer); writeOnlyFileChannel.force(false); fis.close(); readBuffer = new byte[CONTENT_AS_BYTES_LENGTH]; fis = new FileInputStream(fileOfWriteOnlyFileChannel); fis.read(readBuffer); assertTrue(Arrays.equals(CONTENT_AS_BYTES, readBuffer)); fis.close(); } /** * Initializes test file. * * @param file * @throws FileNotFoundException * @throws IOException */ private void writeDataToFile(File file) throws FileNotFoundException, IOException { FileOutputStream fos = new FileOutputStream(file); try { fos.write(CONTENT_AS_BYTES); } finally { fos.close(); } } /** * Initializes large test file. * * @param file the file to be written * @param size the content size to be written * @throws FileNotFoundException * @throws IOException */ private void writeLargeDataToFile(File file, int size) throws FileNotFoundException, IOException { FileOutputStream fos = new FileOutputStream(file); byte[] buf = new byte[size]; try { // we don't care about content - just need a particular file size fos.write(buf); } finally { fos.close(); } } public void test_tryLockJJZ_IllegalArgument() throws Exception { try { writeOnlyFileChannel.tryLock(0, -1, false); fail("should throw IllegalArgumentException"); } catch (IllegalArgumentException e) { // expected } try { writeOnlyFileChannel.tryLock(-1, 0, false); fail("should throw IllegalArgumentException"); } catch (IllegalArgumentException e) { // expected } try { readWriteFileChannel.tryLock(-1, -1, false); fail("should throw IllegalArgumentException"); } catch (IllegalArgumentException e) { // expected } try { readWriteFileChannel.tryLock(Long.MAX_VALUE, 1, false); fail("should throw IllegalArgumentException"); } 
catch (IllegalArgumentException e) { // expected } } public void testTryLockVeryLarge() throws IOException { long tooBig = Integer.MAX_VALUE + 1L; FileLock lock = readWriteFileChannel.tryLock(tooBig, 1, false); assertLockFails(tooBig, 1); lock.release(); lock = readWriteFileChannel.tryLock(0, tooBig, false); assertLockFails(0, 1); lock.release(); } public void testTryLockOverlapping() throws IOException { FileLock lockOne = readWriteFileChannel.tryLock(0, 10, false); FileLock lockTwo = readWriteFileChannel.tryLock(10, 20, false); assertLockFails(0, 10); lockOne.release(); assertLockFails(5, 10); lockOne = readWriteFileChannel.tryLock(0, 10, false); lockTwo.release(); lockOne.release(); } public void test_readLByteBufferJ_IllegalArgument() throws Exception { ByteBuffer readBuffer = ByteBuffer.allocate(CAPACITY); try { readOnlyFileChannel.read(readBuffer, -1); fail("should throw IllegalArgumentException"); } catch (IllegalArgumentException e) { // expected } try { writeOnlyFileChannel.read(readBuffer, -1); fail("should throw IllegalArgumentException"); } catch (IllegalArgumentException e) { // expected } try { readWriteFileChannel.read(readBuffer, -1); fail("should throw IllegalArgumentException"); } catch (IllegalArgumentException e) { // expected } // throws IllegalArgumentException first. 
readOnlyFileChannel.close(); try { readOnlyFileChannel.read(readBuffer, -1); fail("should throw IllegalArgumentException"); } catch (IllegalArgumentException e) { // expected } writeOnlyFileChannel.close(); try { writeOnlyFileChannel.read(readBuffer, -1); fail("should throw IllegalArgumentException"); } catch (IllegalArgumentException e) { // expected } readWriteFileChannel.close(); try { readWriteFileChannel.read(readBuffer, -1); fail("should throw IllegalArgumentException"); } catch (IllegalArgumentException e) { // expected } } public void test_read$LByteBufferII_Null() throws Exception { try { readOnlyFileChannel.read(null, 0, 1); fail("should throw NullPointerException"); } catch (NullPointerException e) { // expected } try { readOnlyFileChannel.read(null, 0, 3); fail("should throw NullPointerException"); } catch (NullPointerException e) { // expected } try { readOnlyFileChannel.read(null, 1, 2); fail("should throw NullPointerException"); } catch (NullPointerException e) { // expected } try { readOnlyFileChannel.read(null, 2, 1); fail("should throw NullPointerException"); } catch (NullPointerException e) { // expected } try { readOnlyFileChannel.read(null, 3, 0); fail("should throw NullPointerException"); } catch (NullPointerException e) { // expected } try { writeOnlyFileChannel.read(null, 0, 1); fail("should throw NullPointerException"); } catch (NullPointerException e) { // expected } try { writeOnlyFileChannel.read(null, 0, 3); fail("should throw NullPointerException"); } catch (NullPointerException e) { // expected } try { writeOnlyFileChannel.read(null, 1, 2); fail("should throw NullPointerException"); } catch (NullPointerException e) { // expected } try { writeOnlyFileChannel.read(null, 2, 1); fail("should throw NullPointerException"); } catch (NullPointerException e) { // expected } try { writeOnlyFileChannel.read(null, 3, 0); fail("should throw NullPointerException"); } catch (NullPointerException e) { // expected } try { 
readWriteFileChannel.read(null, 0, 1); fail("should throw NullPointerException"); } catch (NullPointerException e) { // expected } try { readWriteFileChannel.read(null, 0, 3); fail("should throw NullPointerException"); } catch (NullPointerException e) { // expected } try { readWriteFileChannel.read(null, 1, 2); fail("should throw NullPointerException"); } catch (NullPointerException e) { // expected } try { readWriteFileChannel.read(null, 2, 1); fail("should throw NullPointerException"); } catch (NullPointerException e) { // expected } try { readWriteFileChannel.read(null, 3, 0); fail("should throw NullPointerException"); } catch (NullPointerException e) { // expected } // first throws NullPointerException readOnlyFileChannel.close(); try { readOnlyFileChannel.read(null, 0, 1); fail("should throw NullPointerException"); } catch (NullPointerException e) { // expected } try { readOnlyFileChannel.read(null, 0, 3); fail("should throw NullPointerException"); } catch (NullPointerException e) { // expected } try { readOnlyFileChannel.read(null, 1, 2); fail("should throw NullPointerException"); } catch (NullPointerException e) { // expected } try { readOnlyFileChannel.read(null, 2, 1); fail("should throw NullPointerException"); } catch (NullPointerException e) { // expected } try { readOnlyFileChannel.read(null, 3, 0); fail("should throw NullPointerException"); } catch (NullPointerException e) { // expected } readWriteFileChannel.close(); try { readWriteFileChannel.read(null, 0, 1); fail("should throw NullPointerException"); } catch (NullPointerException e) { // expected } try { readWriteFileChannel.read(null, 0, 3); fail("should throw NullPointerException"); } catch (NullPointerException e) { // expected } try { readWriteFileChannel.read(null, 1, 2); fail("should throw NullPointerException"); } catch (NullPointerException e) { // expected } try { readWriteFileChannel.read(null, 2, 1); fail("should throw NullPointerException"); } catch (NullPointerException e) { // 
expected } try { readWriteFileChannel.read(null, 3, 0); fail("should throw NullPointerException"); } catch (NullPointerException e) { // expected } writeOnlyFileChannel.close(); try { writeOnlyFileChannel.read(null, 0, 1); fail("should throw NullPointerException"); } catch (NullPointerException e) { // expected } try { writeOnlyFileChannel.read(null, 0, 3); fail("should throw NullPointerException"); } catch (NullPointerException e) { // expected } try { writeOnlyFileChannel.read(null, 1, 2); fail("should throw NullPointerException"); } catch (NullPointerException e) { // expected } try { writeOnlyFileChannel.read(null, 2, 1); fail("should throw NullPointerException"); } catch (NullPointerException e) { // expected } try { writeOnlyFileChannel.read(null, 3, 0); fail("should throw NullPointerException"); } catch (NullPointerException e) { // expected } } private void doTestForIOOBException(FileChannel channel, ByteBuffer[] buffer) throws IOException{ try { channel.read(buffer, -1, 0); fail("should throw IndexOutOfBoundException"); } catch (IndexOutOfBoundsException e) { // expected } try { channel.read(buffer, 0, -1); fail("should throw IndexOutOfBoundException"); } catch (IndexOutOfBoundsException e) { // expected } try { channel.read(buffer, 0, 3); fail("should throw IndexOutOfBoundException"); } catch (IndexOutOfBoundsException e) { // expected } try { channel.read(buffer, 1, 2); fail("should throw IndexOutOfBoundException"); } catch (IndexOutOfBoundsException e) { // expected } try { channel.read(buffer, 2, 1); fail("should throw IndexOutOfBoundException"); } catch (IndexOutOfBoundsException e) { // expected } try { channel.read(buffer, 3, 0); fail("should throw IndexOutOfBoundException"); } catch (IndexOutOfBoundsException e) { // expected } } public void test_read$LByteBufferII_IndexOutOfBound() throws Exception { ByteBuffer[] readBuffers = new ByteBuffer[2]; readBuffers[0] = ByteBuffer.allocate(CAPACITY); readBuffers[1] = ByteBuffer.allocate(CAPACITY); 
ByteBuffer[] readBuffersNull = new ByteBuffer[2]; doTestForIOOBException(readOnlyFileChannel, readBuffers); doTestForIOOBException(readWriteFileChannel, readBuffers); doTestForIOOBException(writeOnlyFileChannel, readBuffers); doTestForIOOBException(readOnlyFileChannel, readBuffersNull); doTestForIOOBException(readWriteFileChannel, readBuffersNull); doTestForIOOBException(writeOnlyFileChannel, readBuffersNull); try { readOnlyFileChannel.read(null, -1, 0); fail("should throw IndexOutOfBoundException"); } catch (NullPointerException expected) { } catch (IndexOutOfBoundsException expected) { } try { readOnlyFileChannel.read(null, 0, -1); fail("should throw IndexOutOfBoundException"); } catch (NullPointerException expected) { } catch (IndexOutOfBoundsException expected) { } try { readWriteFileChannel.read(null, -1, 0); fail("should throw IndexOutOfBoundException"); } catch (NullPointerException expected) { } catch (IndexOutOfBoundsException expected) { } try { readWriteFileChannel.read(null, 0, -1); fail("should throw IndexOutOfBoundException"); } catch (NullPointerException expected) { } catch (IndexOutOfBoundsException expected) { } try { writeOnlyFileChannel.read(null, -1, 0); fail("should throw IndexOutOfBoundException"); } catch (NullPointerException expected) { } catch (IndexOutOfBoundsException expected) { } try { writeOnlyFileChannel.read(null, 0, -1); fail("should throw IndexOutOfBoundException"); } catch (NullPointerException expected) { } catch (IndexOutOfBoundsException expected) { } readOnlyFileChannel.close(); doTestForIOOBException(readOnlyFileChannel, readBuffers); doTestForIOOBException(readOnlyFileChannel, readBuffersNull); readWriteFileChannel.close(); doTestForIOOBException(readWriteFileChannel, readBuffers); doTestForIOOBException(readWriteFileChannel, readBuffersNull); writeOnlyFileChannel.close(); doTestForIOOBException(writeOnlyFileChannel, readBuffers); doTestForIOOBException(writeOnlyFileChannel, readBuffersNull); } public void 
test_read$LByteBufferII_EmptyFile() throws Exception {
    // Scatter-read from an empty file: must report EOF (-1) and leave
    // both destination buffers untouched.
    ByteBuffer[] readBuffers = new ByteBuffer[2];
    readBuffers[0] = ByteBuffer.allocate(CAPACITY);
    readBuffers[1] = ByteBuffer.allocate(CAPACITY);
    long result = readOnlyFileChannel.read(readBuffers, 0, 2);
    assertEquals(-1, result);
    assertEquals(0, readBuffers[0].position());
    assertEquals(0, readBuffers[1].position());
}

/**
 * Scatter-read into an array whose second element is null. A
 * NullPointerException is tolerated both on an empty file and on a
 * populated one; afterwards a read restricted to the non-null buffer
 * must still succeed.
 *
 * NOTE(review): the try blocks deliberately contain no fail() call, so
 * an implementation that does not throw is also accepted — presumably
 * because behavior differs across platforms; confirm before tightening.
 */
public void test_read$LByteBufferII_EmptyBuffers() throws Exception {
    ByteBuffer[] readBuffers = new ByteBuffer[2];
    readBuffers[0] = ByteBuffer.allocate(CAPACITY);
    try {
        readOnlyFileChannel.read(readBuffers, 0, 2);
    } catch (NullPointerException e) {
        // expected
    }

    writeDataToFile(fileOfReadOnlyFileChannel);
    readBuffers[0] = ByteBuffer.allocate(CAPACITY);
    try {
        readOnlyFileChannel.read(readBuffers, 0, 2);
    } catch (NullPointerException e) {
        // expected
    }

    // A read covering only the first (non-null) buffer works normally.
    long result = readOnlyFileChannel.read(readBuffers, 0, 1);
    assertEquals(CONTENT_AS_BYTES_LENGTH, result);
}

/**
 * Regression test for HARMONY-40: closing the owning stream must also
 * close the FileChannel it exposes.
 */
public void test_isOpen() throws Exception {
    File logFile = File.createTempFile("out", "tmp");
    logFile.deleteOnExit();
    FileOutputStream out = new FileOutputStream(logFile, true);
    FileChannel channel = out.getChannel();
    out.write(1);
    assertTrue("Assert 0: Channel is not open", channel.isOpen());
    out.close();
    assertFalse("Assert 0: Channel is still open", channel.isOpen());
}

/**
 * write(ByteBuffer) on a closed channel must throw
 * ClosedChannelException. With a null buffer the exception precedence
 * is unspecified, so either NullPointerException or
 * ClosedChannelException is accepted.
 */
public void test_writeLByteBuffer_Closed() throws Exception {
    ByteBuffer writeBuffer = ByteBuffer.allocate(CAPACITY);

    readOnlyFileChannel.close();
    try {
        readOnlyFileChannel.write(writeBuffer);
        fail("should throw ClosedChannelException");
    } catch (ClosedChannelException e) {
        // expected
    }

    writeOnlyFileChannel.close();
    try {
        writeOnlyFileChannel.write(writeBuffer);
        fail("should throw ClosedChannelException");
    } catch (ClosedChannelException e) {
        // expected
    }

    readWriteFileChannel.close();
    try {
        readWriteFileChannel.write(writeBuffer);
        fail("should throw ClosedChannelException");
    } catch (ClosedChannelException e) {
        // expected
    }

    // Null buffer on a closed channel: either exception is acceptable.
    try {
        readWriteFileChannel.read((ByteBuffer) null);
        fail("should throw ClosedChannelException");
    } catch (NullPointerException e) {
    } catch (ClosedChannelException e) {
    }

    try {
        readOnlyFileChannel.write((ByteBuffer) null);
        fail("should throw ClosedChannelException");
    } catch (NullPointerException e) {
    } catch (ClosedChannelException e) {
    }

    writeOnlyFileChannel.close(); // already closed; close() is idempotent
    try {
        writeOnlyFileChannel.write((ByteBuffer) null);
        fail("should throw ClosedChannelException");
    } catch (NullPointerException e) {
    } catch (ClosedChannelException e) {
    }
}

/**
 * Writing at position Long.MAX_VALUE: an IOException is tolerated but
 * not required (no fail() on the success path — platform dependent).
 */
public void test_writeLByteBufferJ_Postion_As_Long() throws Exception {
    ByteBuffer writeBuffer = ByteBuffer.wrap(TEST_BYTES);
    try {
        writeOnlyFileChannel.write(writeBuffer, Long.MAX_VALUE);
    } catch (IOException e) {
        // expected
    }
}

/**
 * write(ByteBuffer, long) with a negative position must throw
 * IllegalArgumentException on every channel, and the argument check
 * takes precedence over the closed-channel check.
 */
public void test_writeLByteBufferJ_IllegalArgument() throws Exception {
    ByteBuffer writeBuffer = ByteBuffer.allocate(CAPACITY);

    try {
        readOnlyFileChannel.write(writeBuffer, -1);
        fail("should throw IllegalArgumentException");
    } catch (IllegalArgumentException e) {
        // expected
    }

    try {
        writeOnlyFileChannel.write(writeBuffer, -1);
        fail("should throw IllegalArgumentException");
    } catch (IllegalArgumentException e) {
        // expected
    }

    try {
        readWriteFileChannel.write(writeBuffer, -1);
        fail("should throw IllegalArgumentException");
    } catch (IllegalArgumentException e) {
        // expected
    }

    // IllegalArgumentException is thrown first, even on closed channels.
    readOnlyFileChannel.close();
    try {
        readOnlyFileChannel.write(writeBuffer, -1);
        fail("should throw IllegalArgumentException");
    } catch (IllegalArgumentException e) {
        // expected
    }

    writeOnlyFileChannel.close();
    try {
        writeOnlyFileChannel.write(writeBuffer, -1);
        fail("should throw IllegalArgumentException");
    } catch (IllegalArgumentException e) {
        // expected
    }

    readWriteFileChannel.close();
    try {
        readWriteFileChannel.write(writeBuffer, -1);
        fail("should throw IllegalArgumentException");
    } catch (IllegalArgumentException e) {
        // expected
    }
}

/**
 * Positional write starting at a non-zero file position: bytes before
 * the position are left as a gap and the written region must match the
 * corresponding slice of CONTENT.
 */
public void test_writeLByteBufferJ_NonZeroPosition() throws Exception {
    final int pos = 5;
    ByteBuffer writeBuffer = ByteBuffer.wrap(CONTENT_AS_BYTES);
    writeBuffer.position(pos);
    int result = writeOnlyFileChannel.write(writeBuffer, pos);
    assertEquals(CONTENT_AS_BYTES_LENGTH - pos, result);
    assertEquals(CONTENT_AS_BYTES_LENGTH, writeBuffer.position());
    writeOnlyFileChannel.close();

    assertEquals(CONTENT_AS_BYTES_LENGTH, fileOfWriteOnlyFileChannel
            .length());

    fis = new FileInputStream(fileOfWriteOnlyFileChannel);
    byte[] inputBuffer = new byte[CONTENT_AS_BYTES_LENGTH - pos];
    fis.skip(pos);
    fis.read(inputBuffer);
    String test = CONTENT.substring(pos);
    assertTrue(Arrays.equals(test.getBytes(), inputBuffer));
}

/**
 * Gathering write on a closed channel must throw
 * ClosedChannelException regardless of the channel's access mode.
 */
public void test_write$LByteBuffer_Closed() throws Exception {
    ByteBuffer[] writeBuffers = new ByteBuffer[2];
    writeBuffers[0] = ByteBuffer.allocate(CAPACITY);
    writeBuffers[1] = ByteBuffer.allocate(CAPACITY);

    readOnlyFileChannel.close();
    try {
        readOnlyFileChannel.write(writeBuffers);
        fail("should throw ClosedChannelException");
    } catch (ClosedChannelException e) {
        // expected
    }

    writeOnlyFileChannel.close();
    try {
        writeOnlyFileChannel.write(writeBuffers);
        fail("should throw ClosedChannelException");
    } catch (ClosedChannelException e) {
        // expected
    }

    readWriteFileChannel.close();
    try {
        readWriteFileChannel.write(writeBuffers);
        fail("should throw ClosedChannelException");
    } catch (ClosedChannelException e) {
        // expected
    }
}

/**
 * Gathering write on a read-only channel must throw
 * NonWritableChannelException.
 */
public void test_write$LByteBuffer_ReadOnly() throws Exception {
    ByteBuffer[] writeBuffers = new ByteBuffer[2];
    writeBuffers[0] = ByteBuffer.allocate(CAPACITY);
    writeBuffers[1] = ByteBuffer.allocate(CAPACITY);
    try {
        readOnlyFileChannel.write(writeBuffers);
        fail("should throw NonWritableChannelException");
    } catch (NonWritableChannelException e) {
        // expected
    }
}

/**
 * Gathering write over an array with a null element must throw
 * NullPointerException.
 */
public void test_write$LByteBuffer_EmptyBuffers() throws Exception {
    ByteBuffer[] writeBuffers = new ByteBuffer[2];
    writeBuffers[0] = ByteBuffer.allocate(this.CONTENT_LENGTH);
    try {
        writeOnlyFileChannel.write(writeBuffers);
        fail("should throw NullPointerException");
    } catch (NullPointerException e) {
        // expected
    }
    try {
        readWriteFileChannel.write(writeBuffers);
        fail("should throw NullPointerException");
    } catch (NullPointerException e) {
        // expected
    }
}

/**
 * Normal gathering write of two full buffers: both buffers drained,
 * file grows to twice the content length, and the whole file equals
 * CONTENT_AS_BYTES repeated twice.
 *
 * FIX(review): the original built expectedResult but never used it and
 * only verified the first half of the file; now the entire file is
 * checked against expectedResult.
 */
public void test_write$LByteBuffer() throws Exception {
    ByteBuffer[] writeBuffers = new ByteBuffer[2];
    writeBuffers[0] = ByteBuffer.wrap(CONTENT_AS_BYTES);
    writeBuffers[1] = ByteBuffer.wrap(CONTENT_AS_BYTES);

    long result = writeOnlyFileChannel.write(writeBuffers);
    assertEquals(CONTENT_AS_BYTES_LENGTH * 2, result);
    assertEquals(CONTENT_AS_BYTES_LENGTH, writeBuffers[0].position());
    assertEquals(CONTENT_AS_BYTES_LENGTH, writeBuffers[1].position());
    writeOnlyFileChannel.close();

    assertEquals(CONTENT_AS_BYTES_LENGTH * 2, fileOfWriteOnlyFileChannel
            .length());

    fis = new FileInputStream(fileOfWriteOnlyFileChannel);
    byte[] inputBuffer = new byte[CONTENT_AS_BYTES_LENGTH * 2];
    fis.read(inputBuffer);
    byte[] expectedResult = new byte[CONTENT_AS_BYTES_LENGTH * 2];
    System.arraycopy(CONTENT_AS_BYTES, 0, expectedResult, 0,
            CONTENT_AS_BYTES_LENGTH);
    System.arraycopy(CONTENT_AS_BYTES, 0, expectedResult,
            CONTENT_AS_BYTES_LENGTH, CONTENT_AS_BYTES_LENGTH);
    assertTrue(Arrays.equals(expectedResult, inputBuffer));
}

/**
 * write(null, int, int) must throw NullPointerException, and the null
 * check takes precedence over the closed-channel check.
 */
public void test_write$LByteBufferII_Null() throws Exception {
    ByteBuffer[] writeBuffers = null;

    try {
        readOnlyFileChannel.write(writeBuffers, 1, 2);
        fail("should throw NullPointerException");
    } catch (NullPointerException e) {
        // expected
    }

    try {
        writeOnlyFileChannel.write(writeBuffers, 1, 2);
        fail("should throw NullPointerException");
    } catch (NullPointerException e) {
        // expected
    }

    try {
        readWriteFileChannel.write(writeBuffers, 1, 2);
        fail("should throw NullPointerException");
    } catch (NullPointerException e) {
        // expected
    }

    // NullPointerException is thrown first, even on closed channels.
    readOnlyFileChannel.close();
    try {
        readOnlyFileChannel.write(writeBuffers, 1, 2);
        fail("should throw NullPointerException");
    } catch (NullPointerException e) {
        // expected
    }

    writeOnlyFileChannel.close();
    try {
        writeOnlyFileChannel.write(writeBuffers, 1, 2);
        fail("should throw NullPointerException");
    } catch (NullPointerException e) {
        // expected
    }

    readWriteFileChannel.close();
    try {
        readWriteFileChannel.write(writeBuffers, 1, 2);
        fail("should throw NullPointerException");
    } catch (NullPointerException e) {
        // expected
    }
}

/**
 * write(ByteBuffer[], int, int) with an (offset, length) pair outside
 * the two-element array must throw IndexOutOfBoundsException on every
 * channel, regardless of access mode.
 */
public void test_write$LByteBufferII_IndexOutOfBound() throws Exception {
    ByteBuffer[] writeBuffers = new ByteBuffer[2];
    writeBuffers[0] = ByteBuffer.allocate(this.CONTENT_LENGTH);
    writeBuffers[1] = ByteBuffer.allocate(this.CONTENT_LENGTH);

    // Same order as the original test: write-only, read-write, read-only.
    expectWriteIndexOutOfBounds(writeOnlyFileChannel, writeBuffers);
    expectWriteIndexOutOfBounds(readWriteFileChannel, writeBuffers);
    expectWriteIndexOutOfBounds(readOnlyFileChannel, writeBuffers);
}

/**
 * Asserts that every invalid (offset, length) combination for a
 * two-element buffer array is rejected with IndexOutOfBoundsException.
 */
private void expectWriteIndexOutOfBounds(FileChannel channel,
        ByteBuffer[] buffers) throws IOException {
    int[][] badRanges = { { -1, 0 }, { 0, -1 }, { 0, 3 }, { 1, 2 },
            { 2, 1 }, { 3, 0 } };
    for (int[] range : badRanges) {
        try {
            channel.write(buffers, range[0], range[1]);
            fail("should throw IndexOutOfBoundsException");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }
    }
}

/**
 * write(ByteBuffer[], int, int) over an array with a null element in
 * the written range must throw NullPointerException.
 */
public void test_write$LByteBufferII_EmptyBuffers() throws Exception {
    ByteBuffer[] writeBuffers = new ByteBuffer[2];
    writeBuffers[0] = ByteBuffer.allocate(this.CONTENT_LENGTH);
    try {
        writeOnlyFileChannel.write(writeBuffers, 0, 2);
        fail("should throw NullPointerException");
    } catch (NullPointerException e) {
        // expected
    }
    try {
        readWriteFileChannel.write(writeBuffers, 0, 2);
        fail("should throw NullPointerException");
    } catch (NullPointerException e) {
        // expected
    }
}

/**
 * transferTo with a negative count or a negative position must throw
 * IllegalArgumentException.
 */
public void test_transferToJJLWritableByteChannel_IllegalArgument()
        throws Exception {
    WritableByteChannel writableByteChannel = DatagramChannel.open();
    try {
        readOnlyFileChannel.transferTo(10, -1, writableByteChannel);
        fail("should throw IllegalArgumentException.");
    } catch (IllegalArgumentException e) {
        // expected
    }

    try {
        readWriteFileChannel.transferTo(-1, 10, writableByteChannel);
        fail("should throw IllegalArgumentException.");
    } catch (IllegalArgumentException e) {
        // expected
    }
}

/**
 * Asserts that tryLock over the given region fails with
 * OverlappingFileLockException (i.e. the region is already locked).
 */
private void assertLockFails(long position, long size) throws IOException {
    try {
        readWriteFileChannel.tryLock(position, size, false);
        fail();
    } catch (OverlappingFileLockException expected) {
    }
}
}
googleapis/google-cloud-java
36,424
java-texttospeech/proto-google-cloud-texttospeech-v1beta1/src/main/java/com/google/cloud/texttospeech/v1beta1/Voice.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/texttospeech/v1beta1/cloud_tts.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.texttospeech.v1beta1; /** * * * <pre> * Description of a voice supported by the TTS service. * </pre> * * Protobuf type {@code google.cloud.texttospeech.v1beta1.Voice} */ public final class Voice extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.texttospeech.v1beta1.Voice) VoiceOrBuilder { private static final long serialVersionUID = 0L; // Use Voice.newBuilder() to construct. 
private Voice(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private Voice() { languageCodes_ = com.google.protobuf.LazyStringArrayList.emptyList(); name_ = ""; ssmlGender_ = 0; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new Voice(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.texttospeech.v1beta1.TextToSpeechProto .internal_static_google_cloud_texttospeech_v1beta1_Voice_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.texttospeech.v1beta1.TextToSpeechProto .internal_static_google_cloud_texttospeech_v1beta1_Voice_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.texttospeech.v1beta1.Voice.class, com.google.cloud.texttospeech.v1beta1.Voice.Builder.class); } public static final int LANGUAGE_CODES_FIELD_NUMBER = 1; @SuppressWarnings("serial") private com.google.protobuf.LazyStringArrayList languageCodes_ = com.google.protobuf.LazyStringArrayList.emptyList(); /** * * * <pre> * The languages that this voice supports, expressed as * [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tags (e.g. * "en-US", "es-419", "cmn-tw"). * </pre> * * <code>repeated string language_codes = 1;</code> * * @return A list containing the languageCodes. */ public com.google.protobuf.ProtocolStringList getLanguageCodesList() { return languageCodes_; } /** * * * <pre> * The languages that this voice supports, expressed as * [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tags (e.g. * "en-US", "es-419", "cmn-tw"). * </pre> * * <code>repeated string language_codes = 1;</code> * * @return The count of languageCodes. 
*/ public int getLanguageCodesCount() { return languageCodes_.size(); } /** * * * <pre> * The languages that this voice supports, expressed as * [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tags (e.g. * "en-US", "es-419", "cmn-tw"). * </pre> * * <code>repeated string language_codes = 1;</code> * * @param index The index of the element to return. * @return The languageCodes at the given index. */ public java.lang.String getLanguageCodes(int index) { return languageCodes_.get(index); } /** * * * <pre> * The languages that this voice supports, expressed as * [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tags (e.g. * "en-US", "es-419", "cmn-tw"). * </pre> * * <code>repeated string language_codes = 1;</code> * * @param index The index of the value to return. * @return The bytes of the languageCodes at the given index. */ public com.google.protobuf.ByteString getLanguageCodesBytes(int index) { return languageCodes_.getByteString(index); } public static final int NAME_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object name_ = ""; /** * * * <pre> * The name of this voice. Each distinct voice has a unique name. * </pre> * * <code>string name = 2;</code> * * @return The name. */ @java.lang.Override public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } } /** * * * <pre> * The name of this voice. Each distinct voice has a unique name. * </pre> * * <code>string name = 2;</code> * * @return The bytes for name. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int SSML_GENDER_FIELD_NUMBER = 3; private int ssmlGender_ = 0; /** * * * <pre> * The gender of this voice. * </pre> * * <code>.google.cloud.texttospeech.v1beta1.SsmlVoiceGender ssml_gender = 3;</code> * * @return The enum numeric value on the wire for ssmlGender. */ @java.lang.Override public int getSsmlGenderValue() { return ssmlGender_; } /** * * * <pre> * The gender of this voice. * </pre> * * <code>.google.cloud.texttospeech.v1beta1.SsmlVoiceGender ssml_gender = 3;</code> * * @return The ssmlGender. */ @java.lang.Override public com.google.cloud.texttospeech.v1beta1.SsmlVoiceGender getSsmlGender() { com.google.cloud.texttospeech.v1beta1.SsmlVoiceGender result = com.google.cloud.texttospeech.v1beta1.SsmlVoiceGender.forNumber(ssmlGender_); return result == null ? com.google.cloud.texttospeech.v1beta1.SsmlVoiceGender.UNRECOGNIZED : result; } public static final int NATURAL_SAMPLE_RATE_HERTZ_FIELD_NUMBER = 4; private int naturalSampleRateHertz_ = 0; /** * * * <pre> * The natural sample rate (in hertz) for this voice. * </pre> * * <code>int32 natural_sample_rate_hertz = 4;</code> * * @return The naturalSampleRateHertz. 
*/ @java.lang.Override public int getNaturalSampleRateHertz() { return naturalSampleRateHertz_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < languageCodes_.size(); i++) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, languageCodes_.getRaw(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, name_); } if (ssmlGender_ != com.google.cloud.texttospeech.v1beta1.SsmlVoiceGender.SSML_VOICE_GENDER_UNSPECIFIED .getNumber()) { output.writeEnum(3, ssmlGender_); } if (naturalSampleRateHertz_ != 0) { output.writeInt32(4, naturalSampleRateHertz_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; { int dataSize = 0; for (int i = 0; i < languageCodes_.size(); i++) { dataSize += computeStringSizeNoTag(languageCodes_.getRaw(i)); } size += dataSize; size += 1 * getLanguageCodesList().size(); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, name_); } if (ssmlGender_ != com.google.cloud.texttospeech.v1beta1.SsmlVoiceGender.SSML_VOICE_GENDER_UNSPECIFIED .getNumber()) { size += com.google.protobuf.CodedOutputStream.computeEnumSize(3, ssmlGender_); } if (naturalSampleRateHertz_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(4, naturalSampleRateHertz_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { 
return true; } if (!(obj instanceof com.google.cloud.texttospeech.v1beta1.Voice)) { return super.equals(obj); } com.google.cloud.texttospeech.v1beta1.Voice other = (com.google.cloud.texttospeech.v1beta1.Voice) obj; if (!getLanguageCodesList().equals(other.getLanguageCodesList())) return false; if (!getName().equals(other.getName())) return false; if (ssmlGender_ != other.ssmlGender_) return false; if (getNaturalSampleRateHertz() != other.getNaturalSampleRateHertz()) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getLanguageCodesCount() > 0) { hash = (37 * hash) + LANGUAGE_CODES_FIELD_NUMBER; hash = (53 * hash) + getLanguageCodesList().hashCode(); } hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); hash = (37 * hash) + SSML_GENDER_FIELD_NUMBER; hash = (53 * hash) + ssmlGender_; hash = (37 * hash) + NATURAL_SAMPLE_RATE_HERTZ_FIELD_NUMBER; hash = (53 * hash) + getNaturalSampleRateHertz(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.texttospeech.v1beta1.Voice parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.texttospeech.v1beta1.Voice parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.texttospeech.v1beta1.Voice parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.texttospeech.v1beta1.Voice parseFrom( com.google.protobuf.ByteString 
data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.texttospeech.v1beta1.Voice parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.texttospeech.v1beta1.Voice parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.texttospeech.v1beta1.Voice parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.texttospeech.v1beta1.Voice parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.texttospeech.v1beta1.Voice parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.texttospeech.v1beta1.Voice parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.texttospeech.v1beta1.Voice parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.texttospeech.v1beta1.Voice parseFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.texttospeech.v1beta1.Voice prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Description of a voice supported by the TTS service. * </pre> * * Protobuf type {@code google.cloud.texttospeech.v1beta1.Voice} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.texttospeech.v1beta1.Voice) com.google.cloud.texttospeech.v1beta1.VoiceOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.texttospeech.v1beta1.TextToSpeechProto .internal_static_google_cloud_texttospeech_v1beta1_Voice_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.texttospeech.v1beta1.TextToSpeechProto .internal_static_google_cloud_texttospeech_v1beta1_Voice_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.texttospeech.v1beta1.Voice.class, com.google.cloud.texttospeech.v1beta1.Voice.Builder.class); } // Construct using com.google.cloud.texttospeech.v1beta1.Voice.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent 
parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; languageCodes_ = com.google.protobuf.LazyStringArrayList.emptyList(); name_ = ""; ssmlGender_ = 0; naturalSampleRateHertz_ = 0; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.texttospeech.v1beta1.TextToSpeechProto .internal_static_google_cloud_texttospeech_v1beta1_Voice_descriptor; } @java.lang.Override public com.google.cloud.texttospeech.v1beta1.Voice getDefaultInstanceForType() { return com.google.cloud.texttospeech.v1beta1.Voice.getDefaultInstance(); } @java.lang.Override public com.google.cloud.texttospeech.v1beta1.Voice build() { com.google.cloud.texttospeech.v1beta1.Voice result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.texttospeech.v1beta1.Voice buildPartial() { com.google.cloud.texttospeech.v1beta1.Voice result = new com.google.cloud.texttospeech.v1beta1.Voice(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.texttospeech.v1beta1.Voice result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { languageCodes_.makeImmutable(); result.languageCodes_ = languageCodes_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.name_ = name_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.ssmlGender_ = ssmlGender_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.naturalSampleRateHertz_ = naturalSampleRateHertz_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { 
return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.texttospeech.v1beta1.Voice) { return mergeFrom((com.google.cloud.texttospeech.v1beta1.Voice) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.texttospeech.v1beta1.Voice other) { if (other == com.google.cloud.texttospeech.v1beta1.Voice.getDefaultInstance()) return this; if (!other.languageCodes_.isEmpty()) { if (languageCodes_.isEmpty()) { languageCodes_ = other.languageCodes_; bitField0_ |= 0x00000001; } else { ensureLanguageCodesIsMutable(); languageCodes_.addAll(other.languageCodes_); } onChanged(); } if (!other.getName().isEmpty()) { name_ = other.name_; bitField0_ |= 0x00000002; onChanged(); } if (other.ssmlGender_ != 0) { setSsmlGenderValue(other.getSsmlGenderValue()); } if (other.getNaturalSampleRateHertz() != 0) { setNaturalSampleRateHertz(other.getNaturalSampleRateHertz()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); 
switch (tag) { case 0: done = true; break; case 10: { java.lang.String s = input.readStringRequireUtf8(); ensureLanguageCodesIsMutable(); languageCodes_.add(s); break; } // case 10 case 18: { name_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 24: { ssmlGender_ = input.readEnum(); bitField0_ |= 0x00000004; break; } // case 24 case 32: { naturalSampleRateHertz_ = input.readInt32(); bitField0_ |= 0x00000008; break; } // case 32 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.protobuf.LazyStringArrayList languageCodes_ = com.google.protobuf.LazyStringArrayList.emptyList(); private void ensureLanguageCodesIsMutable() { if (!languageCodes_.isModifiable()) { languageCodes_ = new com.google.protobuf.LazyStringArrayList(languageCodes_); } bitField0_ |= 0x00000001; } /** * * * <pre> * The languages that this voice supports, expressed as * [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tags (e.g. * "en-US", "es-419", "cmn-tw"). * </pre> * * <code>repeated string language_codes = 1;</code> * * @return A list containing the languageCodes. */ public com.google.protobuf.ProtocolStringList getLanguageCodesList() { languageCodes_.makeImmutable(); return languageCodes_; } /** * * * <pre> * The languages that this voice supports, expressed as * [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tags (e.g. * "en-US", "es-419", "cmn-tw"). * </pre> * * <code>repeated string language_codes = 1;</code> * * @return The count of languageCodes. 
*/ public int getLanguageCodesCount() { return languageCodes_.size(); } /** * * * <pre> * The languages that this voice supports, expressed as * [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tags (e.g. * "en-US", "es-419", "cmn-tw"). * </pre> * * <code>repeated string language_codes = 1;</code> * * @param index The index of the element to return. * @return The languageCodes at the given index. */ public java.lang.String getLanguageCodes(int index) { return languageCodes_.get(index); } /** * * * <pre> * The languages that this voice supports, expressed as * [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tags (e.g. * "en-US", "es-419", "cmn-tw"). * </pre> * * <code>repeated string language_codes = 1;</code> * * @param index The index of the value to return. * @return The bytes of the languageCodes at the given index. */ public com.google.protobuf.ByteString getLanguageCodesBytes(int index) { return languageCodes_.getByteString(index); } /** * * * <pre> * The languages that this voice supports, expressed as * [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tags (e.g. * "en-US", "es-419", "cmn-tw"). * </pre> * * <code>repeated string language_codes = 1;</code> * * @param index The index to set the value at. * @param value The languageCodes to set. * @return This builder for chaining. */ public Builder setLanguageCodes(int index, java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureLanguageCodesIsMutable(); languageCodes_.set(index, value); bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The languages that this voice supports, expressed as * [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tags (e.g. * "en-US", "es-419", "cmn-tw"). * </pre> * * <code>repeated string language_codes = 1;</code> * * @param value The languageCodes to add. * @return This builder for chaining. 
*/ public Builder addLanguageCodes(java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureLanguageCodesIsMutable(); languageCodes_.add(value); bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The languages that this voice supports, expressed as * [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tags (e.g. * "en-US", "es-419", "cmn-tw"). * </pre> * * <code>repeated string language_codes = 1;</code> * * @param values The languageCodes to add. * @return This builder for chaining. */ public Builder addAllLanguageCodes(java.lang.Iterable<java.lang.String> values) { ensureLanguageCodesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, languageCodes_); bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The languages that this voice supports, expressed as * [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tags (e.g. * "en-US", "es-419", "cmn-tw"). * </pre> * * <code>repeated string language_codes = 1;</code> * * @return This builder for chaining. */ public Builder clearLanguageCodes() { languageCodes_ = com.google.protobuf.LazyStringArrayList.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); ; onChanged(); return this; } /** * * * <pre> * The languages that this voice supports, expressed as * [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tags (e.g. * "en-US", "es-419", "cmn-tw"). * </pre> * * <code>repeated string language_codes = 1;</code> * * @param value The bytes of the languageCodes to add. * @return This builder for chaining. */ public Builder addLanguageCodesBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); ensureLanguageCodesIsMutable(); languageCodes_.add(value); bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object name_ = ""; /** * * * <pre> * The name of this voice. 
Each distinct voice has a unique name. * </pre> * * <code>string name = 2;</code> * * @return The name. */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The name of this voice. Each distinct voice has a unique name. * </pre> * * <code>string name = 2;</code> * * @return The bytes for name. */ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The name of this voice. Each distinct voice has a unique name. * </pre> * * <code>string name = 2;</code> * * @param value The name to set. * @return This builder for chaining. */ public Builder setName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } name_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The name of this voice. Each distinct voice has a unique name. * </pre> * * <code>string name = 2;</code> * * @return This builder for chaining. */ public Builder clearName() { name_ = getDefaultInstance().getName(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * The name of this voice. Each distinct voice has a unique name. * </pre> * * <code>string name = 2;</code> * * @param value The bytes for name to set. * @return This builder for chaining. 
*/ public Builder setNameBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); name_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private int ssmlGender_ = 0; /** * * * <pre> * The gender of this voice. * </pre> * * <code>.google.cloud.texttospeech.v1beta1.SsmlVoiceGender ssml_gender = 3;</code> * * @return The enum numeric value on the wire for ssmlGender. */ @java.lang.Override public int getSsmlGenderValue() { return ssmlGender_; } /** * * * <pre> * The gender of this voice. * </pre> * * <code>.google.cloud.texttospeech.v1beta1.SsmlVoiceGender ssml_gender = 3;</code> * * @param value The enum numeric value on the wire for ssmlGender to set. * @return This builder for chaining. */ public Builder setSsmlGenderValue(int value) { ssmlGender_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * The gender of this voice. * </pre> * * <code>.google.cloud.texttospeech.v1beta1.SsmlVoiceGender ssml_gender = 3;</code> * * @return The ssmlGender. */ @java.lang.Override public com.google.cloud.texttospeech.v1beta1.SsmlVoiceGender getSsmlGender() { com.google.cloud.texttospeech.v1beta1.SsmlVoiceGender result = com.google.cloud.texttospeech.v1beta1.SsmlVoiceGender.forNumber(ssmlGender_); return result == null ? com.google.cloud.texttospeech.v1beta1.SsmlVoiceGender.UNRECOGNIZED : result; } /** * * * <pre> * The gender of this voice. * </pre> * * <code>.google.cloud.texttospeech.v1beta1.SsmlVoiceGender ssml_gender = 3;</code> * * @param value The ssmlGender to set. * @return This builder for chaining. */ public Builder setSsmlGender(com.google.cloud.texttospeech.v1beta1.SsmlVoiceGender value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; ssmlGender_ = value.getNumber(); onChanged(); return this; } /** * * * <pre> * The gender of this voice. 
* </pre> * * <code>.google.cloud.texttospeech.v1beta1.SsmlVoiceGender ssml_gender = 3;</code> * * @return This builder for chaining. */ public Builder clearSsmlGender() { bitField0_ = (bitField0_ & ~0x00000004); ssmlGender_ = 0; onChanged(); return this; } private int naturalSampleRateHertz_; /** * * * <pre> * The natural sample rate (in hertz) for this voice. * </pre> * * <code>int32 natural_sample_rate_hertz = 4;</code> * * @return The naturalSampleRateHertz. */ @java.lang.Override public int getNaturalSampleRateHertz() { return naturalSampleRateHertz_; } /** * * * <pre> * The natural sample rate (in hertz) for this voice. * </pre> * * <code>int32 natural_sample_rate_hertz = 4;</code> * * @param value The naturalSampleRateHertz to set. * @return This builder for chaining. */ public Builder setNaturalSampleRateHertz(int value) { naturalSampleRateHertz_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * The natural sample rate (in hertz) for this voice. * </pre> * * <code>int32 natural_sample_rate_hertz = 4;</code> * * @return This builder for chaining. 
*/
      public Builder clearNaturalSampleRateHertz() {
        bitField0_ = (bitField0_ & ~0x00000008);
        naturalSampleRateHertz_ = 0;
        onChanged();
        return this;
      }

      @java.lang.Override
      public final Builder setUnknownFields(
          final com.google.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final com.google.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }

      // NOTE(review): the @@protoc_insertion_point markers below indicate this class is
      // protoc-generated; regeneration will overwrite any manual edits.
      // @@protoc_insertion_point(builder_scope:google.cloud.texttospeech.v1beta1.Voice)
    }

    // @@protoc_insertion_point(class_scope:google.cloud.texttospeech.v1beta1.Voice)

    // Shared immutable default instance; also used as the prototype for new builders.
    private static final com.google.cloud.texttospeech.v1beta1.Voice DEFAULT_INSTANCE;

    static {
      DEFAULT_INSTANCE = new com.google.cloud.texttospeech.v1beta1.Voice();
    }

    public static com.google.cloud.texttospeech.v1beta1.Voice getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    // Parser that delegates to Builder.mergeFrom and attaches the partially-built message to any
    // parse failure so callers can inspect what was read before the error.
    private static final com.google.protobuf.Parser<Voice> PARSER =
        new com.google.protobuf.AbstractParser<Voice>() {
          @java.lang.Override
          public Voice parsePartialFrom(
              com.google.protobuf.CodedInputStream input,
              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
              throws com.google.protobuf.InvalidProtocolBufferException {
            Builder builder = newBuilder();
            try {
              builder.mergeFrom(input, extensionRegistry);
            } catch (com.google.protobuf.InvalidProtocolBufferException e) {
              throw e.setUnfinishedMessage(builder.buildPartial());
            } catch (com.google.protobuf.UninitializedMessageException e) {
              throw e.asInvalidProtocolBufferException()
                  .setUnfinishedMessage(builder.buildPartial());
            } catch (java.io.IOException e) {
              // Wrap plain I/O failures in the protobuf exception type expected by callers.
              throw new com.google.protobuf.InvalidProtocolBufferException(e)
                  .setUnfinishedMessage(builder.buildPartial());
            }
            return builder.buildPartial();
          }
        };

    public static com.google.protobuf.Parser<Voice> parser() {
      return PARSER;
    }

    @java.lang.Override
    public com.google.protobuf.Parser<Voice> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public com.google.cloud.texttospeech.v1beta1.Voice getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }
  }
google/nomulus
36,554
core/src/main/java/google/registry/persistence/transaction/JpaTransactionManagerImpl.java
// Copyright 2019 The Nomulus Authors. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package google.registry.persistence.transaction; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Throwables.throwIfUnchecked; import static com.google.common.collect.ImmutableList.toImmutableList; import static com.google.common.collect.ImmutableMap.toImmutableMap; import static com.google.common.collect.ImmutableSet.toImmutableSet; import static google.registry.config.RegistryConfig.getHibernateAllowNestedTransactions; import static google.registry.persistence.transaction.DatabaseException.throwIfSqlException; import static google.registry.util.PreconditionsUtils.checkArgumentNotNull; import static java.util.AbstractMap.SimpleEntry; import static java.util.stream.Collectors.joining; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableCollection; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Streams; import com.google.common.flogger.FluentLogger; import com.google.common.flogger.StackSize; import google.registry.model.ImmutableObject; import google.registry.persistence.JpaRetries; import google.registry.persistence.PersistenceModule.TransactionIsolationLevel; import google.registry.persistence.VKey; import google.registry.util.Clock; 
import google.registry.util.RegistryEnvironment; import google.registry.util.Retrier; import google.registry.util.SystemSleeper; import jakarta.persistence.CacheRetrieveMode; import jakarta.persistence.CacheStoreMode; import jakarta.persistence.EntityManager; import jakarta.persistence.EntityManagerFactory; import jakarta.persistence.EntityTransaction; import jakarta.persistence.FlushModeType; import jakarta.persistence.LockModeType; import jakarta.persistence.Parameter; import jakarta.persistence.PersistenceException; import jakarta.persistence.Query; import jakarta.persistence.TemporalType; import jakarta.persistence.TypedQuery; import jakarta.persistence.criteria.CriteriaQuery; import jakarta.persistence.metamodel.EntityType; import jakarta.persistence.metamodel.Metamodel; import java.io.Serializable; import java.lang.reflect.Array; import java.lang.reflect.Field; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.Calendar; import java.util.Collections; import java.util.Date; import java.util.IdentityHashMap; import java.util.List; import java.util.Map; import java.util.NoSuchElementException; import java.util.Optional; import java.util.Set; import java.util.concurrent.Callable; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; import java.util.function.Supplier; import java.util.stream.Stream; import java.util.stream.StreamSupport; import javax.annotation.Nullable; import org.hibernate.Session; import org.hibernate.SessionFactory; import org.hibernate.cfg.Environment; import org.joda.time.DateTime; /** Implementation of {@link JpaTransactionManager} for JPA compatible database. 
*
* <p>Each thread has at most one active transaction at a time; the per-transaction state (the
* {@link EntityManager}, start time, and ID allocator) lives in a {@link ThreadLocal}
* {@link TransactionInfo}.
*/
public class JpaTransactionManagerImpl implements JpaTransactionManager {

  private static final FluentLogger logger = FluentLogger.forEnclosingClass();

  // Retries failed transactions when the failure is retriable (see JpaRetries). The constant 6 is
  // presumably the attempt cap -- confirm against the Retrier constructor contract.
  private static final Retrier retrier = new Retrier(new SystemSleeper(), 6);

  private static final String NESTED_TRANSACTION_MESSAGE =
      "Nested transaction detected. Try refactoring to avoid nested transactions. If unachievable,"
          + " use reTransact() in nested transactions";

  // Log-line prefix emitted by the statement inspector so logged SQL can be grepped for.
  private static final String SQL_STATEMENT_LOG_SENTINEL_FORMAT = "SQL_STATEMENT_LOG: %s";

  // EntityManagerFactory is thread safe.
  private final EntityManagerFactory emf;
  private final Clock clock;
  // When true, this manager targets a read-only SQL replica: sessions are opened read-only, each
  // transaction issues "SET TRANSACTION READ ONLY", and IDs come from ReplicaDbIdService.
  private final boolean readOnly;

  // Per-thread transaction state; populated in transactNoRetry() and cleared in its finally block.
  private static final ThreadLocal<TransactionInfo> transactionInfo =
      ThreadLocal.withInitial(TransactionInfo::new);

  public JpaTransactionManagerImpl(EntityManagerFactory emf, Clock clock, boolean readOnly) {
    this.emf = emf;
    this.clock = clock;
    this.readOnly = readOnly;
  }

  /** Convenience constructor for a read-write manager. */
  public JpaTransactionManagerImpl(EntityManagerFactory emf, Clock clock) {
    this(emf, clock, false);
  }

  @Override
  public void teardown() {
    emf.close();
  }

  /** Returns a fresh EntityManager not tied to the thread-local transaction; caller must close. */
  @Override
  public EntityManager getStandaloneEntityManager() {
    return emf.createEntityManager();
  }

  @Override
  public Metamodel getMetaModel() {
    return this.emf.getMetamodel();
  }

  /** Returns the EntityManager of the current thread's transaction; throws if not in one. */
  @Override
  public EntityManager getEntityManager() {
    assertInTransaction();
    return transactionInfo.get().entityManager;
  }

  @Override
  public <T> TypedQuery<T> query(String sqlString, Class<T> resultClass) {
    return new DetachingTypedQuery<>(getEntityManager().createQuery(sqlString, resultClass));
  }

  @Override
  public <T> TypedQuery<T> criteriaQuery(CriteriaQuery<T> criteriaQuery) {
    return new DetachingTypedQuery<>(getEntityManager().createQuery(criteriaQuery));
  }

  @Override
  public Query query(String sqlString) {
    return getEntityManager().createQuery(sqlString);
  }

  @Override
  public boolean inTransaction() {
    return transactionInfo.get().inTransaction;
  }

  @Override
  public long allocateId() {
    assertInTransaction();
    // The provider is chosen at transaction start: replica-safe IDs in read-only mode, otherwise
    // the database sequence (see fetchIdFromSequence).
    return transactionInfo.get().idProvider.get();
  }

  @Override
  public void assertInTransaction() {
    if (!inTransaction()) {
      throw new IllegalStateException("Not in a transaction");
    }
  }

  @Override
  public <T> T reTransact(Callable<T> work) {
    // This prevents inner transaction from retrying, thus avoiding a cascade retry effect.
    if (inTransaction()) {
      return transactNoRetry(null, work);
    }
    return retrier.callWithRetry(
        () -> transactNoRetry(null, work), JpaRetries::isFailedTxnRetriable);
  }

  @Override
  public <T> T transact(TransactionIsolationLevel isolationLevel, Callable<T> work) {
    return transact(isolationLevel, work, false);
  }

  @Override
  public <T> T transact(
      TransactionIsolationLevel isolationLevel, Callable<T> work, boolean logSqlStatements) {
    if (inTransaction()) {
      // Nested transact() calls are discouraged; allowed only via config flag, and warned about
      // outside unit tests.
      if (!getHibernateAllowNestedTransactions()) {
        throw new IllegalStateException(NESTED_TRANSACTION_MESSAGE);
      }
      if (RegistryEnvironment.get() != RegistryEnvironment.UNITTEST) {
        logger.atWarning().withStackTrace(StackSize.MEDIUM).atMostEvery(1, TimeUnit.MINUTES).log(
            NESTED_TRANSACTION_MESSAGE);
      }
      // This prevents inner transaction from retrying, thus avoiding a cascade retry effect.
      // NOTE(review): this nested path calls the two-arg overload, silently dropping
      // logSqlStatements -- confirm whether that is intentional.
      return transactNoRetry(isolationLevel, work);
    }
    return retrier.callWithRetry(
        () -> transactNoRetry(isolationLevel, work, logSqlStatements),
        JpaRetries::isFailedTxnRetriable);
  }

  @Override
  public <T> T transact(Callable<T> work) {
    return transact(null, work);
  }

  @Override
  public <T> T transactNoRetry(Callable<T> work) {
    return transactNoRetry(null, work);
  }

  @Override
  public <T> T transactNoRetry(
      @Nullable TransactionIsolationLevel isolationLevel, Callable<T> work) {
    return transactNoRetry(isolationLevel, work, false);
  }

  /**
   * Runs {@code work} in a new transaction without retrying, joining an existing transaction if
   * one is already open on this thread (in which case an isolation-level override is rejected).
   */
  @Override
  public <T> T transactNoRetry(
      @Nullable TransactionIsolationLevel isolationLevel,
      Callable<T> work,
      boolean logSqlStatements) {
    if (inTransaction()) {
      // This check will no longer be necessary when the transact() method always throws
      // inside a nested transaction, as the only way to pass a non-null isolation level
      // is by calling the transact() method (and its variants), which would have already
      // thrown before calling transactNoRetry() when inside a nested transaction.
      //
      // For now, we still need it, so we don't accidentally call a nested transact() with an
      // isolation level override. This buys us time to detect nested transact() calls and either
      // remove them or change the call site to reTransact().
      if (isolationLevel != null) {
        throw new IllegalStateException(
            "Transaction isolation level cannot be specified for nested transactions");
      }
      try {
        return work.call();
      } catch (Exception e) {
        throwIfSqlException(e);
        throwIfUnchecked(e);
        throw new RuntimeException(e);
      }
    }
    TransactionInfo txnInfo = transactionInfo.get();
    // When SQL logging is requested, open a Hibernate session with a statement inspector that
    // logs every statement under the SQL_STATEMENT_LOG sentinel; otherwise use a plain manager.
    txnInfo.entityManager =
        logSqlStatements
            ? emf.unwrap(SessionFactory.class)
                .withOptions()
                .statementInspector(
                    s -> {
                      logger.atInfo().log(SQL_STATEMENT_LOG_SENTINEL_FORMAT, s);
                      return s;
                    })
                .openSession()
            : emf.createEntityManager();
    if (readOnly) {
      // Disable Hibernate's dirty object check on flushing, it has become more aggressive in v6.
      txnInfo.entityManager.unwrap(Session.class).setDefaultReadOnly(true);
    }
    EntityTransaction txn = txnInfo.entityManager.getTransaction();
    try {
      txn.begin();
      txnInfo.start(clock, readOnly ? ReplicaDbIdService::allocateId : this::fetchIdFromSequence);
      if (readOnly) {
        getEntityManager().createNativeQuery("SET TRANSACTION READ ONLY").executeUpdate();
        logger.atInfo().log("Using read-only SQL replica");
      }
      if (isolationLevel != null && isolationLevel != getDefaultTransactionIsolationLevel()) {
        getEntityManager()
            .createNativeQuery(
                String.format("SET TRANSACTION ISOLATION LEVEL %s", isolationLevel.getMode()))
            .executeUpdate();
        logger.atInfo().log(
            "Overriding transaction isolation level from %s to %s",
            getDefaultTransactionIsolationLevel(), isolationLevel);
      }
      T result = work.call();
      txn.commit();
      return result;
    } catch (Throwable e) {
      // Catch a Throwable here so even Errors would lead to a rollback.
      try {
        txn.rollback();
        logger.atWarning().log("Error during transaction; transaction rolled back.");
      } catch (Exception rollbackException) {
        logger.atSevere().withCause(rollbackException).log("Rollback failed; suppressing error.");
      }
      throwIfSqlException(e);
      throwIfUnchecked(e);
      throw new RuntimeException(e);
    } finally {
      // Always release the per-thread state and close the EntityManager, success or failure.
      txnInfo.clear();
    }
  }

  @Override
  public void transact(TransactionIsolationLevel isolationLevel, ThrowingRunnable work) {
    transact(
        isolationLevel,
        () -> {
          work.run();
          return null;
        });
  }

  @Override
  public void transact(ThrowingRunnable work) {
    transact(null, work);
  }

  @Override
  public void reTransact(ThrowingRunnable work) {
    reTransact(
        () -> {
          work.run();
          return null;
        });
  }

  @Override
  public TransactionIsolationLevel getDefaultTransactionIsolationLevel() {
    // The default level is read out of the Hibernate configuration, not the live connection.
    return TransactionIsolationLevel.valueOf(
        (String) emf.getProperties().get(Environment.ISOLATION));
  }

  @Override
  public TransactionIsolationLevel getCurrentTransactionIsolationLevel() {
    assertInTransaction();
    String mode =
        (String)
            getEntityManager()
                .createNativeQuery("SHOW TRANSACTION ISOLATION LEVEL")
                .getSingleResult();
    return TransactionIsolationLevel.fromMode(mode);
  }

  @Override
  public DateTime getTransactionTime() {
    assertInTransaction();
    TransactionInfo txnInfo = transactionInfo.get();
    if (txnInfo.transactionTime == null) {
      throw new PersistenceException("In a transaction but transactionTime is null");
    }
    return txnInfo.transactionTime;
  }

  /**
   * Inserts an object into the database.
   *
   * <p>If {@code entity} has an auto-generated identity field (i.e., a field annotated with {@link
   * jakarta.persistence.GeneratedValue}), the caller must not assign a value to this field,
   * otherwise Hibernate would mistake the entity as detached and raise an error.
   *
   * <p>The practical implication of the above is that when inserting such an entity using a
   * retriable transaction, the entity should be instantiated inside the transaction body. A failed
   * attempt may still assign an ID to the entity, therefore reusing the same entity would cause
   * retries to fail.
   */
  @Override
  public void insert(Object entity) {
    checkArgumentNotNull(entity, "entity must be specified");
    assertInTransaction();
    transactionInfo.get().insertObject(entity);
  }

  @Override
  public void insertAll(ImmutableCollection<?> entities) {
    checkArgumentNotNull(entities, "entities must be specified");
    assertInTransaction();
    entities.forEach(this::insert);
  }

  @Override
  public void insertAll(ImmutableObject... entities) {
    insertAll(ImmutableSet.copyOf(entities));
  }

  /** Inserts-or-updates ("upserts") the entity via EntityManager.merge(). */
  @Override
  public void put(Object entity) {
    checkArgumentNotNull(entity, "entity must be specified");
    assertInTransaction();
    transactionInfo.get().updateObject(entity);
  }

  @Override
  public void putAll(ImmutableObject... entities) {
    checkArgumentNotNull(entities, "entities must be specified");
    assertInTransaction();
    for (Object entity : entities) {
      put(entity);
    }
  }

  @Override
  public void putAll(ImmutableCollection<?> entities) {
    checkArgumentNotNull(entities, "entities must be specified");
    assertInTransaction();
    entities.forEach(this::put);
  }

  /** Like put(), but requires that the entity already exists in the database. */
  @Override
  public void update(Object entity) {
    checkArgumentNotNull(entity, "entity must be specified");
    assertInTransaction();
    checkArgument(exists(entity), "Given entity does not exist");
    transactionInfo.get().updateObject(entity);
  }

  @Override
  public void updateAll(ImmutableCollection<?> entities) {
    checkArgumentNotNull(entities, "entities must be specified");
    assertInTransaction();
    entities.forEach(this::update);
  }

  @Override
  public void updateAll(ImmutableObject... entities) {
    updateAll(ImmutableList.copyOf(entities));
  }

  @Override
  public <T> boolean exists(VKey<T> key) {
    checkArgumentNotNull(key, "key must be specified");
    EntityType<?> entityType = getEntityType(key.getKind());
    ImmutableSet<EntityId> entityIds = getEntityIdsFromSqlKey(entityType, key.getKey());
    return exists(entityType.getName(), entityIds);
  }

  @Override
  public boolean exists(Object entity) {
    checkArgumentNotNull(entity, "entity must be specified");
    EntityType<?> entityType = getEntityType(entity.getClass());
    ImmutableSet<EntityId> entityIds = getEntityIdsFromEntity(entityType, entity);
    return exists(entityType.getName(), entityIds);
  }

  /** Existence check via a parameterized "SELECT 1 ... WHERE <pk columns>" probe query. */
  private boolean exists(String entityName, ImmutableSet<EntityId> entityIds) {
    assertInTransaction();
    TypedQuery<Integer> query =
        query(
                String.format("SELECT 1 FROM %s WHERE %s", entityName, getAndClause(entityIds)),
                Integer.class)
            .setMaxResults(1);
    entityIds.forEach(entityId -> query.setParameter(entityId.name, entityId.value));
    return !query.getResultList().isEmpty();
  }

  @Override
  public <T> Optional<T> loadByKeyIfPresent(VKey<T> key) {
    checkArgumentNotNull(key, "key must be specified");
    assertInTransaction();
    return Optional.ofNullable(getEntityManager().find(key.getKind(), key.getKey()))
        .map(this::detach);
  }

  @Override
  public <T> ImmutableMap<VKey<? extends T>, T> loadByKeysIfPresent(
      Iterable<? extends VKey<? extends T>> keys) {
    checkArgumentNotNull(keys, "keys must be specified");
    assertInTransaction();
    return StreamSupport.stream(keys.spliterator(), false)
        // Accept duplicate keys.
        .distinct()
        .map(
            key ->
                new SimpleEntry<VKey<? extends T>, T>(
                    key, detach(getEntityManager().find(key.getKind(), key.getKey()))))
        .filter(entry -> entry.getValue() != null)
        .collect(toImmutableMap(Map.Entry::getKey, Map.Entry::getValue));
  }

  @Override
  public <T> ImmutableList<T> loadByEntitiesIfPresent(Iterable<T> entities) {
    return Streams.stream(entities)
        .filter(this::exists)
        .map(this::loadByEntity)
        .collect(toImmutableList());
  }

  @Override
  public <T> T loadByKey(VKey<T> key) {
    checkArgumentNotNull(key, "key must be specified");
    assertInTransaction();
    T result = getEntityManager().find(key.getKind(), key.getKey());
    if (result == null) {
      throw new NoSuchElementException(key.toString());
    }
    return detach(result);
  }

  @Override
  public <T> ImmutableMap<VKey<? extends T>, T> loadByKeys(
      Iterable<? extends VKey<? extends T>> keys) {
    ImmutableMap<VKey<? extends T>, T> existing = loadByKeysIfPresent(keys);
    ImmutableSet<? extends VKey<? extends T>> missingKeys =
        Streams.stream(keys).filter(k -> !existing.containsKey(k)).collect(toImmutableSet());
    if (!missingKeys.isEmpty()) {
      throw new NoSuchElementException(
          String.format(
              "Expected to find the following VKeys but they were missing: %s.", missingKeys));
    }
    return existing;
  }

  /** Reloads the given entity by its JPA identifier, returning a detached copy. */
  @Override
  public <T> T loadByEntity(T entity) {
    checkArgumentNotNull(entity, "entity must be specified");
    assertInTransaction();
    @SuppressWarnings("unchecked")
    T returnValue =
        (T)
            loadByKey(
                VKey.create(
                    entity.getClass(),
                    // Casting to Serializable is safe according to JPA (JSR 338 sec. 2.4).
                    (Serializable) emf.getPersistenceUnitUtil().getIdentifier(entity)));
    return returnValue;
  }

  @Override
  public <T> ImmutableList<T> loadByEntities(Iterable<T> entities) {
    return Streams.stream(entities).map(this::loadByEntity).collect(toImmutableList());
  }

  @Override
  public <T> ImmutableList<T> loadAllOf(Class<T> clazz) {
    return loadAllOfStream(clazz).collect(toImmutableList());
  }

  @Override
  public <T> Stream<T> loadAllOfStream(Class<T> clazz) {
    checkArgumentNotNull(clazz, "clazz must be specified");
    assertInTransaction();
    return getEntityManager()
        .createQuery(String.format("FROM %s", getEntityType(clazz).getName()), clazz)
        .getResultStream()
        .map(this::detach);
  }

  @Override
  public <T> Optional<T> loadSingleton(Class<T> clazz) {
    assertInTransaction();
    // Fetch at most two rows: enough to detect a violated singleton invariant cheaply.
    List<T> elements =
        getEntityManager()
            .createQuery(String.format("FROM %s", getEntityType(clazz).getName()), clazz)
            .setMaxResults(2)
            .getResultList();
    checkArgument(
        elements.size() <= 1,
        "Expected at most one entity of type %s, found at least two",
        clazz.getSimpleName());
    return elements.stream().findFirst().map(this::detach);
  }

  /** Deletes by key with a bulk JPQL DELETE; returns the number of rows removed. */
  private int internalDelete(VKey<?> key) {
    checkArgumentNotNull(key, "key must be specified");
    assertInTransaction();
    EntityType<?> entityType = getEntityType(key.getKind());
    ImmutableSet<EntityId> entityIds = getEntityIdsFromSqlKey(entityType, key.getKey());
    String sql =
        String.format("DELETE FROM %s WHERE %s", entityType.getName(), getAndClause(entityIds));
    Query query = query(sql);
    entityIds.forEach(entityId -> query.setParameter(entityId.name, entityId.value));
    return query.executeUpdate();
  }

  @Override
  public void delete(VKey<?> key) {
    internalDelete(key);
  }

  @Override
  public void delete(Iterable<? extends VKey<?>> vKeys) {
    checkArgumentNotNull(vKeys, "vKeys must be specified");
    vKeys.forEach(this::internalDelete);
  }

  @Override
  public <T> T delete(T entity) {
    checkArgumentNotNull(entity, "entity must be specified");
    assertInTransaction();
    T managedEntity = entity;
    if (!getEntityManager().contains(entity)) {
      // We don't add the entity to "objectsToSave": once deleted, the object should never be
      // returned as a result of the query or lookup.
      managedEntity = getEntityManager().merge(entity);
    }
    getEntityManager().remove(managedEntity);
    return managedEntity;
  }

  @Override
  public <T> QueryComposer<T> createQueryComposer(Class<T> entity) {
    return new JpaQueryComposerImpl<>(entity);
  }

  /** Deletes by key and throws if exactly one row was not removed. */
  @Override
  public <T> void assertDelete(VKey<T> key) {
    if (internalDelete(key) != 1) {
      throw new IllegalArgumentException(
          String.format("Error deleting the entity of the key: %s", key.getKey()));
    }
  }

  private <T> EntityType<T> getEntityType(Class<T> clazz) {
    return emf.getMetamodel().entity(clazz);
  }

  /**
   * A SQL Sequence based ID allocator that generates an ID from a monotonically increasing {@link
   * AtomicLong}
   *
   * <p>The generated IDs are project-wide unique.
*/
  private long fetchIdFromSequence() {
    return (Long)
        getEntityManager()
            .createNativeQuery("SELECT nextval('project_wide_unique_id_seq')")
            .getSingleResult();
  }

  // One (column name, value) component of an entity's primary key, used to build WHERE clauses.
  private record EntityId(String name, Object value) {}

  /** Extracts the primary-key components from a live entity instance via reflection. */
  private static ImmutableSet<EntityId> getEntityIdsFromEntity(
      EntityType<?> entityType, Object entity) {
    if (entityType.hasSingleIdAttribute()) {
      String idName = entityType.getId(entityType.getIdType().getJavaType()).getName();
      Object idValue = getFieldValue(entity, idName);
      return ImmutableSet.of(new EntityId(idName, idValue));
    } else {
      return getEntityIdsFromIdContainer(entityType, entity);
    }
  }

  /** Extracts the primary-key components from a raw SQL key (single value or composite holder). */
  private static ImmutableSet<EntityId> getEntityIdsFromSqlKey(
      EntityType<?> entityType, Object sqlKey) {
    if (entityType.hasSingleIdAttribute()) {
      String idName = entityType.getId(entityType.getIdType().getJavaType()).getName();
      return ImmutableSet.of(new EntityId(idName, sqlKey));
    } else {
      return getEntityIdsFromIdContainer(entityType, sqlKey);
    }
  }

  /** Extracts composite-key components from an @IdClass-style container object. */
  private static ImmutableSet<EntityId> getEntityIdsFromIdContainer(
      EntityType<?> entityType, Object idContainer) {
    return entityType.getIdClassAttributes().stream()
        .map(
            attribute -> {
              String idName = attribute.getName();
              // The object may use either Java getters or field names to represent the ID object.
              // Attempt the Java getter, then fall back to the field name if that fails.
              String methodName = attribute.getJavaMember().getName();
              Object idValue;
              try {
                Method method = idContainer.getClass().getDeclaredMethod(methodName);
                method.setAccessible(true);
                idValue = method.invoke(idContainer);
              } catch (NoSuchMethodException
                  | IllegalAccessException
                  | InvocationTargetException e) {
                idValue = getFieldValue(idContainer, idName);
              }
              return new EntityId(idName, idValue);
            })
        .collect(toImmutableSet());
  }

  /** Builds "col1 = :col1 AND col2 = :col2 ..." for the given key components. */
  private static String getAndClause(ImmutableSet<EntityId> entityIds) {
    return entityIds.stream()
        .map(entityId -> String.format("%s = :%s", entityId.name, entityId.name))
        .collect(joining(" AND "));
  }

  private static Object getFieldValue(Object object, String fieldName) {
    try {
      Field field = getField(object.getClass(), fieldName);
      field.setAccessible(true);
      return field.get(object);
    } catch (NoSuchFieldException | IllegalAccessException e) {
      throw new IllegalArgumentException(e);
    }
  }

  /** Gets the field definition from clazz or any superclass. */
  private static Field getField(Class<?> clazz, String fieldName) throws NoSuchFieldException {
    try {
      // Note that we have to use getDeclaredField() for this, getField() just finds public fields.
      return clazz.getDeclaredField(fieldName);
    } catch (NoSuchFieldException e) {
      Class<?> base = clazz.getSuperclass();
      if (base != null) {
        return getField(base, fieldName);
      } else {
        throw e;
      }
    }
  }

  /** Detaches {@code object} if it is a JPA entity (or recurses into an array); else passthrough. */
  @Nullable
  private <T> T detachIfEntity(@Nullable T object) {
    if (object == null) {
      return null;
    }
    // Check if the object is an array, if so we'll want to recurse through the elements.
    if (object.getClass().isArray()) {
      for (int i = 0; i < Array.getLength(object); ++i) {
        detachIfEntity(Array.get(object, i));
      }
      return object;
    }
    // Check to see if it is an entity (queries can return raw column values or counts, so this
    // could be String, Long, ...).
    try {
      getEntityManager().getMetamodel().entity(object.getClass());
    } catch (IllegalArgumentException e) {
      // The object is not an entity. Return without detaching.
      return object;
    }
    // At this point, object must be an entity.
    return detach(object);
  }

  /** Detach the entity, suitable for use in Optional.map(). */
  @Nullable
  private <T> T detach(@Nullable T entity) {
    if (entity != null) {
      // If the entity was previously persisted or merged, we have to throw an exception:
      // detaching it would silently drop the pending save.
      if (transactionInfo.get().willSave(entity)) {
        throw new IllegalStateException("Inserted/updated object reloaded: " + entity);
      }
      getEntityManager().detach(entity);
    }
    return entity;
  }

  /** Mutable per-thread holder for the state of the currently open transaction, if any. */
  private static class TransactionInfo {

    EntityManager entityManager;
    boolean inTransaction = false;
    DateTime transactionTime;
    // Supplies unique IDs for the duration of this transaction; set in start(), nulled in clear().
    Supplier<Long> idProvider;

    // The set of entity objects that have been either persisted (via insert()) or merged (via
    // put()/update()). If the entity manager returns these as a result of a find() or query
    // operation, we can not detach them -- detaching removes them from the transaction and causes
    // them to not be saved to the database -- so we throw an exception instead.
    Set<Object> objectsToSave = Collections.newSetFromMap(new IdentityHashMap<>());

    /** Start a new transaction. */
    private void start(Clock clock, Supplier<Long> idProvider) {
      checkArgumentNotNull(clock);
      inTransaction = true;
      transactionTime = clock.nowUtc();
      this.idProvider = idProvider;
    }

    /** Resets all per-transaction state and closes (releases) the EntityManager. */
    private void clear() {
      idProvider = null;
      inTransaction = false;
      transactionTime = null;
      objectsToSave = Collections.newSetFromMap(new IdentityHashMap<>());
      if (entityManager != null) {
        // Closing this EntityManager just lets the connection pool reuse it; it doesn't
        // close the underlying database connection.
        entityManager.close();
        entityManager = null;
      }
    }

    /** Does the full "update" on an object including all internal housekeeping. */
    private void updateObject(Object object) {
      Object merged = entityManager.merge(object);
      objectsToSave.add(merged);
    }

    /** Does the full "insert" on a new object including all internal housekeeping.
*/ private void insertObject(Object object) { entityManager.persist(object); objectsToSave.add(object); } /** Returns true if the object has been persisted/merged and will be saved on commit. */ private boolean willSave(Object object) { return objectsToSave.contains(object); } } /** * Typed query wrapper that applies a transform to all result objects. * * <p>This is used to detach objects upon load. */ @VisibleForTesting class DetachingTypedQuery<T> implements TypedQuery<T> { TypedQuery<T> delegate; DetachingTypedQuery(TypedQuery<T> delegate) { this.delegate = delegate; } @Override public Integer getTimeout() { return delegate.getTimeout(); } @Override public CacheRetrieveMode getCacheRetrieveMode() { return delegate.getCacheRetrieveMode(); } @Override public CacheStoreMode getCacheStoreMode() { return delegate.getCacheStoreMode(); } @Override public TypedQuery<T> setTimeout(Integer timeout) { delegate.setTimeout(timeout); return this; } @Override public TypedQuery<T> setCacheStoreMode(CacheStoreMode mode) { delegate.setCacheStoreMode(mode); return this; } @Override public TypedQuery<T> setCacheRetrieveMode(CacheRetrieveMode mode) { delegate.setCacheRetrieveMode(mode); return this; } @Override public T getSingleResultOrNull() { return delegate.getSingleResultOrNull(); } @Override public List<T> getResultList() { return delegate .getResultStream() .map(JpaTransactionManagerImpl.this::detachIfEntity) .collect(toImmutableList()); } @Override public Stream<T> getResultStream() { return delegate.getResultStream().map(JpaTransactionManagerImpl.this::detachIfEntity); } @Override public T getSingleResult() { return detachIfEntity(delegate.getSingleResult()); } @Override public TypedQuery<T> setMaxResults(int maxResults) { delegate.setMaxResults(maxResults); return this; } @Override public TypedQuery<T> setFirstResult(int startPosition) { delegate.setFirstResult(startPosition); return this; } @Override public TypedQuery<T> setHint(String hintName, Object value) { 
delegate.setHint(hintName, value); return this; } @Override public <U> TypedQuery<T> setParameter(Parameter<U> param, U value) { delegate.setParameter(param, value); return this; } @Override public TypedQuery<T> setParameter( Parameter<Calendar> param, Calendar value, TemporalType temporalType) { delegate.setParameter(param, value, temporalType); return this; } @Override public TypedQuery<T> setParameter( Parameter<Date> param, Date value, TemporalType temporalType) { delegate.setParameter(param, value, temporalType); return this; } @Override public TypedQuery<T> setParameter(String name, Object value) { delegate.setParameter(name, value); return this; } @Override public TypedQuery<T> setParameter(String name, Calendar value, TemporalType temporalType) { delegate.setParameter(name, value, temporalType); return this; } @Override public TypedQuery<T> setParameter(String name, Date value, TemporalType temporalType) { delegate.setParameter(name, value, temporalType); return this; } @Override public TypedQuery<T> setParameter(int position, Object value) { delegate.setParameter(position, value); return this; } @Override public TypedQuery<T> setParameter(int position, Calendar value, TemporalType temporalType) { delegate.setParameter(position, value, temporalType); return this; } @Override public TypedQuery<T> setParameter(int position, Date value, TemporalType temporalType) { delegate.setParameter(position, value, temporalType); return this; } @Override public TypedQuery<T> setFlushMode(FlushModeType flushMode) { delegate.setFlushMode(flushMode); return this; } @Override public TypedQuery<T> setLockMode(LockModeType lockMode) { delegate.setLockMode(lockMode); return this; } // Query interface @Override public int executeUpdate() { return delegate.executeUpdate(); } @Override public int getMaxResults() { return delegate.getMaxResults(); } @Override public int getFirstResult() { return delegate.getFirstResult(); } @Override public Map<String, Object> getHints() { return 
delegate.getHints(); } @Override public Set<Parameter<?>> getParameters() { return delegate.getParameters(); } @Override public Parameter<?> getParameter(String name) { return delegate.getParameter(name); } @Override public <U> Parameter<U> getParameter(String name, Class<U> type) { return delegate.getParameter(name, type); } @Override public Parameter<?> getParameter(int position) { return delegate.getParameter(position); } @Override public <U> Parameter<U> getParameter(int position, Class<U> type) { return delegate.getParameter(position, type); } @Override public boolean isBound(Parameter<?> param) { return delegate.isBound(param); } @Override public <U> U getParameterValue(Parameter<U> param) { return delegate.getParameterValue(param); } @Override public Object getParameterValue(String name) { return delegate.getParameterValue(name); } @Override public Object getParameterValue(int position) { return delegate.getParameterValue(position); } @Override public FlushModeType getFlushMode() { return delegate.getFlushMode(); } @Override public LockModeType getLockMode() { return delegate.getLockMode(); } @Override public <U> U unwrap(Class<U> cls) { return delegate.unwrap(cls); } } private class JpaQueryComposerImpl<T> extends QueryComposer<T> { private static final int DEFAULT_FETCH_SIZE = 1000; private int fetchSize = DEFAULT_FETCH_SIZE; JpaQueryComposerImpl(Class<T> entityClass) { super(entityClass); } private TypedQuery<T> buildQuery() { CriteriaQueryBuilder<T> queryBuilder = CriteriaQueryBuilder.create(JpaTransactionManagerImpl.this, entityClass); return addCriteria(queryBuilder); } private <U> TypedQuery<U> addCriteria(CriteriaQueryBuilder<U> queryBuilder) { for (WhereClause<?> pred : predicates) { pred.addToCriteriaQueryBuilder(queryBuilder); } if (orderBy != null) { queryBuilder.orderByAsc(orderBy); } return getEntityManager().createQuery(queryBuilder.build()); } @Override public QueryComposer<T> withFetchSize(int fetchSize) { checkArgument(fetchSize >= 0, 
"FetchSize must not be negative"); this.fetchSize = fetchSize; return this; } @Override public Optional<T> first() { List<T> results = buildQuery().setMaxResults(1).getResultList(); return !results.isEmpty() ? Optional.of(detach(results.get(0))) : Optional.empty(); } @Override public T getSingleResult() { return detach(buildQuery().getSingleResult()); } @Override public Stream<T> stream() { if (fetchSize == 0) { logger.atWarning().log("Query result streaming is not enabled."); } TypedQuery<T> query = buildQuery(); JpaTransactionManager.setQueryFetchSize(query, fetchSize); return query.getResultStream().map(JpaTransactionManagerImpl.this::detach); } @Override public long count() { CriteriaQueryBuilder<Long> queryBuilder = CriteriaQueryBuilder.createCount(JpaTransactionManagerImpl.this, entityClass); return addCriteria(queryBuilder).getSingleResult(); } @Override public ImmutableList<T> list() { return buildQuery().getResultList().stream() .map(JpaTransactionManagerImpl.this::detach) .collect(toImmutableList()); } } /** * Provides {@code long} values for use as {@code id} by JPA model entities in (read-only) * transactions in the replica database. Each id is only unique in the JVM instance. * * <p>The {@link #fetchIdFromSequence database sequence-based id allocator} cannot be used with * the replica because id generation is a write operation. */ private static final class ReplicaDbIdService { private static final AtomicLong nextId = new AtomicLong(1); /** * Returns the next long value from a {@link AtomicLong}. Each id is unique in the JVM instance. */ static long allocateId() { return nextId.getAndIncrement(); } } }
apache/incubator-weex
36,694
android/sdk/src/main/java/org/apache/weex/ui/component/WXScroller.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.weex.ui.component; import android.annotation.SuppressLint; import android.annotation.TargetApi; import android.content.Context; import android.graphics.Point; import android.graphics.Rect; import android.os.Build; import android.os.Handler; import android.os.Looper; import android.support.annotation.NonNull; import android.support.v4.view.ViewCompat; import android.text.TextUtils; import android.view.GestureDetector; import android.view.Gravity; import android.view.MotionEvent; import android.view.View; import android.view.ViewGroup; import android.view.ViewTreeObserver; import android.widget.FrameLayout; import android.widget.FrameLayout.LayoutParams; import org.apache.weex.WXEnvironment; import org.apache.weex.WXSDKInstance; import org.apache.weex.annotation.Component; import org.apache.weex.annotation.JSMethod; import org.apache.weex.common.Constants; import org.apache.weex.common.ICheckBindingScroller; import org.apache.weex.common.OnWXScrollListener; import org.apache.weex.common.WXThread; import org.apache.weex.performance.WXInstanceApm; import org.apache.weex.ui.ComponentCreator; import org.apache.weex.ui.action.BasicComponentData; import 
org.apache.weex.ui.component.helper.ScrollStartEndHelper;
import org.apache.weex.ui.component.helper.WXStickyHelper;
import org.apache.weex.ui.view.IWXScroller;
import org.apache.weex.ui.view.WXBaseRefreshLayout;
import org.apache.weex.ui.view.WXHorizontalScrollView;
import org.apache.weex.ui.view.WXScrollView;
import org.apache.weex.ui.view.WXScrollView.WXScrollViewListener;
import org.apache.weex.ui.view.refresh.wrapper.BaseBounceView;
import org.apache.weex.ui.view.refresh.wrapper.BounceScrollerView;
import org.apache.weex.utils.WXLogUtils;
import org.apache.weex.utils.WXUtils;
import org.apache.weex.utils.WXViewUtils;

import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;

/**
 * Component for scroller. It also supports features like
 * "appear", "disappear" and "sticky".
 */
@Component(lazyload = false)
public class WXScroller extends WXVContainer<ViewGroup> implements WXScrollViewListener, Scrollable {

  public static final String DIRECTION = "direction";
  // Scroll axis; horizontal scrollers use a WXHorizontalScrollView, vertical a BounceScrollerView.
  protected int mOrientation = Constants.Orientation.VERTICAL;
  // Refresh/loading children intercepted in addChild(); laid out outside mRealView.
  private List<WXComponent> mRefreshs = new ArrayList<>();
  /** Use for offset children layout */
  private int mChildrenLayoutOffset = 0;
  // When true the next onLoadMore check fires even if content size is unchanged (see resetLoadmore()).
  private boolean mForceLoadmoreNextTime = false;
  // Minimum scroll delta (device px) between two reported scroll events.
  private int mOffsetAccuracy = 10;
  // Last reported scroll position; (-1,-1) means nothing reported yet.
  private Point mLastReport = new Point(-1, -1);
  private boolean mHasAddScrollEvent = false;
  // Nullable on purpose: null until the first RTL/LTR layout pass has been observed.
  private Boolean mIslastDirectionRTL;

  private static final int SWIPE_MIN_DISTANCE = 5;
  private static final int SWIPE_THRESHOLD_VELOCITY = 300;
  // Index of the current "page" when pageEnable is set (paged horizontal scrolling).
  private int mActiveFeature = 0;

  /** scroll start and scroll end event */
  private ScrollStartEndHelper mScrollStartEndHelper;

  private GestureDetector mGestureDetector;
  // Page width in device px for paged scrolling; 0 means "use the view's measured width".
  private int pageSize = 0;
  private boolean pageEnable = false;
  private boolean mIsHostAttachedToWindow = false;
  private View.OnAttachStateChangeListener mOnAttachStateChangeListener;
  // NOTE(review): appears unused in this chunk (mIslastDirectionRTL is used instead) — confirm.
  private boolean mlastDirectionRTL = false;

  public static class Creator implements ComponentCreator {
    @Override
    public WXComponent createInstance(WXSDKInstance instance, WXVContainer parent, BasicComponentData basicComponentData)
        throws IllegalAccessException, InvocationTargetException, InstantiationException {
      // For performance message collection
      instance.setUseScroller(true);
      return new WXScroller(instance, parent, basicComponentData);
    }
  }

  /**
   * Map for storing appear information, keyed by component ref.
   **/
  private Map<String, AppearanceHelper> mAppearanceComponents = new HashMap<>();

  /**
   * Map for storing component that is sticky.
   **/
  private Map<String, Map<String, WXComponent>> mStickyMap = new HashMap<>();
  // Direct parent of child views (inside the scroll view).
  private FrameLayout mRealView;
  // The actual scrolling view (WXScrollView or WXHorizontalScrollView).
  private FrameLayout mScrollerView;

  // Last content height/width seen by onLoadMore; used to avoid firing LOADMORE repeatedly.
  private int mContentHeight = 0;
  private int mContentWidth = 0;

  private WXStickyHelper stickyHelper;
  private Handler handler = new Handler(Looper.getMainLooper());

  private boolean isScrollable = true;

  @Deprecated
  public WXScroller(WXSDKInstance instance, WXVContainer parent, String instanceId, boolean isLazy,
                    BasicComponentData basicComponentData) {
    this(instance, parent, basicComponentData);
  }

  public WXScroller(WXSDKInstance instance, WXVContainer parent, BasicComponentData basicComponentData) {
    super(instance, parent, basicComponentData);
    stickyHelper = new WXStickyHelper(this);
    instance.getApmForInstance().updateDiffStats(WXInstanceApm.KEY_PAGE_STATS_SCROLLER_NUM, 1);
  }

  /**
   * @return FrameLayout inner ScrollView
   */
  @Override
  public ViewGroup getRealView() {
    return mScrollerView;
  }

  @Override
  public void createViewImpl() {
    super.createViewImpl();
    // Refresh/loading children were intercepted in addChild(); create their views here.
    for (int i = 0; i < mRefreshs.size(); i++) {
      WXComponent component = mRefreshs.get(i);
      component.createViewImpl();
      checkRefreshOrLoading(component);
    }
  }

  /**
   * @return ScrollView
   */
  public ViewGroup getInnerView() {
    if (getHostView() == null) {
      return null;
    }
    if (getHostView() instanceof BounceScrollerView) {
      return ((BounceScrollerView) getHostView()).getInnerView();
    } else {
      return getHostView();
    }
  }

  // Lazily attaches a scroll listener the first time any scroll-related event is bound.
  @Override
  public void addEvent(String type) {
    super.addEvent(type);
    if (ScrollStartEndHelper.isScrollEvent(type)
        && getInnerView() != null && !mHasAddScrollEvent) {
      mHasAddScrollEvent = true;
      if (getInnerView() instanceof WXScrollView) {
        ((WXScrollView) getInnerView()).addScrollViewListener(new WXScrollViewListener() {
          @Override
          public void onScrollChanged(WXScrollView scrollView, int x, int y, int oldx, int oldy) {
            getScrollStartEndHelper().onScrolled(x, y);
            if (!getEvents().contains(Constants.Event.SCROLL)) {
              return;
            }
            // Throttle by mOffsetAccuracy to avoid flooding JS with scroll events.
            if (shouldReport(x, y)) {
              fireScrollEvent(scrollView.getContentFrame(), x, y, oldx, oldy);
            }
          }

          @Override
          public void onScrollToBottom(WXScrollView scrollView, int x, int y) {
            //ignore
          }

          @Override
          public void onScrollStopped(WXScrollView scrollView, int x, int y) {
            //ignore
          }

          @Override
          public void onScroll(WXScrollView scrollView, int x, int y) {
            //ignore
          }
        });
      } else if (getInnerView() instanceof WXHorizontalScrollView) {
        ((WXHorizontalScrollView) getInnerView()).addScrollViewListener(new WXHorizontalScrollView.ScrollViewListener() {
          @Override
          public void onScrollChanged(WXHorizontalScrollView scrollView, int x, int y, int oldx, int oldy) {
            getScrollStartEndHelper().onScrolled(x, y);
            if (!getEvents().contains(Constants.Event.SCROLL)) {
              return;
            }
            if (shouldReport(x, y)) {
              fireScrollEvent(scrollView.getContentFrame(), x, y, oldx, oldy);
            }
          }
        });
      }
    }
  }

  // NOTE(review): contentFrame/oldx/oldy are unused; getScrollEvent re-fetches the frame itself.
  private void fireScrollEvent(Rect contentFrame, int x, int y, int oldx, int oldy) {
    fireEvent(Constants.Event.SCROLL, getScrollEvent(x, y));
  }

  /** Builds the payload for a "scroll" event: content size and (negated) content offset in web px. */
  public Map<String, Object> getScrollEvent(int x, int y) {
    Rect contentFrame = new Rect();
    if (getInnerView() instanceof WXScrollView) {
      contentFrame = ((WXScrollView) getInnerView()).getContentFrame();
    } else if (getInnerView() instanceof WXHorizontalScrollView) {
      contentFrame = ((WXHorizontalScrollView) getInnerView()).getContentFrame();
    }

    Map<String, Object> event = new HashMap<>(2);
    Map<String, Object> contentSize = new HashMap<>(2);
    Map<String, Object> contentOffset = new HashMap<>(2);

    int viewport = getInstance().getInstanceViewPortWidth();

    contentSize.put(Constants.Name.WIDTH, WXViewUtils.getWebPxByWidth(contentFrame.width(), viewport));
    contentSize.put(Constants.Name.HEIGHT, WXViewUtils.getWebPxByWidth(contentFrame.height(), viewport));

    // Offsets are reported negated (scrolling down/right yields negative values).
    contentOffset.put(Constants.Name.X, -WXViewUtils.getWebPxByWidth(x, viewport));
    contentOffset.put(Constants.Name.Y, -WXViewUtils.getWebPxByWidth(y, viewport));

    event.put(Constants.Name.CONTENT_SIZE, contentSize);
    event.put(Constants.Name.CONTENT_OFFSET, contentOffset);
    return event;
  }

  /**
   * Returns true (and records the position) when the scroll delta along the active axis
   * reaches mOffsetAccuracy, or on the very first call.
   */
  private boolean shouldReport(int x, int y) {
    if (mLastReport.x == -1 && mLastReport.y == -1) {
      mLastReport.x = x;
      mLastReport.y = y;
      return true;
    }

    if (mOrientation == Constants.Orientation.HORIZONTAL
        && Math.abs(x - mLastReport.x) >= mOffsetAccuracy) {
      mLastReport.x = x;
      mLastReport.y = y;
      return true;
    }

    if (mOrientation == Constants.Orientation.VERTICAL
        && Math.abs(y - mLastReport.y) >= mOffsetAccuracy) {
      mLastReport.x = x;
      mLastReport.y = y;
      return true;
    }

    return false;
  }

  /**
   * Intercept refresh view and loading view
   */
  @Override
  public void addSubView(View child, int index) {
    if (child == null || mRealView == null) {
      return;
    }

    if (child instanceof WXBaseRefreshLayout) {
      return;
    }

    int count = mRealView.getChildCount();
    // Out-of-range index falls back to appending at the end.
    index = index >= count ? -1 : index;
    if (index == -1) {
      mRealView.addView(child);
    } else {
      mRealView.addView(child, index);
    }
  }

  @Override
  protected int getChildrenLayoutTopOffset() {
    if (mChildrenLayoutOffset == 0) {
      // Child LayoutSize data is set after calling Layout, so init mChildrenLayoutOffset here.
      final int listSize = mRefreshs.size();
      if (listSize > 0) {
        for (int i = 0; i < listSize; i++) {
          WXComponent child = mRefreshs.get(i);
          mChildrenLayoutOffset += child.getLayoutTopOffsetForSibling();
        }
      }
    }
    return mChildrenLayoutOffset;
  }

  /**
   * Intercept refresh view and loading view
   */
  @Override
  public void addChild(WXComponent child, int index) {
    if (child instanceof WXBaseRefresh) {
      if (checkRefreshOrLoading(child)) {
        mRefreshs.add(child);
      }
    }
    super.addChild(child, index);
  }

  /**
   * Setting refresh view and loading view
   * @param child the refresh_view or loading_view
   */
  private boolean checkRefreshOrLoading(final WXComponent child) {
    boolean result = false;
    if (child instanceof WXRefresh && getHostView() != null) {
      ((BaseBounceView) getHostView()).setOnRefreshListener((WXRefresh) child);
      Runnable runnable = WXThread.secure(new Runnable() {
        @Override
        public void run() {
          ((BaseBounceView) getHostView()).setHeaderView(child);
        }
      });
      // NOTE(review): 100 ms delay presumably waits for the host view to finish init — confirm.
      handler.postDelayed(runnable, 100);
      result = true;
    }

    if (child instanceof WXLoading && getHostView() != null) {
      ((BaseBounceView) getHostView()).setOnLoadingListener((WXLoading) child);
      Runnable runnable = WXThread.secure(new Runnable() {
        @Override
        public void run() {
          ((BaseBounceView) getHostView()).setFooterView(child);
        }
      });
      handler.postDelayed(runnable, 100);
      result = true;
    }
    return result;
  }

  @Override
  public void remove(WXComponent child, boolean destory) {
    super.remove(child, destory);
    if (child instanceof WXLoading) {
      ((BaseBounceView) getHostView()).removeFooterView(child);
    } else if (child instanceof WXRefresh) {
      ((BaseBounceView) getHostView()).removeHeaderView(child);
    }
  }

  @Override
  public void destroy() {
    super.destroy();
    if (mAppearanceComponents != null) {
      mAppearanceComponents.clear();
    }
    if (mStickyMap != null) {
      mStickyMap.clear();
    }
    // Unhook the attach-state listener registered in initComponentHostView to avoid leaks.
    if (mOnAttachStateChangeListener != null && getInnerView() != null) {
      getInnerView().removeOnAttachStateChangeListener(mOnAttachStateChangeListener);
    }
    if (getInnerView() != null && getInnerView() instanceof IWXScroller) {
      ((IWXScroller) getInnerView()).destroy();
    }
  }

  @SuppressLint("RtlHardcoded")
  @Override
  public void setMarginsSupportRTL(ViewGroup.MarginLayoutParams lp, int left, int top, int right, int bottom) {
    if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.JELLY_BEAN_MR1) {
      lp.setMargins(left, top, right, bottom);
      lp.setMarginStart(left);
      lp.setMarginEnd(right);
    } else {
      // Pre-17 there is no start/end margin API, so emulate RTL by swapping margins and gravity.
      if (lp instanceof FrameLayout.LayoutParams) {
        FrameLayout.LayoutParams lp_frameLayout = (FrameLayout.LayoutParams) lp;
        if (isLayoutRTL()) {
          lp_frameLayout.gravity = Gravity.RIGHT | Gravity.TOP;
          lp.setMargins(right, top, left, bottom);
        } else {
          lp_frameLayout.gravity = Gravity.LEFT | Gravity.TOP;
          lp.setMargins(left, top, right, bottom);
        }
      } else {
        lp.setMargins(left, top, right, bottom);
      }
    }
  }

  @Override
  public void setLayout(WXComponent component) {
    if (TextUtils.isEmpty(component.getComponentType())
        || TextUtils.isEmpty(component.getRef())
        || component.getLayoutPosition() == null
        || component.getLayoutSize() == null) {
      return;
    }
    if (component.getHostView() != null) {
      int layoutDirection = component.isLayoutRTL() ? ViewCompat.LAYOUT_DIRECTION_RTL : ViewCompat.LAYOUT_DIRECTION_LTR;
      ViewCompat.setLayoutDirection(component.getHostView(), layoutDirection);
    }
    super.setLayout(component);
  }

  // Clamps the scroll axis to MATCH_PARENT when the requested size exceeds the screen/instance size.
  @Override
  protected MeasureOutput measure(int width, int height) {
    MeasureOutput measureOutput = new MeasureOutput();
    if (this.mOrientation == Constants.Orientation.HORIZONTAL) {
      int screenW = WXViewUtils.getScreenWidth(WXEnvironment.sApplication);
      int weexW = WXViewUtils.getWeexWidth(getInstanceId());
      measureOutput.width = width > (weexW >= screenW ? screenW : weexW)
          ? FrameLayout.LayoutParams.MATCH_PARENT : width;
      measureOutput.height = height;
    } else {
      int screenH = WXViewUtils.getScreenHeight(WXEnvironment.sApplication);
      int weexH = WXViewUtils.getWeexHeight(getInstanceId());
      measureOutput.height = height > (weexH >= screenH ? screenH : weexH)
          ? FrameLayout.LayoutParams.MATCH_PARENT : height;
      measureOutput.width = width;
    }
    return measureOutput;
  }

  /**
   * Creates the host view: a WXHorizontalScrollView for horizontal scrollers (with optional
   * paging), or a BounceScrollerView for vertical ones. Also wires up appear/disappear and
   * load-more handling.
   */
  @SuppressLint("ClickableViewAccessibility")
  @Override
  protected ViewGroup initComponentHostView(@NonNull Context context) {
    String scroll;
    if (getAttrs().isEmpty()) {
      scroll = "vertical";
    } else {
      scroll = getAttrs().getScrollDirection();
      Object o = getAttrs().get(Constants.Name.PAGE_ENABLED);
      pageEnable = o != null && Boolean.parseBoolean(o.toString());
      Object pageSize = getAttrs().get(Constants.Name.PAGE_SIZE);
      if (pageSize != null) {
        float aFloat = WXUtils.getFloat(pageSize);
        float realPxByWidth = WXViewUtils.getRealPxByWidth(aFloat, getInstance().getInstanceViewPortWidth());
        if (realPxByWidth != 0) {
          this.pageSize = (int) realPxByWidth;
        }
      }
    }

    ViewGroup host;
    if (("horizontal").equals(scroll)) {
      mOrientation = Constants.Orientation.HORIZONTAL;
      final WXHorizontalScrollView scrollView = new WXHorizontalScrollView(context);
      mRealView = new FrameLayout(context);
      scrollView.setScrollViewListener(new WXHorizontalScrollView.ScrollViewListener() {
        @Override
        public void onScrollChanged(WXHorizontalScrollView scrollView, int x, int y, int oldx, int oldy) {
          procAppear(x, y, oldx, oldy);
          onLoadMore(scrollView, x, y);
        }
      });
      FrameLayout.LayoutParams layoutParams = new FrameLayout.LayoutParams(
          LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT);
      scrollView.addView(mRealView, layoutParams);
      scrollView.setHorizontalScrollBarEnabled(false);
      mScrollerView = scrollView;
      final WXScroller component = this;
      // Keeps the visible item stable across RTL/LTR direction flips and width changes.
      final View.OnLayoutChangeListener listener = new View.OnLayoutChangeListener() {
        @Override
        public void onLayoutChange(View view, final int left, int top, final int right, int bottom,
                                   final int oldLeft, int oldTop, final int oldRight, int oldBottom) {
          final View frameLayout = view;
          scrollView.post(new Runnable() {
            @Override
            public void run() {
              if (mIslastDirectionRTL != null && isLayoutRTL() != mIslastDirectionRTL) {
                // When the layout direction changed we need to convert x to the RTL x
                // to scroll to the same item.
                int currentX = getScrollX();
                int totalWidth = getInnerView().getChildAt(0).getWidth();
                int displayWidth = getInnerView().getMeasuredWidth();
                scrollView.scrollTo(totalWidth - currentX - displayWidth, component.getScrollY());
              } else if (isLayoutRTL()) {
                // If the layout direction did not change but the width did, keep the RTL offset.
                int oldWidth = oldRight - oldLeft;
                int width = right - left;
                int changedWidth = width - oldWidth;
                if (changedWidth != 0) {
                  scrollView.scrollBy(changedWidth, component.getScrollY());
                }
              }
              mIslastDirectionRTL = isLayoutRTL();
            }
          });
        }
      };
      mRealView.addOnAttachStateChangeListener(new View.OnAttachStateChangeListener() {
        @Override
        public void onViewAttachedToWindow(View view) {
          view.addOnLayoutChangeListener(listener);
        }

        @Override
        public void onViewDetachedFromWindow(View view) {
          view.removeOnLayoutChangeListener(listener);
        }
      });
      if (pageEnable) {
        // Paged scrolling: flings are handled by the gesture detector; on release, snap to
        // the nearest page boundary.
        mGestureDetector = new GestureDetector(new MyGestureDetector(scrollView));
        scrollView.setOnTouchListener(new View.OnTouchListener() {
          @Override
          public boolean onTouch(View v, MotionEvent event) {
            if (pageSize == 0) {
              pageSize = v.getMeasuredWidth();
            }
            if (mGestureDetector.onTouchEvent(event)) {
              return true;
            } else if (event.getAction() == MotionEvent.ACTION_UP
                || event.getAction() == MotionEvent.ACTION_CANCEL) {
              int scrollX = getScrollX();
              int featureWidth = pageSize;
              mActiveFeature = ((scrollX + (featureWidth / 2)) / featureWidth);
              int scrollTo = mActiveFeature * featureWidth;
              scrollView.smoothScrollTo(scrollTo, 0);
              return true;
            } else {
              return false;
            }
          }
        });
      }
      host = scrollView;
    } else {
      mOrientation = Constants.Orientation.VERTICAL;
      BounceScrollerView scrollerView = new BounceScrollerView(context, mOrientation, this);
      mRealView = new FrameLayout(context);
      WXScrollView innerView = scrollerView.getInnerView();
      innerView.addScrollViewListener(this);
      FrameLayout.LayoutParams layoutParams = new FrameLayout.LayoutParams(
          LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT);
      mScrollerView = innerView;
      innerView.addView(mRealView, layoutParams);
      innerView.setVerticalScrollBarEnabled(true);
      innerView.setNestedScrollingEnabled(WXUtils.getBoolean(getAttrs().get(Constants.Name.NEST_SCROLLING_ENABLED), true));
      // Second listener forwards scroll state to instance-level OnWXScrollListeners.
      innerView.addScrollViewListener(new WXScrollViewListener() {
        @Override
        public void onScrollChanged(WXScrollView scrollView, int x, int y, int oldx, int oldy) {
        }

        @Override
        public void onScrollToBottom(WXScrollView scrollView, int x, int y) {
        }

        @Override
        public void onScrollStopped(WXScrollView scrollView, int x, int y) {
          List<OnWXScrollListener> listeners = getInstance().getWXScrollListeners();
          if (listeners != null && listeners.size() > 0) {
            for (OnWXScrollListener listener : listeners) {
              if (listener != null) {
                listener.onScrollStateChanged(scrollView, x, y, OnWXScrollListener.IDLE);
              }
            }
          }
          getScrollStartEndHelper().onScrollStateChanged(OnWXScrollListener.IDLE);
        }

        @Override
        public void onScroll(WXScrollView scrollView, int x, int y) {
          List<OnWXScrollListener> listeners = getInstance().getWXScrollListeners();
          if (listeners != null && listeners.size() > 0) {
            for (OnWXScrollListener listener : listeners) {
              if (listener != null) {
                if (listener instanceof ICheckBindingScroller) {
                  if (((ICheckBindingScroller) listener).isNeedScroller(getRef(), null)) {
                    listener.onScrolled(scrollView, x, y);
                  }
                } else {
                  listener.onScrolled(scrollView, x, y);
                }
              }
            }
          }
        }
      });
      host = scrollerView;
    }

    // One-shot global-layout pass: run an initial appearance check, then self-unregister.
    host.getViewTreeObserver().addOnGlobalLayoutListener(new ViewTreeObserver.OnGlobalLayoutListener() {
      @TargetApi(Build.VERSION_CODES.JELLY_BEAN)
      @Override
      public void onGlobalLayout() {
        procAppear(0, 0, 0, 0);
        View view;
        if ((view = getHostView()) == null) {
          return;
        }
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
          view.getViewTreeObserver().removeOnGlobalLayoutListener(this);
        } else {
          view.getViewTreeObserver().removeGlobalOnLayoutListener(this);
        }
      }
    });

    // Appear/disappear events are gated on window attachment (see procAppear).
    mOnAttachStateChangeListener = new View.OnAttachStateChangeListener() {
      @Override
      public void onViewAttachedToWindow(View v) {
        mIsHostAttachedToWindow = true;
        procAppear(getScrollX(), getScrollY(), getScrollX(), getScrollY());
      }

      @Override
      public void onViewDetachedFromWindow(View v) {
        mIsHostAttachedToWindow = false;
        dispatchDisappearEvent();
      }
    };
    host.addOnAttachStateChangeListener(mOnAttachStateChangeListener);
    return host;
  }

  @Override
  public int getScrollY() {
    return getInnerView() == null ? 0 : getInnerView().getScrollY();
  }

  @Override
  public int getScrollX() {
    return getInnerView() == null ? 0 : getInnerView().getScrollX();
  }

  @Override
  public int getOrientation() {
    return mOrientation;
  }

  public Map<String, Map<String, WXComponent>> getStickMap() {
    return mStickyMap;
  }

  @Override
  protected boolean setProperty(String key, Object param) {
    switch (key) {
      case Constants.Name.SHOW_SCROLLBAR:
        Boolean result = WXUtils.getBoolean(param, null);
        if (result != null) {
          setShowScrollbar(result);
        }
        return true;
      case Constants.Name.SCROLLABLE:
        boolean scrollable = WXUtils.getBoolean(param, true);
        setScrollable(scrollable);
        return true;
      case Constants.Name.OFFSET_ACCURACY:
        int accuracy = WXUtils.getInteger(param, 10);
        setOffsetAccuracy(accuracy);
        return true;
      default:
        break;
    }
    return super.setProperty(key, param);
  }

  @WXComponentProp(name = Constants.Name.SHOW_SCROLLBAR)
  public void setShowScrollbar(boolean show) {
    if (getInnerView() == null) {
      return;
    }
    if (mOrientation == Constants.Orientation.VERTICAL) {
      getInnerView().setVerticalScrollBarEnabled(show);
    } else {
      getInnerView().setHorizontalScrollBarEnabled(show);
    }
  }

  @WXComponentProp(name = Constants.Name.SCROLLABLE)
  public void setScrollable(boolean scrollable) {
    this.isScrollable = scrollable;
    View hostView = getInnerView();
    if (hostView instanceof WXHorizontalScrollView) {
      ((WXHorizontalScrollView) hostView).setScrollable(scrollable);
    } else if (hostView instanceof WXScrollView) {
      ((WXScrollView) hostView).setScrollable(scrollable);
    }
  }

  @WXComponentProp(name = Constants.Name.OFFSET_ACCURACY)
  public void setOffsetAccuracy(int accuracy) {
    // Incoming accuracy is in web px; convert to device px for comparison in shouldReport().
    float realPx = WXViewUtils.getRealPxByWidth(accuracy, getInstance().getInstanceViewPortWidth());
    this.mOffsetAccuracy = (int) realPx;
  }

  @Override
  public boolean isScrollable() {
    return isScrollable;
  }

  @Override
  public void bindStickStyle(WXComponent component) {
    stickyHelper.bindStickStyle(component, mStickyMap);
  }

  @Override
  public void unbindStickStyle(WXComponent component) {
    stickyHelper.unbindStickStyle(component, mStickyMap);
  }

  /**
   * Bind appear event
   */
  @Override
  public void bindAppearEvent(WXComponent component) {
    setWatch(AppearanceHelper.APPEAR, component, true);
  }

  /** Registers/unregisters appear or disappear watching for a child component. */
  private void setWatch(int event, WXComponent component, boolean isWatch) {
    AppearanceHelper item = mAppearanceComponents.get(component.getRef());
    if (item == null) {
      item = new AppearanceHelper(component);
      mAppearanceComponents.put(component.getRef(), item);
    }
    item.setWatchEvent(event, isWatch);

    //check current components appearance status.
    procAppear(0, 0, 0, 0);
  }

  /**
   * Bind disappear event
   */
  @Override
  public void bindDisappearEvent(WXComponent component) {
    setWatch(AppearanceHelper.DISAPPEAR, component, true);
  }

  /**
   * Remove appear event
   */
  @Override
  public void unbindAppearEvent(WXComponent component) {
    setWatch(AppearanceHelper.APPEAR, component, false);
  }

  /**
   * Remove disappear event
   */
  @Override
  public void unbindDisappearEvent(WXComponent component) {
    setWatch(AppearanceHelper.DISAPPEAR, component, false);
  }

  @Override
  public void scrollTo(WXComponent component, Map<String, Object> options) {
    float offsetFloat = 0;
    boolean smooth = true;

    if (options != null) {
      String offset = options.get(Constants.Name.OFFSET) == null ? "0" : options.get(Constants.Name.OFFSET).toString();
      smooth = WXUtils.getBoolean(options.get(Constants.Name.ANIMATED), true);
      if (offset != null) {
        try {
          offsetFloat = WXViewUtils.getRealPxByWidth(Float.parseFloat(offset), getInstance().getInstanceViewPortWidth());
        } catch (Exception e) {
          WXLogUtils.e("Float parseFloat error :" + e.getMessage());
        }
      }
    }
    if (pageEnable) {
      mActiveFeature = mChildren.indexOf(component);
    }

    int viewYInScroller = component.getAbsoluteY() - getAbsoluteY();
    int viewXInScroller = 0;
    if (this.isLayoutRTL()) {
      // If the layout direction is RTL, we need to calculate the RTL scroll x.
      if (component.getParent() != null && component.getParent() == this) {
        if (getInnerView().getChildCount() > 0) {
          int totalWidth = getInnerView().getChildAt(0).getWidth();
          int displayWidth = getInnerView().getMeasuredWidth();
          viewXInScroller = totalWidth - (component.getAbsoluteX() - getAbsoluteX()) - displayWidth;
        } else {
          viewXInScroller = component.getAbsoluteX() - getAbsoluteX();
        }
      } else {
        int displayWidth = getInnerView().getMeasuredWidth();
        viewXInScroller = component.getAbsoluteX() - getAbsoluteX() - displayWidth + (int) component.getLayoutWidth();
      }
      // The caller-supplied offset is mirrored in RTL mode.
      offsetFloat = -offsetFloat;
    } else {
      viewXInScroller = component.getAbsoluteX() - getAbsoluteX();
    }

    scrollBy(viewXInScroller - getScrollX() + (int) offsetFloat,
        viewYInScroller - getScrollY() + (int) offsetFloat, smooth);
  }

  /**
   * Scroll by specified distance. Horizontal scroll is not supported now.
   * @param x horizontal distance, not support
   * @param y vertical distance. Negative for scroll to top
   */
  public void scrollBy(final int x, final int y) {
    scrollBy(x, y, false);
  }

  public void scrollBy(final int x, final int y, final boolean smooth) {
    if (getInnerView() == null) {
      return;
    }

    // Posted with ~one frame (16 ms) delay so any pending layout pass settles first.
    getInnerView().postDelayed(new Runnable() {
      @Override
      public void run() {
        if (mOrientation == Constants.Orientation.VERTICAL) {
          if (smooth) {
            ((WXScrollView) getInnerView()).smoothScrollBy(0, y);
          } else {
            ((WXScrollView) getInnerView()).scrollBy(0, y);
          }
        } else {
          if (smooth) {
            ((WXHorizontalScrollView) getInnerView()).smoothScrollBy(x, 0);
          } else {
            ((WXHorizontalScrollView) getInnerView()).scrollBy(x, 0);
          }
        }
        getInnerView().invalidate();
      }
    }, 16);
  }

  @Override
  public void onScrollChanged(WXScrollView scrollView, int x, int y, int oldx, int oldy) {
    procAppear(x, y, oldx, oldy);
  }

  @Override
  public void notifyAppearStateChange(String wxEventType, String direction) {
    if (containsEvent(Constants.Event.APPEAR) || containsEvent(Constants.Event.DISAPPEAR)) {
      Map<String, Object> params = new HashMap<>();
      params.put("direction", direction);
      fireEvent(wxEventType, params);
    }
    // No-op. The moment to notify children is decided by the time when scroller is attached
    // or detached to window. Do not call super as scrollview has different disposal.
  }

  /**
   * Process event like appear and disappear
   *
   * This method will be invoked in several situation below.
   * 1. bind or unbind event
   * 2. host view is attached to window
   * 3. when scrollview is scrolling
   */
  private void procAppear(int x, int y, int oldx, int oldy) {
    if (!mIsHostAttachedToWindow) return;
    int moveY = y - oldy;
    int moveX = x - oldx;
    // Derive the reported scroll direction from the delta; null when there was no movement.
    String direction = moveY > 0 ? Constants.Value.DIRECTION_UP :
        moveY < 0 ? Constants.Value.DIRECTION_DOWN : null;
    if (mOrientation == Constants.Orientation.HORIZONTAL && moveX != 0) {
      direction = moveX > 0 ? Constants.Value.DIRECTION_RIGHT : Constants.Value.DIRECTION_LEFT;
    }

    for (Entry<String, AppearanceHelper> item : mAppearanceComponents.entrySet()) {
      AppearanceHelper helper = item.getValue();

      if (!helper.isWatch()) {
        continue;
      }
      boolean visible = checkItemVisibleInScroller(helper.getAwareChild());
      int result = helper.setAppearStatus(visible);
      if (result != AppearanceHelper.RESULT_NO_CHANGE) {
        helper.getAwareChild().notifyAppearStateChange(
            result == AppearanceHelper.RESULT_APPEAR ? Constants.Event.APPEAR : Constants.Event.DISAPPEAR, direction);
      }
    }
  }

  /**
   * Check the view of given component is visible in scrollview.
   *
   * @param component ready to be check
   * @return item is visible
   */
  private boolean checkItemVisibleInScroller(WXComponent component) {
    boolean visible = false;
    // Walk up to the direct child of this scroller and test that child's bounds.
    while (component != null && !(component instanceof WXScroller)) {
      if (component.getParent() instanceof WXScroller) {
        if (mOrientation == Constants.Orientation.HORIZONTAL) {
          int offsetLeft = (int) component.getLayoutPosition().getLeft() - getScrollX();
          visible = (offsetLeft > 0 - component.getLayoutWidth() && offsetLeft < getLayoutWidth());
        } else {
          int offsetTop = (int) component.getLayoutPosition().getTop() - getScrollY();
          visible = (offsetTop > 0 - component.getLayoutHeight() && offsetTop < getLayoutHeight());
        }
      }
      component = component.getParent();
    }
    return visible;
  }

  /**
   * Dispatch disappear event to the child components in need.
   */
  private void dispatchDisappearEvent() {
    for (Entry<String, AppearanceHelper> item : mAppearanceComponents.entrySet()) {
      AppearanceHelper helper = item.getValue();
      if (!helper.isWatch()) {
        continue;
      }
      int result = helper.setAppearStatus(false);
      if (result != AppearanceHelper.RESULT_NO_CHANGE) {
        helper.getAwareChild().notifyAppearStateChange(
            result == AppearanceHelper.RESULT_APPEAR ? Constants.Event.APPEAR : Constants.Event.DISAPPEAR, "");
      }
    }
  }

  @Override
  public void onScrollToBottom(WXScrollView scrollView, int x, int y) {
  }

  @Override
  public void onScrollStopped(WXScrollView scrollView, int x, int y) {
  }

  @Override
  public void onScroll(WXScrollView scrollView, int x, int y) {
    this.onLoadMore(scrollView, x, y);
  }

  /**
   * Handle loadMore Event: when the Scroller has bound a loadMore event and set the
   * loadMoreOffset attr, it will tell the JS to handle the onLoadMore event.
   * @param scrollView the WXScrollView
   * @param x the X direction
   * @param y the Y direction
   */
  protected void onLoadMore(FrameLayout scrollView, int x, int y) {
    try {
      String offset = getAttrs().getLoadMoreOffset();
      if (TextUtils.isEmpty(offset)) {
        return;
      }
      int offsetInt = (int) WXViewUtils.getRealPxByWidth(Float.parseFloat(offset), getInstance().getInstanceViewPortWidth());

      if (scrollView instanceof WXHorizontalScrollView) {
        int contentWidth = scrollView.getChildAt(0).getWidth();
        int offScreenX = contentWidth - x - scrollView.getWidth();
        // Fire only once per content size, unless resetLoadmore() forced a re-fire.
        if (offScreenX < offsetInt && (mContentWidth != contentWidth || mForceLoadmoreNextTime)) {
          fireEvent(Constants.Event.LOADMORE);
          mContentWidth = contentWidth;
          mForceLoadmoreNextTime = false;
        }
      } else {
        int contentH = scrollView.getChildAt(0).getHeight();
        int scrollerH = scrollView.getHeight();
        int offScreenY = contentH - y - scrollerH;
        if (offScreenY < offsetInt) {
          if (WXEnvironment.isApkDebugable()) {
            WXLogUtils.d("[WXScroller-onScroll] offScreenY :" + offScreenY);
          }
          if (mContentHeight != contentH || mForceLoadmoreNextTime) {
            fireEvent(Constants.Event.LOADMORE);
            mContentHeight = contentH;
            mForceLoadmoreNextTime = false;
          }
        }
      }
    } catch (Exception e) {
      WXLogUtils.d("[WXScroller-onScroll] ", e);
    }
  }

  @JSMethod
  public void resetLoadmore() {
    mForceLoadmoreNextTime = true;
  }

  public ScrollStartEndHelper getScrollStartEndHelper() {
    if (mScrollStartEndHelper == null) {
      mScrollStartEndHelper = new ScrollStartEndHelper(this);
    }
    return mScrollStartEndHelper;
  }

  class
MyGestureDetector extends GestureDetector.SimpleOnGestureListener { public WXHorizontalScrollView getScrollView() { return scrollView; } private final WXHorizontalScrollView scrollView; MyGestureDetector(WXHorizontalScrollView horizontalScrollView) { scrollView = horizontalScrollView; } @Override public boolean onFling(MotionEvent e1, MotionEvent e2, float velocityX, float velocityY) { int mItems = mChildren.size(); try { //right to left if(e1.getX() - e2.getX() > SWIPE_MIN_DISTANCE && Math.abs(velocityX) > SWIPE_THRESHOLD_VELOCITY) { int featureWidth = pageSize; mActiveFeature = (mActiveFeature < (mItems - 1))? mActiveFeature + 1:mItems -1; scrollView.smoothScrollTo(mActiveFeature*featureWidth, 0); return true; } //left to right else if (e2.getX() - e1.getX() > SWIPE_MIN_DISTANCE && Math.abs(velocityX) > SWIPE_THRESHOLD_VELOCITY) { int featureWidth = pageSize; mActiveFeature = (mActiveFeature > 0)? mActiveFeature - 1:0; scrollView.smoothScrollTo(mActiveFeature*featureWidth, 0); return true; } } catch (Exception e) { WXLogUtils.e("There was an error processing the Fling event:" + e.getMessage()); } return false; } } }
googleapis/google-cloud-java
36,533
java-discoveryengine/proto-google-cloud-discoveryengine-v1beta/src/main/java/com/google/cloud/discoveryengine/v1beta/ListControlsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/discoveryengine/v1beta/control_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.discoveryengine.v1beta; /** * * * <pre> * Response for ListControls method. * </pre> * * Protobuf type {@code google.cloud.discoveryengine.v1beta.ListControlsResponse} */ public final class ListControlsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.discoveryengine.v1beta.ListControlsResponse) ListControlsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListControlsResponse.newBuilder() to construct. 
private ListControlsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListControlsResponse() { controls_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListControlsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.discoveryengine.v1beta.ControlServiceProto .internal_static_google_cloud_discoveryengine_v1beta_ListControlsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.discoveryengine.v1beta.ControlServiceProto .internal_static_google_cloud_discoveryengine_v1beta_ListControlsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.discoveryengine.v1beta.ListControlsResponse.class, com.google.cloud.discoveryengine.v1beta.ListControlsResponse.Builder.class); } public static final int CONTROLS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.discoveryengine.v1beta.Control> controls_; /** * * * <pre> * All the Controls for a given data store. * </pre> * * <code>repeated .google.cloud.discoveryengine.v1beta.Control controls = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.discoveryengine.v1beta.Control> getControlsList() { return controls_; } /** * * * <pre> * All the Controls for a given data store. * </pre> * * <code>repeated .google.cloud.discoveryengine.v1beta.Control controls = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.discoveryengine.v1beta.ControlOrBuilder> getControlsOrBuilderList() { return controls_; } /** * * * <pre> * All the Controls for a given data store. 
* </pre> * * <code>repeated .google.cloud.discoveryengine.v1beta.Control controls = 1;</code> */ @java.lang.Override public int getControlsCount() { return controls_.size(); } /** * * * <pre> * All the Controls for a given data store. * </pre> * * <code>repeated .google.cloud.discoveryengine.v1beta.Control controls = 1;</code> */ @java.lang.Override public com.google.cloud.discoveryengine.v1beta.Control getControls(int index) { return controls_.get(index); } /** * * * <pre> * All the Controls for a given data store. * </pre> * * <code>repeated .google.cloud.discoveryengine.v1beta.Control controls = 1;</code> */ @java.lang.Override public com.google.cloud.discoveryengine.v1beta.ControlOrBuilder getControlsOrBuilder(int index) { return controls_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Pagination token, if not returned indicates the last page. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * Pagination token, if not returned indicates the last page. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < controls_.size(); i++) { output.writeMessage(1, controls_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < controls_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, controls_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.discoveryengine.v1beta.ListControlsResponse)) { return super.equals(obj); } com.google.cloud.discoveryengine.v1beta.ListControlsResponse other = (com.google.cloud.discoveryengine.v1beta.ListControlsResponse) obj; if (!getControlsList().equals(other.getControlsList())) return false; if 
(!getNextPageToken().equals(other.getNextPageToken())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getControlsCount() > 0) { hash = (37 * hash) + CONTROLS_FIELD_NUMBER; hash = (53 * hash) + getControlsList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.discoveryengine.v1beta.ListControlsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.discoveryengine.v1beta.ListControlsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.discoveryengine.v1beta.ListControlsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.discoveryengine.v1beta.ListControlsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.discoveryengine.v1beta.ListControlsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.discoveryengine.v1beta.ListControlsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) 
throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.discoveryengine.v1beta.ListControlsResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.discoveryengine.v1beta.ListControlsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.discoveryengine.v1beta.ListControlsResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.discoveryengine.v1beta.ListControlsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.discoveryengine.v1beta.ListControlsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.discoveryengine.v1beta.ListControlsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( 
com.google.cloud.discoveryengine.v1beta.ListControlsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response for ListControls method. * </pre> * * Protobuf type {@code google.cloud.discoveryengine.v1beta.ListControlsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.discoveryengine.v1beta.ListControlsResponse) com.google.cloud.discoveryengine.v1beta.ListControlsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.discoveryengine.v1beta.ControlServiceProto .internal_static_google_cloud_discoveryengine_v1beta_ListControlsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.discoveryengine.v1beta.ControlServiceProto .internal_static_google_cloud_discoveryengine_v1beta_ListControlsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.discoveryengine.v1beta.ListControlsResponse.class, com.google.cloud.discoveryengine.v1beta.ListControlsResponse.Builder.class); } // Construct using com.google.cloud.discoveryengine.v1beta.ListControlsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (controlsBuilder_ == null) { controls_ = java.util.Collections.emptyList(); } else { controls_ = null; controlsBuilder_.clear(); } 
bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.discoveryengine.v1beta.ControlServiceProto .internal_static_google_cloud_discoveryengine_v1beta_ListControlsResponse_descriptor; } @java.lang.Override public com.google.cloud.discoveryengine.v1beta.ListControlsResponse getDefaultInstanceForType() { return com.google.cloud.discoveryengine.v1beta.ListControlsResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.discoveryengine.v1beta.ListControlsResponse build() { com.google.cloud.discoveryengine.v1beta.ListControlsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.discoveryengine.v1beta.ListControlsResponse buildPartial() { com.google.cloud.discoveryengine.v1beta.ListControlsResponse result = new com.google.cloud.discoveryengine.v1beta.ListControlsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.discoveryengine.v1beta.ListControlsResponse result) { if (controlsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { controls_ = java.util.Collections.unmodifiableList(controls_); bitField0_ = (bitField0_ & ~0x00000001); } result.controls_ = controls_; } else { result.controls_ = controlsBuilder_.build(); } } private void buildPartial0( com.google.cloud.discoveryengine.v1beta.ListControlsResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return 
super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.discoveryengine.v1beta.ListControlsResponse) { return mergeFrom((com.google.cloud.discoveryengine.v1beta.ListControlsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.discoveryengine.v1beta.ListControlsResponse other) { if (other == com.google.cloud.discoveryengine.v1beta.ListControlsResponse.getDefaultInstance()) return this; if (controlsBuilder_ == null) { if (!other.controls_.isEmpty()) { if (controls_.isEmpty()) { controls_ = other.controls_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureControlsIsMutable(); controls_.addAll(other.controls_); } onChanged(); } } else { if (!other.controls_.isEmpty()) { if (controlsBuilder_.isEmpty()) { controlsBuilder_.dispose(); controlsBuilder_ = null; controls_ = other.controls_; bitField0_ = (bitField0_ & ~0x00000001); controlsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getControlsFieldBuilder() : null; } else { controlsBuilder_.addAllMessages(other.controls_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.discoveryengine.v1beta.Control m = input.readMessage( com.google.cloud.discoveryengine.v1beta.Control.parser(), extensionRegistry); if (controlsBuilder_ == null) { ensureControlsIsMutable(); controls_.add(m); } else { controlsBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.discoveryengine.v1beta.Control> controls_ = java.util.Collections.emptyList(); private void ensureControlsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { controls_ = new java.util.ArrayList<com.google.cloud.discoveryengine.v1beta.Control>(controls_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.discoveryengine.v1beta.Control, com.google.cloud.discoveryengine.v1beta.Control.Builder, 
com.google.cloud.discoveryengine.v1beta.ControlOrBuilder> controlsBuilder_; /** * * * <pre> * All the Controls for a given data store. * </pre> * * <code>repeated .google.cloud.discoveryengine.v1beta.Control controls = 1;</code> */ public java.util.List<com.google.cloud.discoveryengine.v1beta.Control> getControlsList() { if (controlsBuilder_ == null) { return java.util.Collections.unmodifiableList(controls_); } else { return controlsBuilder_.getMessageList(); } } /** * * * <pre> * All the Controls for a given data store. * </pre> * * <code>repeated .google.cloud.discoveryengine.v1beta.Control controls = 1;</code> */ public int getControlsCount() { if (controlsBuilder_ == null) { return controls_.size(); } else { return controlsBuilder_.getCount(); } } /** * * * <pre> * All the Controls for a given data store. * </pre> * * <code>repeated .google.cloud.discoveryengine.v1beta.Control controls = 1;</code> */ public com.google.cloud.discoveryengine.v1beta.Control getControls(int index) { if (controlsBuilder_ == null) { return controls_.get(index); } else { return controlsBuilder_.getMessage(index); } } /** * * * <pre> * All the Controls for a given data store. * </pre> * * <code>repeated .google.cloud.discoveryengine.v1beta.Control controls = 1;</code> */ public Builder setControls(int index, com.google.cloud.discoveryengine.v1beta.Control value) { if (controlsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureControlsIsMutable(); controls_.set(index, value); onChanged(); } else { controlsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * All the Controls for a given data store. 
* </pre> * * <code>repeated .google.cloud.discoveryengine.v1beta.Control controls = 1;</code> */ public Builder setControls( int index, com.google.cloud.discoveryengine.v1beta.Control.Builder builderForValue) { if (controlsBuilder_ == null) { ensureControlsIsMutable(); controls_.set(index, builderForValue.build()); onChanged(); } else { controlsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * All the Controls for a given data store. * </pre> * * <code>repeated .google.cloud.discoveryengine.v1beta.Control controls = 1;</code> */ public Builder addControls(com.google.cloud.discoveryengine.v1beta.Control value) { if (controlsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureControlsIsMutable(); controls_.add(value); onChanged(); } else { controlsBuilder_.addMessage(value); } return this; } /** * * * <pre> * All the Controls for a given data store. * </pre> * * <code>repeated .google.cloud.discoveryengine.v1beta.Control controls = 1;</code> */ public Builder addControls(int index, com.google.cloud.discoveryengine.v1beta.Control value) { if (controlsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureControlsIsMutable(); controls_.add(index, value); onChanged(); } else { controlsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * All the Controls for a given data store. * </pre> * * <code>repeated .google.cloud.discoveryengine.v1beta.Control controls = 1;</code> */ public Builder addControls( com.google.cloud.discoveryengine.v1beta.Control.Builder builderForValue) { if (controlsBuilder_ == null) { ensureControlsIsMutable(); controls_.add(builderForValue.build()); onChanged(); } else { controlsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * All the Controls for a given data store. 
* </pre> * * <code>repeated .google.cloud.discoveryengine.v1beta.Control controls = 1;</code> */ public Builder addControls( int index, com.google.cloud.discoveryengine.v1beta.Control.Builder builderForValue) { if (controlsBuilder_ == null) { ensureControlsIsMutable(); controls_.add(index, builderForValue.build()); onChanged(); } else { controlsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * All the Controls for a given data store. * </pre> * * <code>repeated .google.cloud.discoveryengine.v1beta.Control controls = 1;</code> */ public Builder addAllControls( java.lang.Iterable<? extends com.google.cloud.discoveryengine.v1beta.Control> values) { if (controlsBuilder_ == null) { ensureControlsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, controls_); onChanged(); } else { controlsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * All the Controls for a given data store. * </pre> * * <code>repeated .google.cloud.discoveryengine.v1beta.Control controls = 1;</code> */ public Builder clearControls() { if (controlsBuilder_ == null) { controls_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { controlsBuilder_.clear(); } return this; } /** * * * <pre> * All the Controls for a given data store. * </pre> * * <code>repeated .google.cloud.discoveryengine.v1beta.Control controls = 1;</code> */ public Builder removeControls(int index) { if (controlsBuilder_ == null) { ensureControlsIsMutable(); controls_.remove(index); onChanged(); } else { controlsBuilder_.remove(index); } return this; } /** * * * <pre> * All the Controls for a given data store. * </pre> * * <code>repeated .google.cloud.discoveryengine.v1beta.Control controls = 1;</code> */ public com.google.cloud.discoveryengine.v1beta.Control.Builder getControlsBuilder(int index) { return getControlsFieldBuilder().getBuilder(index); } /** * * * <pre> * All the Controls for a given data store. 
* </pre> * * <code>repeated .google.cloud.discoveryengine.v1beta.Control controls = 1;</code> */ public com.google.cloud.discoveryengine.v1beta.ControlOrBuilder getControlsOrBuilder( int index) { if (controlsBuilder_ == null) { return controls_.get(index); } else { return controlsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * All the Controls for a given data store. * </pre> * * <code>repeated .google.cloud.discoveryengine.v1beta.Control controls = 1;</code> */ public java.util.List<? extends com.google.cloud.discoveryengine.v1beta.ControlOrBuilder> getControlsOrBuilderList() { if (controlsBuilder_ != null) { return controlsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(controls_); } } /** * * * <pre> * All the Controls for a given data store. * </pre> * * <code>repeated .google.cloud.discoveryengine.v1beta.Control controls = 1;</code> */ public com.google.cloud.discoveryengine.v1beta.Control.Builder addControlsBuilder() { return getControlsFieldBuilder() .addBuilder(com.google.cloud.discoveryengine.v1beta.Control.getDefaultInstance()); } /** * * * <pre> * All the Controls for a given data store. * </pre> * * <code>repeated .google.cloud.discoveryengine.v1beta.Control controls = 1;</code> */ public com.google.cloud.discoveryengine.v1beta.Control.Builder addControlsBuilder(int index) { return getControlsFieldBuilder() .addBuilder(index, com.google.cloud.discoveryengine.v1beta.Control.getDefaultInstance()); } /** * * * <pre> * All the Controls for a given data store. 
* </pre> * * <code>repeated .google.cloud.discoveryengine.v1beta.Control controls = 1;</code> */ public java.util.List<com.google.cloud.discoveryengine.v1beta.Control.Builder> getControlsBuilderList() { return getControlsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.discoveryengine.v1beta.Control, com.google.cloud.discoveryengine.v1beta.Control.Builder, com.google.cloud.discoveryengine.v1beta.ControlOrBuilder> getControlsFieldBuilder() { if (controlsBuilder_ == null) { controlsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.discoveryengine.v1beta.Control, com.google.cloud.discoveryengine.v1beta.Control.Builder, com.google.cloud.discoveryengine.v1beta.ControlOrBuilder>( controls_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); controls_ = null; } return controlsBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Pagination token, if not returned indicates the last page. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Pagination token, if not returned indicates the last page. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Pagination token, if not returned indicates the last page. 
* </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Pagination token, if not returned indicates the last page. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Pagination token, if not returned indicates the last page. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.discoveryengine.v1beta.ListControlsResponse) } // @@protoc_insertion_point(class_scope:google.cloud.discoveryengine.v1beta.ListControlsResponse) private static final com.google.cloud.discoveryengine.v1beta.ListControlsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.discoveryengine.v1beta.ListControlsResponse(); } public static com.google.cloud.discoveryengine.v1beta.ListControlsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private 
static final com.google.protobuf.Parser<ListControlsResponse> PARSER = new com.google.protobuf.AbstractParser<ListControlsResponse>() { @java.lang.Override public ListControlsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListControlsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListControlsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.discoveryengine.v1beta.ListControlsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
36,630
java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/TrajectoryRecallInstance.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/aiplatform/v1beta1/evaluation_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.aiplatform.v1beta1; /** * * * <pre> * Spec for TrajectoryRecall instance. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1beta1.TrajectoryRecallInstance} */ public final class TrajectoryRecallInstance extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.TrajectoryRecallInstance) TrajectoryRecallInstanceOrBuilder { private static final long serialVersionUID = 0L; // Use TrajectoryRecallInstance.newBuilder() to construct. 
private TrajectoryRecallInstance(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private TrajectoryRecallInstance() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new TrajectoryRecallInstance(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto .internal_static_google_cloud_aiplatform_v1beta1_TrajectoryRecallInstance_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto .internal_static_google_cloud_aiplatform_v1beta1_TrajectoryRecallInstance_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1beta1.TrajectoryRecallInstance.class, com.google.cloud.aiplatform.v1beta1.TrajectoryRecallInstance.Builder.class); } private int bitField0_; public static final int PREDICTED_TRAJECTORY_FIELD_NUMBER = 1; private com.google.cloud.aiplatform.v1beta1.Trajectory predictedTrajectory_; /** * * * <pre> * Required. Spec for predicted tool call trajectory. * </pre> * * <code> * optional .google.cloud.aiplatform.v1beta1.Trajectory predicted_trajectory = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the predictedTrajectory field is set. */ @java.lang.Override public boolean hasPredictedTrajectory() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. Spec for predicted tool call trajectory. * </pre> * * <code> * optional .google.cloud.aiplatform.v1beta1.Trajectory predicted_trajectory = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The predictedTrajectory. */ @java.lang.Override public com.google.cloud.aiplatform.v1beta1.Trajectory getPredictedTrajectory() { return predictedTrajectory_ == null ? 
com.google.cloud.aiplatform.v1beta1.Trajectory.getDefaultInstance() : predictedTrajectory_; } /** * * * <pre> * Required. Spec for predicted tool call trajectory. * </pre> * * <code> * optional .google.cloud.aiplatform.v1beta1.Trajectory predicted_trajectory = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.aiplatform.v1beta1.TrajectoryOrBuilder getPredictedTrajectoryOrBuilder() { return predictedTrajectory_ == null ? com.google.cloud.aiplatform.v1beta1.Trajectory.getDefaultInstance() : predictedTrajectory_; } public static final int REFERENCE_TRAJECTORY_FIELD_NUMBER = 2; private com.google.cloud.aiplatform.v1beta1.Trajectory referenceTrajectory_; /** * * * <pre> * Required. Spec for reference tool call trajectory. * </pre> * * <code> * optional .google.cloud.aiplatform.v1beta1.Trajectory reference_trajectory = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the referenceTrajectory field is set. */ @java.lang.Override public boolean hasReferenceTrajectory() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. Spec for reference tool call trajectory. * </pre> * * <code> * optional .google.cloud.aiplatform.v1beta1.Trajectory reference_trajectory = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The referenceTrajectory. */ @java.lang.Override public com.google.cloud.aiplatform.v1beta1.Trajectory getReferenceTrajectory() { return referenceTrajectory_ == null ? com.google.cloud.aiplatform.v1beta1.Trajectory.getDefaultInstance() : referenceTrajectory_; } /** * * * <pre> * Required. Spec for reference tool call trajectory. * </pre> * * <code> * optional .google.cloud.aiplatform.v1beta1.Trajectory reference_trajectory = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.aiplatform.v1beta1.TrajectoryOrBuilder getReferenceTrajectoryOrBuilder() { return referenceTrajectory_ == null ? 
com.google.cloud.aiplatform.v1beta1.Trajectory.getDefaultInstance() : referenceTrajectory_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getPredictedTrajectory()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getReferenceTrajectory()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getPredictedTrajectory()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getReferenceTrajectory()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.TrajectoryRecallInstance)) { return super.equals(obj); } com.google.cloud.aiplatform.v1beta1.TrajectoryRecallInstance other = (com.google.cloud.aiplatform.v1beta1.TrajectoryRecallInstance) obj; if (hasPredictedTrajectory() != other.hasPredictedTrajectory()) return false; if (hasPredictedTrajectory()) { if (!getPredictedTrajectory().equals(other.getPredictedTrajectory())) return false; } if (hasReferenceTrajectory() != other.hasReferenceTrajectory()) return false; if (hasReferenceTrajectory()) { if (!getReferenceTrajectory().equals(other.getReferenceTrajectory())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return 
true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasPredictedTrajectory()) { hash = (37 * hash) + PREDICTED_TRAJECTORY_FIELD_NUMBER; hash = (53 * hash) + getPredictedTrajectory().hashCode(); } if (hasReferenceTrajectory()) { hash = (37 * hash) + REFERENCE_TRAJECTORY_FIELD_NUMBER; hash = (53 * hash) + getReferenceTrajectory().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.aiplatform.v1beta1.TrajectoryRecallInstance parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.TrajectoryRecallInstance parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.TrajectoryRecallInstance parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.TrajectoryRecallInstance parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.TrajectoryRecallInstance parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.TrajectoryRecallInstance parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.TrajectoryRecallInstance parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.TrajectoryRecallInstance parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.TrajectoryRecallInstance parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.TrajectoryRecallInstance parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.TrajectoryRecallInstance parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.TrajectoryRecallInstance parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.aiplatform.v1beta1.TrajectoryRecallInstance prototype) { return 
DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Spec for TrajectoryRecall instance. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1beta1.TrajectoryRecallInstance} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.TrajectoryRecallInstance) com.google.cloud.aiplatform.v1beta1.TrajectoryRecallInstanceOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto .internal_static_google_cloud_aiplatform_v1beta1_TrajectoryRecallInstance_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto .internal_static_google_cloud_aiplatform_v1beta1_TrajectoryRecallInstance_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1beta1.TrajectoryRecallInstance.class, com.google.cloud.aiplatform.v1beta1.TrajectoryRecallInstance.Builder.class); } // Construct using com.google.cloud.aiplatform.v1beta1.TrajectoryRecallInstance.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getPredictedTrajectoryFieldBuilder(); getReferenceTrajectoryFieldBuilder(); } } @java.lang.Override public 
Builder clear() { super.clear(); bitField0_ = 0; predictedTrajectory_ = null; if (predictedTrajectoryBuilder_ != null) { predictedTrajectoryBuilder_.dispose(); predictedTrajectoryBuilder_ = null; } referenceTrajectory_ = null; if (referenceTrajectoryBuilder_ != null) { referenceTrajectoryBuilder_.dispose(); referenceTrajectoryBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto .internal_static_google_cloud_aiplatform_v1beta1_TrajectoryRecallInstance_descriptor; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.TrajectoryRecallInstance getDefaultInstanceForType() { return com.google.cloud.aiplatform.v1beta1.TrajectoryRecallInstance.getDefaultInstance(); } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.TrajectoryRecallInstance build() { com.google.cloud.aiplatform.v1beta1.TrajectoryRecallInstance result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.TrajectoryRecallInstance buildPartial() { com.google.cloud.aiplatform.v1beta1.TrajectoryRecallInstance result = new com.google.cloud.aiplatform.v1beta1.TrajectoryRecallInstance(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.aiplatform.v1beta1.TrajectoryRecallInstance result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.predictedTrajectory_ = predictedTrajectoryBuilder_ == null ? predictedTrajectory_ : predictedTrajectoryBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.referenceTrajectory_ = referenceTrajectoryBuilder_ == null ? 
referenceTrajectory_ : referenceTrajectoryBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.aiplatform.v1beta1.TrajectoryRecallInstance) { return mergeFrom((com.google.cloud.aiplatform.v1beta1.TrajectoryRecallInstance) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.TrajectoryRecallInstance other) { if (other == com.google.cloud.aiplatform.v1beta1.TrajectoryRecallInstance.getDefaultInstance()) return this; if (other.hasPredictedTrajectory()) { mergePredictedTrajectory(other.getPredictedTrajectory()); } if (other.hasReferenceTrajectory()) { mergeReferenceTrajectory(other.getReferenceTrajectory()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage( getPredictedTrajectoryFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage( getReferenceTrajectoryFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloud.aiplatform.v1beta1.Trajectory predictedTrajectory_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.Trajectory, com.google.cloud.aiplatform.v1beta1.Trajectory.Builder, com.google.cloud.aiplatform.v1beta1.TrajectoryOrBuilder> predictedTrajectoryBuilder_; /** * * * <pre> * Required. Spec for predicted tool call trajectory. * </pre> * * <code> * optional .google.cloud.aiplatform.v1beta1.Trajectory predicted_trajectory = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the predictedTrajectory field is set. */ public boolean hasPredictedTrajectory() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. Spec for predicted tool call trajectory. * </pre> * * <code> * optional .google.cloud.aiplatform.v1beta1.Trajectory predicted_trajectory = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The predictedTrajectory. 
*/ public com.google.cloud.aiplatform.v1beta1.Trajectory getPredictedTrajectory() { if (predictedTrajectoryBuilder_ == null) { return predictedTrajectory_ == null ? com.google.cloud.aiplatform.v1beta1.Trajectory.getDefaultInstance() : predictedTrajectory_; } else { return predictedTrajectoryBuilder_.getMessage(); } } /** * * * <pre> * Required. Spec for predicted tool call trajectory. * </pre> * * <code> * optional .google.cloud.aiplatform.v1beta1.Trajectory predicted_trajectory = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setPredictedTrajectory(com.google.cloud.aiplatform.v1beta1.Trajectory value) { if (predictedTrajectoryBuilder_ == null) { if (value == null) { throw new NullPointerException(); } predictedTrajectory_ = value; } else { predictedTrajectoryBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. Spec for predicted tool call trajectory. * </pre> * * <code> * optional .google.cloud.aiplatform.v1beta1.Trajectory predicted_trajectory = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setPredictedTrajectory( com.google.cloud.aiplatform.v1beta1.Trajectory.Builder builderForValue) { if (predictedTrajectoryBuilder_ == null) { predictedTrajectory_ = builderForValue.build(); } else { predictedTrajectoryBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. Spec for predicted tool call trajectory. 
* </pre> * * <code> * optional .google.cloud.aiplatform.v1beta1.Trajectory predicted_trajectory = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergePredictedTrajectory(com.google.cloud.aiplatform.v1beta1.Trajectory value) { if (predictedTrajectoryBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && predictedTrajectory_ != null && predictedTrajectory_ != com.google.cloud.aiplatform.v1beta1.Trajectory.getDefaultInstance()) { getPredictedTrajectoryBuilder().mergeFrom(value); } else { predictedTrajectory_ = value; } } else { predictedTrajectoryBuilder_.mergeFrom(value); } if (predictedTrajectory_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. Spec for predicted tool call trajectory. * </pre> * * <code> * optional .google.cloud.aiplatform.v1beta1.Trajectory predicted_trajectory = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearPredictedTrajectory() { bitField0_ = (bitField0_ & ~0x00000001); predictedTrajectory_ = null; if (predictedTrajectoryBuilder_ != null) { predictedTrajectoryBuilder_.dispose(); predictedTrajectoryBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. Spec for predicted tool call trajectory. * </pre> * * <code> * optional .google.cloud.aiplatform.v1beta1.Trajectory predicted_trajectory = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.aiplatform.v1beta1.Trajectory.Builder getPredictedTrajectoryBuilder() { bitField0_ |= 0x00000001; onChanged(); return getPredictedTrajectoryFieldBuilder().getBuilder(); } /** * * * <pre> * Required. Spec for predicted tool call trajectory. 
* </pre> * * <code> * optional .google.cloud.aiplatform.v1beta1.Trajectory predicted_trajectory = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.aiplatform.v1beta1.TrajectoryOrBuilder getPredictedTrajectoryOrBuilder() { if (predictedTrajectoryBuilder_ != null) { return predictedTrajectoryBuilder_.getMessageOrBuilder(); } else { return predictedTrajectory_ == null ? com.google.cloud.aiplatform.v1beta1.Trajectory.getDefaultInstance() : predictedTrajectory_; } } /** * * * <pre> * Required. Spec for predicted tool call trajectory. * </pre> * * <code> * optional .google.cloud.aiplatform.v1beta1.Trajectory predicted_trajectory = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.Trajectory, com.google.cloud.aiplatform.v1beta1.Trajectory.Builder, com.google.cloud.aiplatform.v1beta1.TrajectoryOrBuilder> getPredictedTrajectoryFieldBuilder() { if (predictedTrajectoryBuilder_ == null) { predictedTrajectoryBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.Trajectory, com.google.cloud.aiplatform.v1beta1.Trajectory.Builder, com.google.cloud.aiplatform.v1beta1.TrajectoryOrBuilder>( getPredictedTrajectory(), getParentForChildren(), isClean()); predictedTrajectory_ = null; } return predictedTrajectoryBuilder_; } private com.google.cloud.aiplatform.v1beta1.Trajectory referenceTrajectory_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.Trajectory, com.google.cloud.aiplatform.v1beta1.Trajectory.Builder, com.google.cloud.aiplatform.v1beta1.TrajectoryOrBuilder> referenceTrajectoryBuilder_; /** * * * <pre> * Required. Spec for reference tool call trajectory. * </pre> * * <code> * optional .google.cloud.aiplatform.v1beta1.Trajectory reference_trajectory = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the referenceTrajectory field is set. 
*/ public boolean hasReferenceTrajectory() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. Spec for reference tool call trajectory. * </pre> * * <code> * optional .google.cloud.aiplatform.v1beta1.Trajectory reference_trajectory = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The referenceTrajectory. */ public com.google.cloud.aiplatform.v1beta1.Trajectory getReferenceTrajectory() { if (referenceTrajectoryBuilder_ == null) { return referenceTrajectory_ == null ? com.google.cloud.aiplatform.v1beta1.Trajectory.getDefaultInstance() : referenceTrajectory_; } else { return referenceTrajectoryBuilder_.getMessage(); } } /** * * * <pre> * Required. Spec for reference tool call trajectory. * </pre> * * <code> * optional .google.cloud.aiplatform.v1beta1.Trajectory reference_trajectory = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setReferenceTrajectory(com.google.cloud.aiplatform.v1beta1.Trajectory value) { if (referenceTrajectoryBuilder_ == null) { if (value == null) { throw new NullPointerException(); } referenceTrajectory_ = value; } else { referenceTrajectoryBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. Spec for reference tool call trajectory. * </pre> * * <code> * optional .google.cloud.aiplatform.v1beta1.Trajectory reference_trajectory = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setReferenceTrajectory( com.google.cloud.aiplatform.v1beta1.Trajectory.Builder builderForValue) { if (referenceTrajectoryBuilder_ == null) { referenceTrajectory_ = builderForValue.build(); } else { referenceTrajectoryBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. Spec for reference tool call trajectory. 
* </pre> * * <code> * optional .google.cloud.aiplatform.v1beta1.Trajectory reference_trajectory = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeReferenceTrajectory(com.google.cloud.aiplatform.v1beta1.Trajectory value) { if (referenceTrajectoryBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && referenceTrajectory_ != null && referenceTrajectory_ != com.google.cloud.aiplatform.v1beta1.Trajectory.getDefaultInstance()) { getReferenceTrajectoryBuilder().mergeFrom(value); } else { referenceTrajectory_ = value; } } else { referenceTrajectoryBuilder_.mergeFrom(value); } if (referenceTrajectory_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. Spec for reference tool call trajectory. * </pre> * * <code> * optional .google.cloud.aiplatform.v1beta1.Trajectory reference_trajectory = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearReferenceTrajectory() { bitField0_ = (bitField0_ & ~0x00000002); referenceTrajectory_ = null; if (referenceTrajectoryBuilder_ != null) { referenceTrajectoryBuilder_.dispose(); referenceTrajectoryBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. Spec for reference tool call trajectory. * </pre> * * <code> * optional .google.cloud.aiplatform.v1beta1.Trajectory reference_trajectory = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.aiplatform.v1beta1.Trajectory.Builder getReferenceTrajectoryBuilder() { bitField0_ |= 0x00000002; onChanged(); return getReferenceTrajectoryFieldBuilder().getBuilder(); } /** * * * <pre> * Required. Spec for reference tool call trajectory. 
* </pre> * * <code> * optional .google.cloud.aiplatform.v1beta1.Trajectory reference_trajectory = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.aiplatform.v1beta1.TrajectoryOrBuilder getReferenceTrajectoryOrBuilder() { if (referenceTrajectoryBuilder_ != null) { return referenceTrajectoryBuilder_.getMessageOrBuilder(); } else { return referenceTrajectory_ == null ? com.google.cloud.aiplatform.v1beta1.Trajectory.getDefaultInstance() : referenceTrajectory_; } } /** * * * <pre> * Required. Spec for reference tool call trajectory. * </pre> * * <code> * optional .google.cloud.aiplatform.v1beta1.Trajectory reference_trajectory = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.Trajectory, com.google.cloud.aiplatform.v1beta1.Trajectory.Builder, com.google.cloud.aiplatform.v1beta1.TrajectoryOrBuilder> getReferenceTrajectoryFieldBuilder() { if (referenceTrajectoryBuilder_ == null) { referenceTrajectoryBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.Trajectory, com.google.cloud.aiplatform.v1beta1.Trajectory.Builder, com.google.cloud.aiplatform.v1beta1.TrajectoryOrBuilder>( getReferenceTrajectory(), getParentForChildren(), isClean()); referenceTrajectory_ = null; } return referenceTrajectoryBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.TrajectoryRecallInstance) } // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.TrajectoryRecallInstance) private static final 
com.google.cloud.aiplatform.v1beta1.TrajectoryRecallInstance DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.TrajectoryRecallInstance(); } public static com.google.cloud.aiplatform.v1beta1.TrajectoryRecallInstance getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<TrajectoryRecallInstance> PARSER = new com.google.protobuf.AbstractParser<TrajectoryRecallInstance>() { @java.lang.Override public TrajectoryRecallInstance parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<TrajectoryRecallInstance> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<TrajectoryRecallInstance> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.TrajectoryRecallInstance getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/derby
36,192
java/org.apache.derby.tests/org/apache/derbyTesting/functionTests/tests/upgradeTests/Changes10_7.java
/* Derby - Class org.apache.derbyTesting.functionTests.tests.upgradeTests.Changes10_7 Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.derbyTesting.functionTests.tests.upgradeTests; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.SQLWarning; import java.sql.Statement; import java.util.HashSet; import java.util.Set; import junit.framework.Test; import org.apache.derbyTesting.junit.BaseTestSuite; import org.apache.derbyTesting.junit.JDBC; import org.apache.derbyTesting.junit.SupportFilesSetup; /** * Upgrade test cases for 10.7. * If the old version is 10.7 or later then these tests * will not be run. 
 * <BR>
 * 10.7 Upgrade issues
 * <UL>
 * <LI>BOOLEAN data type support expanded.</LI>
 * </UL>
 */
public class Changes10_7 extends UpgradeChange
{
    ///////////////////////////////////////////////////////////////////////////////////
    //
    // CONSTANTS
    //
    ///////////////////////////////////////////////////////////////////////////////////

    // SQLState raised by old releases whose parser does not understand the
    // new syntax at all.
    private static final String SYNTAX_ERROR = "42X01";
    // SQLState raised when the new engine understands the syntax but the
    // database is still at an old dictionary level (hard upgrade needed).
    private static final String UPGRADE_REQUIRED = "XCL47";
    // SQLState raised when GRANT/REVOKE-related features are used without
    // sqlAuthorization being enabled.
    private static final String GRANT_REVOKE_WITH_LEGACY_ACCESS = "42Z60";

    ///////////////////////////////////////////////////////////////////////////////////
    //
    // STATE
    //
    ///////////////////////////////////////////////////////////////////////////////////

    ///////////////////////////////////////////////////////////////////////////////////
    //
    // CONSTRUCTOR
    //
    ///////////////////////////////////////////////////////////////////////////////////

    /**
     * Create a test case with the given name.
     *
     * @param name the name of the test method to run
     */
    public Changes10_7(String name)
    {
        super(name);
    }

    ///////////////////////////////////////////////////////////////////////////////////
    //
    // JUnit BEHAVIOR
    //
    ///////////////////////////////////////////////////////////////////////////////////

    /**
     * Return the suite of tests to test the changes made in 10.7.
     *
     * @param phase an integer that indicates the current phase in
     *              the upgrade test.
     * @return the test suite created.
     */
    public static Test suite(int phase)
    {
        BaseTestSuite suite = new BaseTestSuite("Upgrade test for 10.7");

        suite.addTestSuite(Changes10_7.class);
        return new SupportFilesSetup((Test) suite);
    }

    ///////////////////////////////////////////////////////////////////////////////////
    //
    // TESTS
    //
    ///////////////////////////////////////////////////////////////////////////////////

    /**
     * Make sure that the database is at level 10.7 in order to enjoy
     * extended support for the BOOLEAN datatype.
     */
    public void testBoolean() throws SQLException
    {
        // A routine declaration that requires BOOLEAN support in DDL.
        String booleanValuedFunction =
            "create function f_4655( a varchar( 100 ) ) returns boolean\n" +
            "language java parameter style java no sql deterministic\n" +
            "external name 'Z.getBooleanValue'\n";

        Statement s = createStatement();

        switch ( getPhase() )
        {
        case PH_CREATE: // create with old version
        case PH_POST_SOFT_UPGRADE: // soft-downgrade: boot with old version after soft-upgrade
            // Old engine: BOOLEAN is not even recognized by the parser.
            assertFalse(getSupportedTypes().contains("BOOLEAN"));
            assertStatementError( SYNTAX_ERROR, s, booleanValuedFunction );
            break;

        case PH_SOFT_UPGRADE: // boot with new version and soft-upgrade
            // New engine, old dictionary: syntax is understood but the
            // feature requires a hard upgrade.
            assertFalse(getSupportedTypes().contains("BOOLEAN"));
            assertStatementError( UPGRADE_REQUIRED, s, booleanValuedFunction );
            break;

        case PH_HARD_UPGRADE: // boot with new version and hard-upgrade
            // Fully upgraded: BOOLEAN is reported and the DDL succeeds.
            assertTrue(getSupportedTypes().contains("BOOLEAN"));
            s.execute( booleanValuedFunction );
            break;
        }

        s.close();
    }

    /**
     * Get the names of all supported types, as reported by
     * {@code DatabaseMetaData.getTypeInfo()}.
     *
     * @return a set with the names of all supported types in the loaded
     * version of Derby
     */
    private Set<String> getSupportedTypes() throws SQLException {
        HashSet<String> types = new HashSet<String>();
        ResultSet rs = getConnection().getMetaData().getTypeInfo();
        while (rs.next()) {
            types.add(rs.getString("TYPE_NAME"));
        }
        rs.close();
        return types;
    }

    /**
     * This test creates 2 kinds of triggers in the old release for each of
     * the three phases of upgrade. The triggers are of the following 2 types:
     * 1) trigger action using columns available through the REFERENCING
     *    clause.
     * 2) trigger action using columns without the REFERENCING clause.
     * For both kinds of triggers, there is a test case which drops the column
     * being used in the trigger action column.
     *
     * In all three modes of upgrade, soft upgrade, post soft upgrade, and
     * hard upgrade, ALTER TABLE DROP COLUMN will detect the trigger
     * dependency.
     */
    public void testAlterTableDropColumnAndTriggerAction() throws Exception
    {
        // ALTER TABLE DROP COLUMN was introduced in 10.3 so no point running
        // this test with earlier releases.
        if (!oldAtLeast(10, 3))
            return;

        Statement s = createStatement();
        ResultSet rs;

        switch ( getPhase() )
        {
        case PH_CREATE: // create with old version
            // Create 4 tables for each of the upgrade phases.
            //
            // There will be 2 tests in each upgrade phase:
            // 1) One test will use the column being dropped as part of the
            //    trigger action column through the REFERENCING clause.
            // 2) Second test will use the column being dropped as part of the
            //    trigger action sql without the REFERENCING clause.
            //
            // For each of the two tests, one table will be used for
            // ALTER TABLE DROP COLUMN RESTRICT and the second table will
            // be used for ALTER TABLE DROP COLUMN CASCADE.

            // Following 4 tables and triggers will be used in soft upgrade
            // mode.
            // The trigger actions on the following 2 tables use a column
            // through the REFERENCING clause.
            createTableAndTrigger("TAB1_SOFT_UPGRADE_RESTRICT",
                    "TAB1_SOFT_UPGRADE_RESTRICT_TR1",
                    true);
            createTableAndTrigger("TAB1_SOFT_UPGRADE_CASCADE",
                    "TAB1_SOFT_UPGRADE_CASCADE_TR1",
                    true);
            // The trigger actions on the following 2 tables use a column
            // without the REFERENCING clause.
            createTableAndTrigger("TAB2_SOFT_UPGRADE_RESTRICT",
                    "TAB2_SOFT_UPGRADE_RESTRICT_TR1",
                    false);
            createTableAndTrigger("TAB2_SOFT_UPGRADE_CASCADE",
                    "TAB2_SOFT_UPGRADE_CASCADE_TR1",
                    false);

            // Following 4 tables and triggers will be used in post-soft
            // upgrade mode.
            // The trigger actions on the following 2 tables use a column
            // through the REFERENCING clause.
            createTableAndTrigger("TAB1_POSTSOFT_UPGRADE_RESTRICT",
                    "TAB1_POSTSOFT_UPGRADE_RESTRICT_TR1",
                    true);
            createTableAndTrigger("TAB1_POSTSOFT_UPGRADE_CASCADE",
                    "TAB1_POSTSOFT_UPGRADE_CASCADE_TR1",
                    true);
            // The trigger actions on the following 2 tables use a column
            // without the REFERENCING clause.
            createTableAndTrigger("TAB2_POSTSOFT_UPGRADE_RESTRICT",
                    "TAB2_POSTSOFT_UPGRADE_RESTRICT_TR1",
                    false);
            createTableAndTrigger("TAB2_POSTSOFT_UPGRADE_CASCADE",
                    "TAB2_POSTSOFT_UPGRADE_CASCADE_TR1",
                    false);

            // Following 4 tables and triggers will be used in hard
            // upgrade mode.
            // The trigger actions on the following 2 tables use a column
            // through the REFERENCING clause.
            createTableAndTrigger("TAB1_HARD_UPGRADE_RESTRICT",
                    "TAB1_HARD_UPGRADE_RESTRICT_TR1",
                    true);
            createTableAndTrigger("TAB1_HARD_UPGRADE_CASCADE",
                    "TAB1_HARD_UPGRADE_CASCADE_TR1",
                    true);
            // The trigger actions on the following 2 tables use a column
            // without the REFERENCING clause.
            createTableAndTrigger("TAB2_HARD_UPGRADE_RESTRICT",
                    "TAB2_HARD_UPGRADE_RESTRICT_TR1",
                    false);
            createTableAndTrigger("TAB2_HARD_UPGRADE_CASCADE",
                    "TAB2_HARD_UPGRADE_CASCADE_TR1",
                    false);
            break;

        case PH_SOFT_UPGRADE: // boot with new version and soft-upgrade
            // The trigger has trigger action using the column being dropped
            // through the REFERENCING clause. Because of this,
            // DROP COLUMN RESTRICT will fail.
            assertStatementError("X0Y25", s,
                    " alter table TAB1_SOFT_UPGRADE_RESTRICT " +
                    " drop column c11 restrict");
            // Verify that the trigger still exists in the system.
            rs = s.executeQuery(
                    " select triggername from sys.systriggers where " +
                    "triggername='TAB1_SOFT_UPGRADE_RESTRICT_TR1'");
            JDBC.assertFullResultSet(rs,
                    new String[][]{{"TAB1_SOFT_UPGRADE_RESTRICT_TR1"}});

            // The trigger has trigger action using the column being dropped
            // through the REFERENCING clause. Because of this,
            // DROP COLUMN CASCADE will drop the dependent trigger.
            s.executeUpdate("alter table TAB1_SOFT_UPGRADE_CASCADE " +
                    " drop column c11 CASCADE");
            checkWarning(s, "01502");
            // Verify that the trigger does not exist in the system anymore.
            JDBC.assertEmpty(s.executeQuery(
                    " select triggername from sys.systriggers where " +
                    "triggername='TAB1_SOFT_UPGRADE_CASCADE_TR1'"));

            // The trigger has trigger action using the column being dropped
            // (not through the REFERENCING clause). Because of this,
            // DROP COLUMN RESTRICT will fail.
            assertStatementError("X0Y25", s,
                    " alter table TAB2_SOFT_UPGRADE_RESTRICT " +
                    " drop column c11 restrict");
            // Verify that the trigger still exists in the system.
            rs = s.executeQuery(
                    " select triggername from sys.systriggers where " +
                    "triggername='TAB2_SOFT_UPGRADE_RESTRICT_TR1'");
            JDBC.assertFullResultSet(rs,
                    new String[][]{{"TAB2_SOFT_UPGRADE_RESTRICT_TR1"}});

            // The trigger has trigger action using the column being dropped
            // (not through the REFERENCING clause). Because of this,
            // DROP COLUMN CASCADE will drop the dependent trigger.
            s.executeUpdate("alter table TAB2_SOFT_UPGRADE_CASCADE " +
                    " drop column c11 CASCADE");
            checkWarning(s, "01502");
            // Verify that the trigger does not exist in the system anymore.
            JDBC.assertEmpty(s.executeQuery(
                    " select triggername from sys.systriggers where " +
                    "triggername='TAB2_SOFT_UPGRADE_CASCADE_TR1'"));

            // Same behavior can be seen with tables and triggers created
            // in soft upgrade mode using the Derby 10.7 release.
            // The trigger action in this test case uses a column through the
            // REFERENCING clause. Because of this,
            // DROP COLUMN RESTRICT will fail.
            createTableAndTrigger("TAB1_SOFT_UPGRADE_NEW_TABLE_RESTRICT",
                    "TAB1_SOFT_UPGRADE_NEW_TABLE_RESTRICT_TR1",
                    true);
            assertStatementError("X0Y25", s,
                    " alter table TAB1_SOFT_UPGRADE_NEW_TABLE_RESTRICT " +
                    " drop column c11 restrict");
            // Verify that the trigger still exists in the system.
            rs = s.executeQuery(
                    " select triggername from sys.systriggers where " +
                    "triggername='TAB1_SOFT_UPGRADE_NEW_TABLE_RESTRICT_TR1'");
            JDBC.assertFullResultSet(rs,
                    new String[][]{{"TAB1_SOFT_UPGRADE_NEW_TABLE_RESTRICT_TR1"}});

            // Same behavior can be seen with tables and triggers created
            // in soft upgrade mode using the Derby 10.7 release.
            // The trigger action in this test case uses a column through the
            // REFERENCING clause. Because of this,
            // DROP COLUMN CASCADE will drop the dependent trigger.
            createTableAndTrigger("TAB1_SOFT_UPGRADE_NEW_TABLE_CASCADE",
                    "TAB1_SOFT_UPGRADE_NEW_TABLE_CASCADE_TR1",
                    true);
            s.executeUpdate("alter table TAB1_SOFT_UPGRADE_NEW_TABLE_CASCADE " +
                    " drop column c11 CASCADE");
            checkWarning(s, "01502");
            // Verify that the trigger does not exist in the system anymore.
            JDBC.assertEmpty(s.executeQuery(
                    " select triggername from sys.systriggers where " +
                    "triggername='TAB1_SOFT_UPGRADE_NEW_TABLE_CASCADE_TR1'"));

            // Same behavior can be seen with tables and triggers created
            // in soft upgrade mode using the Derby 10.7 release.
            // The trigger action in this test case uses a column
            // (not through the REFERENCING clause). Because of this,
            // DROP COLUMN RESTRICT will fail.
            createTableAndTrigger("TAB2_SOFT_UPGRADE_NEW_TABLE_RESTRICT",
                    "TAB2_SOFT_UPGRADE_NEW_TABLE_RESTRICT_TR1",
                    false);
            assertStatementError("X0Y25", s,
                    " alter table TAB2_SOFT_UPGRADE_NEW_TABLE_RESTRICT " +
                    " drop column c11 restrict");
            // Verify that the trigger still exists in the system.
            rs = s.executeQuery(
                    " select triggername from sys.systriggers where " +
                    "triggername='TAB2_SOFT_UPGRADE_NEW_TABLE_RESTRICT_TR1'");
            JDBC.assertFullResultSet(rs,
                    new String[][]{{"TAB2_SOFT_UPGRADE_NEW_TABLE_RESTRICT_TR1"}});

            // Same behavior can be seen with tables and triggers created
            // in soft upgrade mode using the Derby 10.7 release.
            // The trigger action in this test case uses a column
            // (not through the REFERENCING clause). Because of this,
            // DROP COLUMN CASCADE will drop the dependent trigger.
            // NOTE(review): the table name below appears in mixed case in the
            // original source; Derby identifiers are case-insensitive unless
            // quoted, so this resolves to the same table.
            createTableAndTrigger("TAB2_SOFT_UPGRADE_NEW_TABLE_CASCADE",
                    "TAB2_SOFT_UPGRADE_NEW_TABLE_CASCADE_TR1",
                    false);
            s.executeUpdate("alter table TAB2_soft_upgrade_NEW_TABLE_cascade " +
                    " drop column c11 CASCADE");
            checkWarning(s, "01502");
            // Verify that the trigger does not exist in the system anymore.
            JDBC.assertEmpty(s.executeQuery(
                    " select triggername from sys.systriggers where " +
                    "triggername='TAB2_SOFT_UPGRADE_NEW_TABLE_CASCADE_TR1'"));
            break;

        case PH_POST_SOFT_UPGRADE:
            // soft-downgrade: boot with old version after soft-upgrade.
            // The tables created with 10.6 and prior versions will exhibit
            // incorrect behavior because changes for DERBY-4887/DERBY-4984
            // have not been backported to 10.6 and earlier yet.
            //
            // ALTER TABLE DROP COLUMN will not detect the column being
            // dropped in the trigger action of dependent triggers.
            incorrectBehaviorForDropColumn("TAB1_POSTSOFT_UPGRADE_RESTRICT",
                    "TAB1_POSTSOFT_UPGRADE_RESTRICT_TR1",
                    "RESTRICT");
            incorrectBehaviorForDropColumn("TAB1_POSTSOFT_UPGRADE_CASCADE",
                    "TAB1_POSTSOFT_UPGRADE_CASCADE_TR1",
                    "CASCADE");
            incorrectBehaviorForDropColumn("TAB2_POSTSOFT_UPGRADE_RESTRICT",
                    "TAB2_POSTSOFT_UPGRADE_RESTRICT_TR1",
                    "RESTRICT");
            incorrectBehaviorForDropColumn("TAB2_POSTSOFT_UPGRADE_CASCADE",
                    "TAB2_POSTSOFT_UPGRADE_CASCADE_TR1",
                    "CASCADE");

            // We are back to the pre-10.7 version after the soft upgrade.
            // ALTER TABLE DROP COLUMN will continue to behave incorrectly
            // and will not detect the trigger actions referencing the column
            // being dropped through the REFERENCING clause.
            // NOTE(review): "RESTRICT" is passed for the *_CASCADE tables
            // below — presumably intentional, since the old release ignores
            // the trigger dependency either way; confirm against DERBY-4887.
            createTableAndTrigger("TAB1_POST_SOFT_UPGRADE_NEW_TABLE_RESTRICT",
                    "TAB1_POST_SOFT_UPGRADE_NEW_TABLE_RESTRICT_TR1",
                    true);
            incorrectBehaviorForDropColumn("TAB1_POST_SOFT_UPGRADE_NEW_TABLE_RESTRICT",
                    "TAB1_POST_SOFT_UPGRADE_NEW_TABLE_RESTRICT_TR1",
                    "RESTRICT");
            createTableAndTrigger("TAB1_POST_SOFT_UPGRADE_NEW_TABLE_CASCADE",
                    "TAB1_POST_SOFT_UPGRADE_NEW_TABLE_CASCADE_TR1",
                    true);
            incorrectBehaviorForDropColumn("TAB1_POST_SOFT_UPGRADE_NEW_TABLE_CASCADE",
                    "TAB1_POST_SOFT_UPGRADE_NEW_TABLE_CASCADE_TR1",
                    "RESTRICT");

            // We are back to the pre-10.7 version after the soft upgrade.
            // ALTER TABLE DROP COLUMN will continue to behave incorrectly
            // and will not detect the trigger actions referencing the column
            // being dropped directly (ie without the REFERENCING clause).
            createTableAndTrigger("TAB2_POST_SOFT_UPGRADE_NEW_TABLE_RESTRICT",
                    "TAB2_POST_SOFT_UPGRADE_NEW_TABLE_RESTRICT_TR1",
                    false);
            incorrectBehaviorForDropColumn("TAB2_POST_SOFT_UPGRADE_NEW_TABLE_RESTRICT",
                    "TAB2_POST_SOFT_UPGRADE_NEW_TABLE_RESTRICT_TR1",
                    "RESTRICT");
            createTableAndTrigger("TAB2_POST_SOFT_UPGRADE_NEW_TABLE_CASCADE",
                    "TAB2_POST_SOFT_UPGRADE_NEW_TABLE_CASCADE_TR1",
                    false);
            incorrectBehaviorForDropColumn("TAB2_POST_SOFT_UPGRADE_NEW_TABLE_CASCADE",
                    "TAB2_POST_SOFT_UPGRADE_NEW_TABLE_CASCADE_TR1",
                    "RESTRICT");
            break;

        case PH_HARD_UPGRADE: // boot with new version and hard-upgrade
            // The trigger has trigger action using the column being dropped
            // through the REFERENCING clause. Because of this,
            // DROP COLUMN RESTRICT will fail.
            assertStatementError("X0Y25", s,
                    " alter table TAB1_HARD_UPGRADE_RESTRICT " +
                    " drop column c11 restrict");
            // Verify that the trigger still exists in the system.
            rs = s.executeQuery(
                    " select triggername from sys.systriggers where " +
                    "triggername='TAB1_HARD_UPGRADE_RESTRICT_TR1'");
            JDBC.assertFullResultSet(rs,
                    new String[][]{{"TAB1_HARD_UPGRADE_RESTRICT_TR1"}});

            // The trigger has trigger action using the column being dropped
            // through the REFERENCING clause. Because of this,
            // DROP COLUMN CASCADE will drop the dependent trigger.
            s.executeUpdate("alter table TAB1_HARD_UPGRADE_CASCADE " +
                    " drop column c11 CASCADE");
            checkWarning(s, "01502");
            // Verify that the trigger does not exist in the system anymore.
            JDBC.assertEmpty(s.executeQuery(
                    " select triggername from sys.systriggers where " +
                    "triggername='TAB1_HARD_UPGRADE_CASCADE_TR1'"));

            // The trigger has trigger action using the column being dropped
            // (not through the REFERENCING clause). Because of this,
            // DROP COLUMN RESTRICT will fail.
            assertStatementError("X0Y25", s,
                    " alter table TAB2_HARD_UPGRADE_RESTRICT " +
                    " drop column c11 restrict");
            // Verify that the trigger still exists in the system.
            rs = s.executeQuery(
                    " select triggername from sys.systriggers where " +
                    "triggername='TAB2_HARD_UPGRADE_RESTRICT_TR1'");
            JDBC.assertFullResultSet(rs,
                    new String[][]{{"TAB2_HARD_UPGRADE_RESTRICT_TR1"}});

            // The trigger has trigger action using the column being dropped
            // (not through the REFERENCING clause). Because of this,
            // DROP COLUMN CASCADE will drop the dependent trigger.
            s.executeUpdate("alter table TAB2_HARD_UPGRADE_CASCADE " +
                    " drop column c11 CASCADE");
            checkWarning(s, "01502");
            // Verify that the trigger does not exist in the system anymore.
            JDBC.assertEmpty(s.executeQuery(
                    " select triggername from sys.systriggers where " +
                    "triggername='TAB2_HARD_UPGRADE_CASCADE_TR1'"));

            // Create 2 new tables now that the database has been upgraded.
            // Notice that newly created tables will be able to detect
            // trigger action reference to a column through the REFERENCING
            // clause.
            createTableAndTrigger("TAB1_HARD_UPGRADE_NEW_TABLE_RESTRICT",
                    "TAB1_HARD_UPGRADE_NEW_TABLE_RESTRICT_TR1",
                    true);
            assertStatementError("X0Y25", s,
                    " alter table TAB1_HARD_UPGRADE_NEW_TABLE_RESTRICT " +
                    " drop column c11 restrict");
            // Verify that the trigger still exists in the system.
            rs = s.executeQuery(
                    " select triggername from sys.systriggers where " +
                    "triggername='TAB1_HARD_UPGRADE_NEW_TABLE_RESTRICT_TR1'");
            JDBC.assertFullResultSet(rs,
                    new String[][]{{"TAB1_HARD_UPGRADE_NEW_TABLE_RESTRICT_TR1"}});

            // The trigger has trigger action using the column being dropped
            // through the REFERENCING clause. Because of this,
            // DROP COLUMN CASCADE will drop the dependent trigger.
            createTableAndTrigger("TAB1_HARD_UPGRADE_NEW_TABLE_CASCADE",
                    "TAB1_HARD_UPGRADE_NEW_TABLE_CASCADE_TR1",
                    true);
            s.executeUpdate("alter table TAB1_HARD_UPGRADE_NEW_TABLE_CASCADE " +
                    " drop column c11 CASCADE");
            checkWarning(s, "01502");
            // Verify that the trigger does not exist in the system anymore.
            JDBC.assertEmpty(s.executeQuery(
                    " select triggername from sys.systriggers where " +
                    "triggername='TAB1_HARD_UPGRADE_NEW_TABLE_CASCADE_TR1'"));

            // Create 2 new tables now that the database has been upgraded.
            // Notice that newly created tables will be able to detect the
            // trigger action column (without the REFERENCING clause).
            // Because of this, DROP COLUMN RESTRICT will fail.
            createTableAndTrigger("TAB2_HARD_UPGRADE_NEW_TABLE_RESTRICT",
                    "TAB2_HARD_UPGRADE_NEW_TABLE_RESTRICT_TR1",
                    false);
            assertStatementError("X0Y25", s,
                    " alter table TAB2_HARD_UPGRADE_NEW_TABLE_RESTRICT " +
                    " drop column c11 restrict");
            // Verify that the trigger still exists in the system.
            rs = s.executeQuery(
                    " select triggername from sys.systriggers where " +
                    "triggername='TAB2_HARD_UPGRADE_NEW_TABLE_RESTRICT_TR1'");
            JDBC.assertFullResultSet(rs,
                    new String[][]{{"TAB2_HARD_UPGRADE_NEW_TABLE_RESTRICT_TR1"}});

            // The trigger has trigger action using the column being dropped
            // (not through the REFERENCING clause). Because of this,
            // DROP COLUMN CASCADE will drop the dependent trigger.
            createTableAndTrigger("TAB2_HARD_UPGRADE_NEW_TABLE_CASCADE",
                    "TAB2_HARD_UPGRADE_NEW_TABLE_CASCADE_TR1",
                    false);
            s.executeUpdate("alter table TAB2_HARD_UPGRADE_NEW_TABLE_CASCADE " +
                    " drop column c11 CASCADE");
            checkWarning(s, "01502");
            // Verify that the trigger does not exist in the system anymore.
            JDBC.assertEmpty(s.executeQuery(
                    " select triggername from sys.systriggers where " +
                    "triggername='TAB2_HARD_UPGRADE_NEW_TABLE_CASCADE_TR1'"));
            break;
        }
    }

    // Create the table and trigger necessary for the ALTER TABLE DROP COLUMN
    // test. The trigger fires on UPDATE OF c12 and its action reads c11,
    // either through the REFERENCING clause (oldt.c11) or directly.
    private void createTableAndTrigger(String tableName,
            String triggerName, boolean usesReferencingClause)
    throws SQLException {
        Statement s = createStatement();
        ResultSet rs;

        s.execute("CREATE TABLE " + tableName + " (c11 int, c12 int) ");
        s.execute("INSERT INTO " + tableName + " VALUES (1,10)");
        s.execute("CREATE TRIGGER " + triggerName +
                " AFTER UPDATE OF c12 ON " + tableName +
                (usesReferencingClause ? " REFERENCING OLD AS oldt" : "" )+
                " FOR EACH ROW SELECT " +
                (usesReferencingClause ? "oldt.c11 " : "c11 " )+
                "FROM " + tableName);
        // Fire the trigger once so it is fully compiled/used.
        s.executeUpdate("UPDATE " + tableName + " SET c12=c12+1");
    }

    // ALTER TABLE DROP COLUMN does not detect the trigger column dependency
    // for columns used in the trigger action for triggers created prior to
    // the 10.7 release. This helper asserts that incorrect (legacy) behavior:
    // the DROP COLUMN succeeds and the now-invalid trigger is left behind.
    private void incorrectBehaviorForDropColumn(String tableName,
            String triggerName, String restrictOrCascade)
    throws SQLException {
        Statement s = createStatement();
        ResultSet rs;

        // ALTER TABLE DROP COLUMN of a column used in the trigger action
        // through the REFERENCING clause does not detect the trigger
        // dependency in older releases.
        // RESTRICT won't give any error for the dependent trigger and will
        // drop column c11 even though it is getting used in the trigger
        // action, and will leave the invalid trigger in the system.
        // CASCADE won't give any warning for the dependent trigger and will
        // drop column c11 even though it is getting used in the trigger
        // action, and will leave the invalid trigger in the system.
        s.executeUpdate("ALTER TABLE " + tableName +
                " DROP COLUMN c11 " + restrictOrCascade);
        rs = s.executeQuery(
                " select triggername from sys.systriggers where " +
                "triggername='" + triggerName + "'");
        JDBC.assertFullResultSet(rs, new String[][]{{triggerName}});
    }

    // Assert that the statement (or, failing that, the connection) carries
    // a warning with the expected SQLState.
    private void checkWarning(Statement st, String expectedWarning)
    throws Exception {
        SQLWarning sqlWarn = null;

        sqlWarn = st.getWarnings();
        if (sqlWarn == null) {
            // Some code paths attach the warning to the connection instead.
            sqlWarn = getConnection().getWarnings();
        }
        assertNotNull("Expected warning but found none", sqlWarn);
        assertSQLState(expectedWarning, sqlWarn);
    }

    /**
     * Make sure that DERBY-1482 changes do not break backward compatibility.
     * Triggers created in old releases, in soft-upgrade mode, and after hard
     * upgrade must all keep firing.
     */
    public void testTriggers() throws SQLException {
        Statement s = createStatement();
        ResultSet rs;
        // MODE DB2SQL became optional in 10.3; older releases require it.
        boolean modeDb2SqlOptional = oldAtLeast(10, 3);

        switch ( getPhase() )
        {
        case PH_CREATE: // create with old version
            s.execute("CREATE TABLE DERBY1482_table1(c11 int, c12 int)");
            s.execute("INSERT INTO DERBY1482_table1 VALUES (1,10)");
            s.execute("CREATE TABLE DERBY1482_table2(c21 int, c22 int)");
            s.execute("CREATE TABLE DERBY1482_table3(c31 int, c32 int)");
            s.execute("CREATE TABLE DERBY1482_table4(c41 int, c42 int)");
            s.execute("CREATE TABLE DERBY1482_table5(c51 int, c52 int)");
            // Create the first trigger in the older release where the
            // database has been created. Every update of DERBY1482_table1.c12
            // will cause an insert into DERBY1482_table2 through trigger tr1.
            s.execute("CREATE TRIGGER tr1 AFTER UPDATE OF c12 " +
                    "ON DERBY1482_table1 REFERENCING OLD AS oldt " +
                    "FOR EACH ROW " +
                    (modeDb2SqlOptional?"":"MODE DB2SQL ") +
                    "INSERT INTO DERBY1482_table2 VALUES(-1, oldt.c12)");
            // Now do an update which will fire trigger tr1.
            s.executeUpdate("UPDATE DERBY1482_table1 SET c12=-1 WHERE c11=1");
            // Verify that trigger tr1 has inserted one row in
            // DERBY1482_table2.
            rs = s.executeQuery("SELECT COUNT(*) FROM DERBY1482_table2");
            JDBC.assertFullResultSet(rs, new String[][]{{"1"}});
            break;

        case PH_SOFT_UPGRADE: // boot with new version and soft-upgrade
            // Now do an update while in the soft upgrade. This should
            // fire trigger tr1.
            s.executeUpdate("UPDATE DERBY1482_table1 SET c12=-1 WHERE c11=1");
            // Verify that now we have 2 rows in DERBY1482_table2 because
            // trigger tr1 has fired twice so far: once in the PH_CREATE
            // phase and once in the PH_SOFT_UPGRADE phase.
            rs = s.executeQuery("SELECT COUNT(*) FROM DERBY1482_table2");
            JDBC.assertFullResultSet(rs, new String[][]{{"2"}});
            // Create trigger tr2 in soft upgrade mode. DERBY-1482 changes
            // will not put anything about trigger action columns in
            // SYSTRIGGERS to maintain backward compatibility. Only 10.7
            // and up recognize additional information about trigger action
            // columns in SYSTRIGGERS.
            s.execute("CREATE TRIGGER tr2 AFTER UPDATE OF c12 ON DERBY1482_table1 " +
                    "REFERENCING OLD AS oldt FOR EACH ROW " +
                    (modeDb2SqlOptional?"":"MODE DB2SQL ") +
                    "INSERT INTO DERBY1482_table3 VALUES(-1, oldt.c12)");
            // Now do an update which will fire triggers tr1 and tr2.
            s.executeUpdate("UPDATE DERBY1482_table1 SET c12=-1 WHERE c11=1");
            // Verify that trigger tr1 has inserted one more row in
            // DERBY1482_table2.
            rs = s.executeQuery("SELECT COUNT(*) FROM DERBY1482_table2");
            JDBC.assertFullResultSet(rs, new String[][]{{"3"}});
            // Verify that trigger tr2 has inserted one row in
            // DERBY1482_table3.
            rs = s.executeQuery("SELECT COUNT(*) FROM DERBY1482_table3");
            JDBC.assertFullResultSet(rs, new String[][]{{"1"}});
            break;

        case PH_POST_SOFT_UPGRADE:
            // soft-downgrade: boot with old version after soft-upgrade.
            // Now do an update when we are back with the older release
            // after the soft upgrade. This should fire triggers tr1 and tr2.
            s.executeUpdate("UPDATE DERBY1482_table1 SET c12=-1 WHERE c11=1");
            // Verify that now we have 4 rows in DERBY1482_table2 and 2 rows
            // in DERBY1482_table3.
            rs = s.executeQuery("SELECT COUNT(*) FROM DERBY1482_table2");
            JDBC.assertFullResultSet(rs, new String[][]{{"4"}});
            rs = s.executeQuery("SELECT COUNT(*) FROM DERBY1482_table3");
            JDBC.assertFullResultSet(rs, new String[][]{{"2"}});
            // Create trigger tr3 with the older release. Triggers created in
            // soft-upgrade mode and with the older release should work fine.
            s.execute("CREATE TRIGGER tr3 AFTER UPDATE OF c12 ON DERBY1482_table1 " +
                    "REFERENCING OLD AS oldt FOR EACH ROW " +
                    (modeDb2SqlOptional?"":"MODE DB2SQL ") +
                    "INSERT INTO DERBY1482_table4 VALUES(-1, oldt.c12)");
            // Now do an update which will fire triggers tr1, tr2 and tr3.
            s.executeUpdate("UPDATE DERBY1482_table1 SET c12=-1 WHERE c11=1");
            // Verify that trigger tr1 has inserted one more row in
            // DERBY1482_table2.
            rs = s.executeQuery("SELECT COUNT(*) FROM DERBY1482_table2");
            JDBC.assertFullResultSet(rs, new String[][]{{"5"}});
            // Verify that trigger tr2 has inserted one more row in
            // DERBY1482_table3.
            rs = s.executeQuery("SELECT COUNT(*) FROM DERBY1482_table3");
            JDBC.assertFullResultSet(rs, new String[][]{{"3"}});
            // Verify that trigger tr3 has inserted one row in
            // DERBY1482_table4.
            rs = s.executeQuery("SELECT COUNT(*) FROM DERBY1482_table4");
            JDBC.assertFullResultSet(rs, new String[][]{{"1"}});
            break;

        case PH_HARD_UPGRADE: // boot with new version and hard-upgrade
            // Do an update after we have hard upgraded to 10.7 and make sure
            // that all the triggers (created with the older release and
            // created in soft-upgrade mode) work fine.
            s.executeUpdate("UPDATE DERBY1482_table1 SET c12=-1 WHERE c11=1");
            // Verify that now we have 6 rows in DERBY1482_table2, 4 rows in
            // DERBY1482_table3 and 2 rows in DERBY1482_table4.
            rs = s.executeQuery("SELECT COUNT(*) FROM DERBY1482_table2");
            JDBC.assertFullResultSet(rs, new String[][]{{"6"}});
            rs = s.executeQuery("SELECT COUNT(*) FROM DERBY1482_table3");
            JDBC.assertFullResultSet(rs, new String[][]{{"4"}});
            rs = s.executeQuery("SELECT COUNT(*) FROM DERBY1482_table4");
            JDBC.assertFullResultSet(rs, new String[][]{{"2"}});
            // Create trigger tr4 in the hard-upgraded db.
            s.execute("CREATE TRIGGER tr4 AFTER UPDATE OF c12 ON DERBY1482_table1 " +
                    "REFERENCING OLD AS oldt FOR EACH ROW " +
                    (modeDb2SqlOptional?"":"MODE DB2SQL ") +
                    "INSERT INTO DERBY1482_table5 VALUES(-1, oldt.c12)");
            // All 4 triggers tr1, tr2, tr3 and tr4 should fire.
            // Now do an update which will fire all 4 triggers
            // tr1, tr2, tr3, tr4.
            s.executeUpdate("UPDATE DERBY1482_table1 SET c12=-1 WHERE c11=1");
            // Verify that trigger tr1 has inserted one more row in
            // DERBY1482_table2.
            rs = s.executeQuery("SELECT COUNT(*) FROM DERBY1482_table2");
            JDBC.assertFullResultSet(rs, new String[][]{{"7"}});
            // Verify that trigger tr2 has inserted one more row in
            // DERBY1482_table3.
            rs = s.executeQuery("SELECT COUNT(*) FROM DERBY1482_table3");
            JDBC.assertFullResultSet(rs, new String[][]{{"5"}});
            // Verify that trigger tr3 has inserted one more row in
            // DERBY1482_table4.
            rs = s.executeQuery("SELECT COUNT(*) FROM DERBY1482_table4");
            JDBC.assertFullResultSet(rs, new String[][]{{"3"}});
            // Verify that trigger tr4 has inserted one row in
            // DERBY1482_table5.
            rs = s.executeQuery("SELECT COUNT(*) FROM DERBY1482_table5");
            JDBC.assertFullResultSet(rs, new String[][]{{"1"}});
            break;
        }

        s.close();
    }

    /**
     * Make sure that the database is at level 10.7 in order to enjoy
     * routines with specified EXTERNAL SECURITY INVOKER or DEFINER.
*/ public void testExternalSecuritySpecification() throws SQLException { String functionWithDefinersRights = "create function f_4551( a varchar( 100 ) ) returns int\n" + "language java parameter style java reads sql data\n" + "external security definer\n" + "external name 'Z.getIntValue'\n"; Statement s = createStatement(); switch ( getPhase() ) { case PH_CREATE: // create with old version case PH_POST_SOFT_UPGRADE: // soft-downgrade: boot with old version after soft-upgrade assertStatementError( SYNTAX_ERROR, s, functionWithDefinersRights ); break; case PH_SOFT_UPGRADE: // boot with new version and soft-upgrade assertStatementError( UPGRADE_REQUIRED, s, functionWithDefinersRights ); break; case PH_HARD_UPGRADE: // boot with new version and hard-upgrade. // Syntax now accepted and dictionary level ok, but // sqlAuthorization not enabled (a priori) - expected. assertStatementError(GRANT_REVOKE_WITH_LEGACY_ACCESS, s, functionWithDefinersRights ); break; } s.close(); } }
googleapis/google-cloud-java
36,545
java-java-shopping-merchant-issue-resolution/proto-google-shopping-merchant-issue-resolution-v1/src/main/java/com/google/shopping/merchant/issueresolution/v1/Callout.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/shopping/merchant/issueresolution/v1/issueresolution.proto // Protobuf Java Version: 3.25.8 package com.google.shopping.merchant.issueresolution.v1; /** * * * <pre> * An important message that should be highlighted. Usually displayed as a * banner. * </pre> * * Protobuf type {@code google.shopping.merchant.issueresolution.v1.Callout} */ public final class Callout extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.shopping.merchant.issueresolution.v1.Callout) CalloutOrBuilder { private static final long serialVersionUID = 0L; // Use Callout.newBuilder() to construct. 
private Callout(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private Callout() { styleHint_ = 0; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new Callout(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.shopping.merchant.issueresolution.v1.IssueResolutionProto .internal_static_google_shopping_merchant_issueresolution_v1_Callout_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.shopping.merchant.issueresolution.v1.IssueResolutionProto .internal_static_google_shopping_merchant_issueresolution_v1_Callout_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.shopping.merchant.issueresolution.v1.Callout.class, com.google.shopping.merchant.issueresolution.v1.Callout.Builder.class); } /** * * * <pre> * Enum specifying the suggested style, how the message should be rendered. * </pre> * * Protobuf enum {@code google.shopping.merchant.issueresolution.v1.Callout.CalloutStyleHint} */ public enum CalloutStyleHint implements com.google.protobuf.ProtocolMessageEnum { /** * * * <pre> * Default value. Will never be provided by the API. * </pre> * * <code>CALLOUT_STYLE_HINT_UNSPECIFIED = 0;</code> */ CALLOUT_STYLE_HINT_UNSPECIFIED(0), /** * * * <pre> * The most important type of information highlighting problems, like an * unsuccessful outcome of previously requested actions. * </pre> * * <code>ERROR = 1;</code> */ ERROR(1), /** * * * <pre> * Information warning about pending problems, risks or deadlines. * </pre> * * <code>WARNING = 2;</code> */ WARNING(2), /** * * * <pre> * Default severity for important information like pending status of * previously requested action or cooldown for re-review. * </pre> * * <code>INFO = 3;</code> */ INFO(3), UNRECOGNIZED(-1), ; /** * * * <pre> * Default value. 
Will never be provided by the API. * </pre> * * <code>CALLOUT_STYLE_HINT_UNSPECIFIED = 0;</code> */ public static final int CALLOUT_STYLE_HINT_UNSPECIFIED_VALUE = 0; /** * * * <pre> * The most important type of information highlighting problems, like an * unsuccessful outcome of previously requested actions. * </pre> * * <code>ERROR = 1;</code> */ public static final int ERROR_VALUE = 1; /** * * * <pre> * Information warning about pending problems, risks or deadlines. * </pre> * * <code>WARNING = 2;</code> */ public static final int WARNING_VALUE = 2; /** * * * <pre> * Default severity for important information like pending status of * previously requested action or cooldown for re-review. * </pre> * * <code>INFO = 3;</code> */ public static final int INFO_VALUE = 3; public final int getNumber() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalArgumentException( "Can't get the number of an unknown enum value."); } return value; } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static CalloutStyleHint valueOf(int value) { return forNumber(value); } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. 
*/ public static CalloutStyleHint forNumber(int value) { switch (value) { case 0: return CALLOUT_STYLE_HINT_UNSPECIFIED; case 1: return ERROR; case 2: return WARNING; case 3: return INFO; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<CalloutStyleHint> internalGetValueMap() { return internalValueMap; } private static final com.google.protobuf.Internal.EnumLiteMap<CalloutStyleHint> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<CalloutStyleHint>() { public CalloutStyleHint findValueByNumber(int number) { return CalloutStyleHint.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalStateException( "Can't get the descriptor of an unrecognized enum value."); } return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return com.google.shopping.merchant.issueresolution.v1.Callout.getDescriptor() .getEnumTypes() .get(0); } private static final CalloutStyleHint[] VALUES = values(); public static CalloutStyleHint valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type."); } if (desc.getIndex() == -1) { return UNRECOGNIZED; } return VALUES[desc.getIndex()]; } private final int value; private CalloutStyleHint(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:google.shopping.merchant.issueresolution.v1.Callout.CalloutStyleHint) } private int bitField0_; public static final int STYLE_HINT_FIELD_NUMBER = 1; private int styleHint_ = 0; /** * * * <pre> * Can be used to render messages with different severity in different styles. 
* Snippets off all types contain important information that should be * displayed to the business. * </pre> * * <code>.google.shopping.merchant.issueresolution.v1.Callout.CalloutStyleHint style_hint = 1; * </code> * * @return The enum numeric value on the wire for styleHint. */ @java.lang.Override public int getStyleHintValue() { return styleHint_; } /** * * * <pre> * Can be used to render messages with different severity in different styles. * Snippets off all types contain important information that should be * displayed to the business. * </pre> * * <code>.google.shopping.merchant.issueresolution.v1.Callout.CalloutStyleHint style_hint = 1; * </code> * * @return The styleHint. */ @java.lang.Override public com.google.shopping.merchant.issueresolution.v1.Callout.CalloutStyleHint getStyleHint() { com.google.shopping.merchant.issueresolution.v1.Callout.CalloutStyleHint result = com.google.shopping.merchant.issueresolution.v1.Callout.CalloutStyleHint.forNumber( styleHint_); return result == null ? com.google.shopping.merchant.issueresolution.v1.Callout.CalloutStyleHint.UNRECOGNIZED : result; } public static final int FULL_MESSAGE_FIELD_NUMBER = 3; private com.google.shopping.merchant.issueresolution.v1.TextWithTooltip fullMessage_; /** * * * <pre> * A full message that needs to be shown to the business. * </pre> * * <code>.google.shopping.merchant.issueresolution.v1.TextWithTooltip full_message = 3;</code> * * @return Whether the fullMessage field is set. */ @java.lang.Override public boolean hasFullMessage() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * A full message that needs to be shown to the business. * </pre> * * <code>.google.shopping.merchant.issueresolution.v1.TextWithTooltip full_message = 3;</code> * * @return The fullMessage. */ @java.lang.Override public com.google.shopping.merchant.issueresolution.v1.TextWithTooltip getFullMessage() { return fullMessage_ == null ? 
com.google.shopping.merchant.issueresolution.v1.TextWithTooltip.getDefaultInstance() : fullMessage_; } /** * * * <pre> * A full message that needs to be shown to the business. * </pre> * * <code>.google.shopping.merchant.issueresolution.v1.TextWithTooltip full_message = 3;</code> */ @java.lang.Override public com.google.shopping.merchant.issueresolution.v1.TextWithTooltipOrBuilder getFullMessageOrBuilder() { return fullMessage_ == null ? com.google.shopping.merchant.issueresolution.v1.TextWithTooltip.getDefaultInstance() : fullMessage_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (styleHint_ != com.google.shopping.merchant.issueresolution.v1.Callout.CalloutStyleHint .CALLOUT_STYLE_HINT_UNSPECIFIED .getNumber()) { output.writeEnum(1, styleHint_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(3, getFullMessage()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (styleHint_ != com.google.shopping.merchant.issueresolution.v1.Callout.CalloutStyleHint .CALLOUT_STYLE_HINT_UNSPECIFIED .getNumber()) { size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, styleHint_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getFullMessage()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.shopping.merchant.issueresolution.v1.Callout)) { return super.equals(obj); } 
com.google.shopping.merchant.issueresolution.v1.Callout other = (com.google.shopping.merchant.issueresolution.v1.Callout) obj; if (styleHint_ != other.styleHint_) return false; if (hasFullMessage() != other.hasFullMessage()) return false; if (hasFullMessage()) { if (!getFullMessage().equals(other.getFullMessage())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + STYLE_HINT_FIELD_NUMBER; hash = (53 * hash) + styleHint_; if (hasFullMessage()) { hash = (37 * hash) + FULL_MESSAGE_FIELD_NUMBER; hash = (53 * hash) + getFullMessage().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.shopping.merchant.issueresolution.v1.Callout parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.shopping.merchant.issueresolution.v1.Callout parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.shopping.merchant.issueresolution.v1.Callout parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.shopping.merchant.issueresolution.v1.Callout parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.shopping.merchant.issueresolution.v1.Callout parseFrom(byte[] data) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.shopping.merchant.issueresolution.v1.Callout parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.shopping.merchant.issueresolution.v1.Callout parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.shopping.merchant.issueresolution.v1.Callout parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.shopping.merchant.issueresolution.v1.Callout parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.shopping.merchant.issueresolution.v1.Callout parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.shopping.merchant.issueresolution.v1.Callout parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.shopping.merchant.issueresolution.v1.Callout parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } 
@java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.shopping.merchant.issueresolution.v1.Callout prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * An important message that should be highlighted. Usually displayed as a * banner. * </pre> * * Protobuf type {@code google.shopping.merchant.issueresolution.v1.Callout} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.shopping.merchant.issueresolution.v1.Callout) com.google.shopping.merchant.issueresolution.v1.CalloutOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.shopping.merchant.issueresolution.v1.IssueResolutionProto .internal_static_google_shopping_merchant_issueresolution_v1_Callout_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.shopping.merchant.issueresolution.v1.IssueResolutionProto .internal_static_google_shopping_merchant_issueresolution_v1_Callout_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.shopping.merchant.issueresolution.v1.Callout.class, com.google.shopping.merchant.issueresolution.v1.Callout.Builder.class); } // Construct using com.google.shopping.merchant.issueresolution.v1.Callout.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private 
Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getFullMessageFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; styleHint_ = 0; fullMessage_ = null; if (fullMessageBuilder_ != null) { fullMessageBuilder_.dispose(); fullMessageBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.shopping.merchant.issueresolution.v1.IssueResolutionProto .internal_static_google_shopping_merchant_issueresolution_v1_Callout_descriptor; } @java.lang.Override public com.google.shopping.merchant.issueresolution.v1.Callout getDefaultInstanceForType() { return com.google.shopping.merchant.issueresolution.v1.Callout.getDefaultInstance(); } @java.lang.Override public com.google.shopping.merchant.issueresolution.v1.Callout build() { com.google.shopping.merchant.issueresolution.v1.Callout result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.shopping.merchant.issueresolution.v1.Callout buildPartial() { com.google.shopping.merchant.issueresolution.v1.Callout result = new com.google.shopping.merchant.issueresolution.v1.Callout(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.shopping.merchant.issueresolution.v1.Callout result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.styleHint_ = styleHint_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.fullMessage_ = fullMessageBuilder_ == null ? 
fullMessage_ : fullMessageBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.shopping.merchant.issueresolution.v1.Callout) { return mergeFrom((com.google.shopping.merchant.issueresolution.v1.Callout) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.shopping.merchant.issueresolution.v1.Callout other) { if (other == com.google.shopping.merchant.issueresolution.v1.Callout.getDefaultInstance()) return this; if (other.styleHint_ != 0) { setStyleHintValue(other.getStyleHintValue()); } if (other.hasFullMessage()) { mergeFullMessage(other.getFullMessage()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == 
null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { styleHint_ = input.readEnum(); bitField0_ |= 0x00000001; break; } // case 8 case 26: { input.readMessage(getFullMessageFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private int styleHint_ = 0; /** * * * <pre> * Can be used to render messages with different severity in different styles. * Snippets off all types contain important information that should be * displayed to the business. * </pre> * * <code>.google.shopping.merchant.issueresolution.v1.Callout.CalloutStyleHint style_hint = 1; * </code> * * @return The enum numeric value on the wire for styleHint. */ @java.lang.Override public int getStyleHintValue() { return styleHint_; } /** * * * <pre> * Can be used to render messages with different severity in different styles. * Snippets off all types contain important information that should be * displayed to the business. * </pre> * * <code>.google.shopping.merchant.issueresolution.v1.Callout.CalloutStyleHint style_hint = 1; * </code> * * @param value The enum numeric value on the wire for styleHint to set. * @return This builder for chaining. */ public Builder setStyleHintValue(int value) { styleHint_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Can be used to render messages with different severity in different styles. * Snippets off all types contain important information that should be * displayed to the business. 
* </pre> * * <code>.google.shopping.merchant.issueresolution.v1.Callout.CalloutStyleHint style_hint = 1; * </code> * * @return The styleHint. */ @java.lang.Override public com.google.shopping.merchant.issueresolution.v1.Callout.CalloutStyleHint getStyleHint() { com.google.shopping.merchant.issueresolution.v1.Callout.CalloutStyleHint result = com.google.shopping.merchant.issueresolution.v1.Callout.CalloutStyleHint.forNumber( styleHint_); return result == null ? com.google.shopping.merchant.issueresolution.v1.Callout.CalloutStyleHint.UNRECOGNIZED : result; } /** * * * <pre> * Can be used to render messages with different severity in different styles. * Snippets off all types contain important information that should be * displayed to the business. * </pre> * * <code>.google.shopping.merchant.issueresolution.v1.Callout.CalloutStyleHint style_hint = 1; * </code> * * @param value The styleHint to set. * @return This builder for chaining. */ public Builder setStyleHint( com.google.shopping.merchant.issueresolution.v1.Callout.CalloutStyleHint value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; styleHint_ = value.getNumber(); onChanged(); return this; } /** * * * <pre> * Can be used to render messages with different severity in different styles. * Snippets off all types contain important information that should be * displayed to the business. * </pre> * * <code>.google.shopping.merchant.issueresolution.v1.Callout.CalloutStyleHint style_hint = 1; * </code> * * @return This builder for chaining. 
*/ public Builder clearStyleHint() { bitField0_ = (bitField0_ & ~0x00000001); styleHint_ = 0; onChanged(); return this; } private com.google.shopping.merchant.issueresolution.v1.TextWithTooltip fullMessage_; private com.google.protobuf.SingleFieldBuilderV3< com.google.shopping.merchant.issueresolution.v1.TextWithTooltip, com.google.shopping.merchant.issueresolution.v1.TextWithTooltip.Builder, com.google.shopping.merchant.issueresolution.v1.TextWithTooltipOrBuilder> fullMessageBuilder_; /** * * * <pre> * A full message that needs to be shown to the business. * </pre> * * <code>.google.shopping.merchant.issueresolution.v1.TextWithTooltip full_message = 3;</code> * * @return Whether the fullMessage field is set. */ public boolean hasFullMessage() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * A full message that needs to be shown to the business. * </pre> * * <code>.google.shopping.merchant.issueresolution.v1.TextWithTooltip full_message = 3;</code> * * @return The fullMessage. */ public com.google.shopping.merchant.issueresolution.v1.TextWithTooltip getFullMessage() { if (fullMessageBuilder_ == null) { return fullMessage_ == null ? com.google.shopping.merchant.issueresolution.v1.TextWithTooltip.getDefaultInstance() : fullMessage_; } else { return fullMessageBuilder_.getMessage(); } } /** * * * <pre> * A full message that needs to be shown to the business. * </pre> * * <code>.google.shopping.merchant.issueresolution.v1.TextWithTooltip full_message = 3;</code> */ public Builder setFullMessage( com.google.shopping.merchant.issueresolution.v1.TextWithTooltip value) { if (fullMessageBuilder_ == null) { if (value == null) { throw new NullPointerException(); } fullMessage_ = value; } else { fullMessageBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A full message that needs to be shown to the business. 
* </pre> * * <code>.google.shopping.merchant.issueresolution.v1.TextWithTooltip full_message = 3;</code> */ public Builder setFullMessage( com.google.shopping.merchant.issueresolution.v1.TextWithTooltip.Builder builderForValue) { if (fullMessageBuilder_ == null) { fullMessage_ = builderForValue.build(); } else { fullMessageBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A full message that needs to be shown to the business. * </pre> * * <code>.google.shopping.merchant.issueresolution.v1.TextWithTooltip full_message = 3;</code> */ public Builder mergeFullMessage( com.google.shopping.merchant.issueresolution.v1.TextWithTooltip value) { if (fullMessageBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && fullMessage_ != null && fullMessage_ != com.google.shopping.merchant.issueresolution.v1.TextWithTooltip .getDefaultInstance()) { getFullMessageBuilder().mergeFrom(value); } else { fullMessage_ = value; } } else { fullMessageBuilder_.mergeFrom(value); } if (fullMessage_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * A full message that needs to be shown to the business. * </pre> * * <code>.google.shopping.merchant.issueresolution.v1.TextWithTooltip full_message = 3;</code> */ public Builder clearFullMessage() { bitField0_ = (bitField0_ & ~0x00000002); fullMessage_ = null; if (fullMessageBuilder_ != null) { fullMessageBuilder_.dispose(); fullMessageBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * A full message that needs to be shown to the business. * </pre> * * <code>.google.shopping.merchant.issueresolution.v1.TextWithTooltip full_message = 3;</code> */ public com.google.shopping.merchant.issueresolution.v1.TextWithTooltip.Builder getFullMessageBuilder() { bitField0_ |= 0x00000002; onChanged(); return getFullMessageFieldBuilder().getBuilder(); } /** * * * <pre> * A full message that needs to be shown to the business. 
* </pre> * * <code>.google.shopping.merchant.issueresolution.v1.TextWithTooltip full_message = 3;</code> */ public com.google.shopping.merchant.issueresolution.v1.TextWithTooltipOrBuilder getFullMessageOrBuilder() { if (fullMessageBuilder_ != null) { return fullMessageBuilder_.getMessageOrBuilder(); } else { return fullMessage_ == null ? com.google.shopping.merchant.issueresolution.v1.TextWithTooltip.getDefaultInstance() : fullMessage_; } } /** * * * <pre> * A full message that needs to be shown to the business. * </pre> * * <code>.google.shopping.merchant.issueresolution.v1.TextWithTooltip full_message = 3;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.shopping.merchant.issueresolution.v1.TextWithTooltip, com.google.shopping.merchant.issueresolution.v1.TextWithTooltip.Builder, com.google.shopping.merchant.issueresolution.v1.TextWithTooltipOrBuilder> getFullMessageFieldBuilder() { if (fullMessageBuilder_ == null) { fullMessageBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.shopping.merchant.issueresolution.v1.TextWithTooltip, com.google.shopping.merchant.issueresolution.v1.TextWithTooltip.Builder, com.google.shopping.merchant.issueresolution.v1.TextWithTooltipOrBuilder>( getFullMessage(), getParentForChildren(), isClean()); fullMessage_ = null; } return fullMessageBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.shopping.merchant.issueresolution.v1.Callout) } // @@protoc_insertion_point(class_scope:google.shopping.merchant.issueresolution.v1.Callout) private static final com.google.shopping.merchant.issueresolution.v1.Callout DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new 
com.google.shopping.merchant.issueresolution.v1.Callout(); } public static com.google.shopping.merchant.issueresolution.v1.Callout getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<Callout> PARSER = new com.google.protobuf.AbstractParser<Callout>() { @java.lang.Override public Callout parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<Callout> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<Callout> getParserForType() { return PARSER; } @java.lang.Override public com.google.shopping.merchant.issueresolution.v1.Callout getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/fineract
37,085
fineract-investor/src/test/java/org/apache/fineract/investor/cob/loan/LoanAccountOwnerTransferBusinessStepTest.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.fineract.investor.cob.loan; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mockStatic; import static org.mockito.Mockito.never; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoInteractions; import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; import java.math.MathContext; import java.math.RoundingMode; import java.time.LocalDate; import java.time.ZoneId; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.stream.Stream; import org.apache.fineract.infrastructure.businessdate.domain.BusinessDateType; import org.apache.fineract.infrastructure.core.domain.ActionContext; import org.apache.fineract.infrastructure.core.domain.FineractPlatformTenant; import 
org.apache.fineract.infrastructure.core.service.ThreadLocalContextUtil; import org.apache.fineract.infrastructure.event.business.domain.BusinessEvent; import org.apache.fineract.infrastructure.event.business.domain.loan.LoanAccountSnapshotBusinessEvent; import org.apache.fineract.infrastructure.event.business.service.BusinessEventNotifierService; import org.apache.fineract.investor.data.ExternalTransferStatus; import org.apache.fineract.investor.data.ExternalTransferSubStatus; import org.apache.fineract.investor.domain.ExternalAssetOwner; import org.apache.fineract.investor.domain.ExternalAssetOwnerTransfer; import org.apache.fineract.investor.domain.ExternalAssetOwnerTransferLoanMapping; import org.apache.fineract.investor.domain.ExternalAssetOwnerTransferLoanMappingRepository; import org.apache.fineract.investor.domain.ExternalAssetOwnerTransferRepository; import org.apache.fineract.investor.domain.LoanOwnershipTransferBusinessEvent; import org.apache.fineract.investor.service.DelayedSettlementAttributeService; import org.apache.fineract.investor.service.ExternalAssetOwnerTransferOutstandingInterestCalculation; import org.apache.fineract.investor.service.LoanTransferabilityService; import org.apache.fineract.organisation.monetary.domain.MoneyHelper; import org.apache.fineract.portfolio.loanaccount.domain.Loan; import org.apache.fineract.portfolio.loanaccount.domain.LoanSummary; import org.apache.fineract.portfolio.loanaccount.service.LoanJournalEntryPoster; import org.apache.fineract.portfolio.loanproduct.domain.LoanProduct; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; import org.mockito.ArgumentCaptor; import 
org.mockito.Mock;
import org.mockito.MockedStatic;
import org.mockito.Mockito;
import org.mockito.junit.jupiter.MockitoExtension;
import org.springframework.data.domain.Sort;
import org.springframework.data.jpa.domain.Specification;
import org.springframework.lang.NonNull;

/**
 * Unit tests for {@code LoanAccountOwnerTransferBusinessStep}, the COB business step that
 * settles pending external-asset-owner (investor) transfers on a loan: sales, buybacks,
 * same-day sale+buyback cancellation, and the delayed-settlement (intermediate owner) flow.
 *
 * <p>All collaborators are Mockito mocks; {@code MoneyHelper} is stubbed via a class-level
 * {@link MockedStatic} opened once for the whole test class (see {@link #init()} /
 * {@link #destruct()}).
 */
@ExtendWith(MockitoExtension.class)
public class LoanAccountOwnerTransferBusinessStepTest {

    // Sentinel "open-ended" effective-date used by the production code for active transfers.
    public static final LocalDate FUTURE_DATE_9999_12_31 = LocalDate.of(9999, 12, 31);
    private static final Long LOAN_PRODUCT_ID = 2L;
    // "Today" as seen by the step; also installed as the COB business date in setUp().
    private final LocalDate actualDate = LocalDate.now(ZoneId.systemDefault());
    // Static mock kept open for the whole class; closed in destruct() to avoid leaking
    // the static stubbing into other test classes.
    private static final MockedStatic<MoneyHelper> MONEY_HELPER = mockStatic(MoneyHelper.class);

    @Mock
    private ExternalAssetOwnerTransferRepository externalAssetOwnerTransferRepository;
    @Mock
    private ExternalAssetOwnerTransferLoanMappingRepository externalAssetOwnerTransferLoanMappingRepository;
    @Mock
    private LoanJournalEntryPoster loanJournalEntryPoster;
    @Mock
    private BusinessEventNotifierService businessEventNotifierService;
    @Mock
    private LoanTransferabilityService loanTransferabilityService;
    @Mock
    private DelayedSettlementAttributeService delayedSettlementAttributeService;
    @Mock
    private ExternalAssetOwnerTransferOutstandingInterestCalculation externalAssetOwnerTransferOutstandingInterestCalculation;

    // System under test; rebuilt fresh for every test in setUp().
    private LoanAccountOwnerTransferBusinessStep underTest;

    /** Stubs MoneyHelper's static rounding configuration once for the whole class. */
    @BeforeAll
    public static void init() {
        MONEY_HELPER.when(MoneyHelper::getRoundingMode).thenReturn(RoundingMode.HALF_EVEN);
        MONEY_HELPER.when(MoneyHelper::getMathContext).thenReturn(new MathContext(12, RoundingMode.HALF_EVEN));
    }

    /** Releases the static MoneyHelper mock. */
    @AfterAll
    public static void destruct() {
        MONEY_HELPER.close();
    }

    /** Installs tenant/business-date thread-local context and builds a fresh step instance. */
    @BeforeEach
    public void setUp() {
        ThreadLocalContextUtil.setTenant(new FineractPlatformTenant(1L, "default", "Default", "Asia/Kolkata", null));
        ThreadLocalContextUtil.setActionContext(ActionContext.DEFAULT);
        ThreadLocalContextUtil.setBusinessDates(new HashMap<>(Map.of(BusinessDateType.BUSINESS_DATE, actualDate)));
        underTest = new LoanAccountOwnerTransferBusinessStep(externalAssetOwnerTransferRepository,
                externalAssetOwnerTransferLoanMappingRepository, loanJournalEntryPoster, businessEventNotifierService,
                loanTransferabilityService, delayedSettlementAttributeService,
                externalAssetOwnerTransferOutstandingInterestCalculation);
    }

    /** Clears the thread-local context so state cannot leak between tests. */
    @AfterEach
    public void tearDown() {
        ThreadLocalContextUtil.reset();
    }

    /** No effective transfer exists for the loan: the step is a no-op and returns the loan unchanged. */
    @Test
    public void givenLoanNoTransfer() {
        // given
        final Loan loanForProcessing = Mockito.mock(Loan.class);
        Long loanId = 1L;
        when(loanForProcessing.getId()).thenReturn(loanId);
        // when
        final Loan processedLoan = underTest.execute(loanForProcessing);
        // then
        verify(externalAssetOwnerTransferRepository, times(1)).findAll(any(Specification.class),
                eq(Sort.by(Sort.Direction.ASC, "id")));
        verifyNoInteractions(businessEventNotifierService, loanTransferabilityService, loanJournalEntryPoster);
        assertEquals(processedLoan, loanForProcessing);
    }

    /**
     * Two effective transfers whose statuses are not the legal PENDING+BUYBACK pair:
     * the step must fail fast with an IllegalStateException and touch nothing else.
     */
    @Test
    public void givenLoanTwoTransferButInvalidTransfers() {
        // given
        final LoanProduct loanProduct = Mockito.mock(LoanProduct.class);
        when(loanProduct.getId()).thenReturn(LOAN_PRODUCT_ID);
        final Loan loanForProcessing = Mockito.mock(Loan.class);
        when(loanForProcessing.getId()).thenReturn(1L);
        when(loanForProcessing.getLoanProduct()).thenReturn(loanProduct);
        when(delayedSettlementAttributeService.isEnabled(LOAN_PRODUCT_ID)).thenReturn(false);
        ExternalAssetOwnerTransfer firstResponseItem = Mockito.mock(ExternalAssetOwnerTransfer.class);
        ExternalAssetOwnerTransfer secondResponseItem = Mockito.mock(ExternalAssetOwnerTransfer.class);
        when(firstResponseItem.getStatus()).thenReturn(ExternalTransferStatus.PENDING);
        when(secondResponseItem.getStatus()).thenReturn(ExternalTransferStatus.ACTIVE);
        List<ExternalAssetOwnerTransfer> response = List.of(firstResponseItem, secondResponseItem);
        when(externalAssetOwnerTransferRepository.findAll(any(Specification.class), eq(Sort.by(Sort.Direction.ASC, "id"))))
                .thenReturn(response);
        // when
        IllegalStateException exception = assertThrows(IllegalStateException.class, () -> underTest.execute(loanForProcessing));
        // then
        assertEquals("Illegal transfer found. Expected PENDING and BUYBACK, found: PENDING and ACTIVE", exception.getMessage());
        verify(externalAssetOwnerTransferRepository, times(1)).findAll(any(Specification.class),
                eq(Sort.by(Sort.Direction.ASC, "id")));
        verifyNoInteractions(businessEventNotifierService, loanTransferabilityService, loanJournalEntryPoster);
    }

    /**
     * Delayed settlement forbids a same-day PENDING+BUYBACK pair: even the otherwise-legal
     * status combination must be rejected when the product has delayed settlement enabled.
     */
    @Test
    public void givenSameDaySaleAndBuybackWithDelayedSettlement() {
        // given
        final LoanProduct loanProduct = Mockito.mock(LoanProduct.class);
        when(loanProduct.getId()).thenReturn(LOAN_PRODUCT_ID);
        final Loan loanForProcessing = Mockito.mock(Loan.class);
        when(loanForProcessing.getId()).thenReturn(1L);
        when(loanForProcessing.getLoanProduct()).thenReturn(loanProduct);
        when(delayedSettlementAttributeService.isEnabled(LOAN_PRODUCT_ID)).thenReturn(true);
        ExternalAssetOwnerTransfer firstResponseItem = Mockito.mock(ExternalAssetOwnerTransfer.class);
        ExternalAssetOwnerTransfer secondResponseItem = Mockito.mock(ExternalAssetOwnerTransfer.class);
        when(firstResponseItem.getStatus()).thenReturn(ExternalTransferStatus.PENDING);
        when(secondResponseItem.getStatus()).thenReturn(ExternalTransferStatus.BUYBACK);
        List<ExternalAssetOwnerTransfer> response = List.of(firstResponseItem, secondResponseItem);
        when(externalAssetOwnerTransferRepository.findAll(any(Specification.class), eq(Sort.by(Sort.Direction.ASC, "id"))))
                .thenReturn(response);
        // when
        IllegalStateException exception = assertThrows(IllegalStateException.class, () -> underTest.execute(loanForProcessing));
        // then
        assertEquals("Delayed Settlement enabled, but found 2 transfers of statuses: PENDING and BUYBACK", exception.getMessage());
        verify(externalAssetOwnerTransferRepository, times(1)).findAll(any(Specification.class),
                eq(Sort.by(Sort.Direction.ASC, "id")));
        verifyNoInteractions(businessEventNotifierService, loanTransferabilityService, loanJournalEntryPoster);
    }

    /**
     * Same-day sale + buyback (delayed settlement OFF): both transfers are cancelled with
     * sub-status SAMEDAY_TRANSFERS. Four save() calls are expected — for each of the two
     * incoming transfers, the closed original plus its CANCELLED counterpart; captor indices
     * 0/1 belong to the first pair and 2/3 to the second.
     */
    @Test
    public void givenLoanTwoTransferSameDay() {
        // given
        final LoanProduct loanProduct = Mockito.mock(LoanProduct.class);
        when(loanProduct.getId()).thenReturn(LOAN_PRODUCT_ID);
        final Loan loanForProcessing = Mockito.mock(Loan.class);
        when(loanForProcessing.getId()).thenReturn(1L);
        when(loanForProcessing.getLoanProduct()).thenReturn(loanProduct);
        when(delayedSettlementAttributeService.isEnabled(LOAN_PRODUCT_ID)).thenReturn(false);
        ExternalAssetOwnerTransfer firstResponseItem = Mockito.mock(ExternalAssetOwnerTransfer.class);
        ExternalAssetOwnerTransfer secondResponseItem = Mockito.mock(ExternalAssetOwnerTransfer.class);
        ExternalAssetOwnerTransfer firstSaveResult = Mockito.mock(ExternalAssetOwnerTransfer.class);
        ExternalAssetOwnerTransfer secondSaveResult = Mockito.mock(ExternalAssetOwnerTransfer.class);
        ExternalAssetOwnerTransfer thirdSaveResult = Mockito.mock(ExternalAssetOwnerTransfer.class);
        ExternalAssetOwnerTransfer fourthSaveResult = Mockito.mock(ExternalAssetOwnerTransfer.class);
        // save() answers are consumed strictly in call order — do not reorder these stubs.
        when(externalAssetOwnerTransferRepository.save(any(ExternalAssetOwnerTransfer.class))).thenReturn(firstSaveResult)
                .thenReturn(secondSaveResult).thenReturn(thirdSaveResult).thenReturn(fourthSaveResult);
        when(firstResponseItem.getStatus()).thenReturn(ExternalTransferStatus.PENDING);
        when(secondResponseItem.getStatus()).thenReturn(ExternalTransferStatus.BUYBACK);
        List<ExternalAssetOwnerTransfer> response = List.of(firstResponseItem, secondResponseItem);
        when(externalAssetOwnerTransferRepository.findAll(any(Specification.class), eq(Sort.by(Sort.Direction.ASC, "id"))))
                .thenReturn(response);
        ArgumentCaptor<ExternalAssetOwnerTransfer> externalAssetOwnerTransferArgumentCaptor = ArgumentCaptor
                .forClass(ExternalAssetOwnerTransfer.class);
        // when
        final Loan processedLoan = underTest.execute(loanForProcessing);
        // then
        verify(externalAssetOwnerTransferRepository, times(1)).findAll(any(Specification.class),
                eq(Sort.by(Sort.Direction.ASC, "id")));
        verify(firstResponseItem).setEffectiveDateTo(actualDate);
        verify(externalAssetOwnerTransferRepository, times(4)).save(externalAssetOwnerTransferArgumentCaptor.capture());
        // First pair: cancelled copy (index 1) mirrors the closed original (index 0).
        assertEquals(externalAssetOwnerTransferArgumentCaptor.getAllValues().get(0).getOwner(),
                externalAssetOwnerTransferArgumentCaptor.getAllValues().get(1).getOwner());
        assertEquals(externalAssetOwnerTransferArgumentCaptor.getAllValues().get(0).getExternalId(),
                externalAssetOwnerTransferArgumentCaptor.getAllValues().get(1).getExternalId());
        assertEquals(ExternalTransferStatus.CANCELLED, externalAssetOwnerTransferArgumentCaptor.getAllValues().get(1).getStatus());
        assertEquals(ExternalTransferSubStatus.SAMEDAY_TRANSFERS,
                externalAssetOwnerTransferArgumentCaptor.getAllValues().get(1).getSubStatus());
        assertEquals(actualDate, externalAssetOwnerTransferArgumentCaptor.getAllValues().get(1).getSettlementDate());
        assertEquals(externalAssetOwnerTransferArgumentCaptor.getAllValues().get(0).getLoanId(),
                externalAssetOwnerTransferArgumentCaptor.getAllValues().get(1).getLoanId());
        assertEquals(externalAssetOwnerTransferArgumentCaptor.getAllValues().get(0).getPurchasePriceRatio(),
                externalAssetOwnerTransferArgumentCaptor.getAllValues().get(1).getPurchasePriceRatio());
        assertEquals(actualDate, externalAssetOwnerTransferArgumentCaptor.getAllValues().get(1).getEffectiveDateFrom());
        assertEquals(actualDate, externalAssetOwnerTransferArgumentCaptor.getAllValues().get(1).getEffectiveDateTo());
        // Second pair: same shape for the buyback transfer (indices 2 and 3).
        assertEquals(externalAssetOwnerTransferArgumentCaptor.getAllValues().get(2).getOwner(),
                externalAssetOwnerTransferArgumentCaptor.getAllValues().get(3).getOwner());
        assertEquals(externalAssetOwnerTransferArgumentCaptor.getAllValues().get(2).getExternalId(),
                externalAssetOwnerTransferArgumentCaptor.getAllValues().get(3).getExternalId());
        assertEquals(ExternalTransferStatus.CANCELLED, externalAssetOwnerTransferArgumentCaptor.getAllValues().get(3).getStatus());
        assertEquals(ExternalTransferSubStatus.SAMEDAY_TRANSFERS,
                externalAssetOwnerTransferArgumentCaptor.getAllValues().get(3).getSubStatus());
        assertEquals(actualDate, externalAssetOwnerTransferArgumentCaptor.getAllValues().get(3).getSettlementDate());
        assertEquals(externalAssetOwnerTransferArgumentCaptor.getAllValues().get(2).getLoanId(),
                externalAssetOwnerTransferArgumentCaptor.getAllValues().get(3).getLoanId());
        assertEquals(externalAssetOwnerTransferArgumentCaptor.getAllValues().get(2).getPurchasePriceRatio(),
                externalAssetOwnerTransferArgumentCaptor.getAllValues().get(3).getPurchasePriceRatio());
        assertEquals(actualDate, externalAssetOwnerTransferArgumentCaptor.getAllValues().get(3).getEffectiveDateFrom());
        assertEquals(actualDate, externalAssetOwnerTransferArgumentCaptor.getAllValues().get(3).getEffectiveDateTo());
        assertEquals(processedLoan, loanForProcessing);
        verifyNoInteractions(loanTransferabilityService, loanJournalEntryPoster);
        // One ownership-transfer event per cancelled pair, carrying the SECOND save result of each pair.
        ArgumentCaptor<BusinessEvent<?>> businessEventArgumentCaptor = verifyBusinessEvents(2);
        verifyLoanTransferBusinessEvent(businessEventArgumentCaptor, 0, loanForProcessing, secondSaveResult);
        verifyLoanTransferBusinessEvent(businessEventArgumentCaptor, 1, loanForProcessing, fourthSaveResult);
    }

    // Parameter source for givenLoanBuyback: both buyback flavours behave the same way.
    private static Stream<Arguments> buybackStatusDataProvider() {
        return Stream.of(Arguments.of(ExternalTransferStatus.BUYBACK_INTERMEDIATE), Arguments.of(ExternalTransferStatus.BUYBACK));
    }

    /**
     * Single effective BUYBACK(_INTERMEDIATE) transfer: the matching active transfer (looked up
     * via findOne) is closed, the loan mapping is deleted, journal entries are posted for the
     * buyback, and both an ownership-transfer event and an account-snapshot event are raised.
     */
    @ParameterizedTest
    @MethodSource("buybackStatusDataProvider")
    public void givenLoanBuyback(final ExternalTransferStatus buybackStatus) {
        // given
        final Loan loanForProcessing = Mockito.mock(Loan.class);
        when(loanForProcessing.getId()).thenReturn(1L);
        LoanSummary loanSummary = Mockito.mock(LoanSummary.class);
        when(loanForProcessing.getSummary()).thenReturn(loanSummary);
        ExternalAssetOwnerTransfer firstResponseItem = Mockito.mock(ExternalAssetOwnerTransfer.class);
        ExternalAssetOwnerTransfer secondResponseItem = Mockito.mock(ExternalAssetOwnerTransfer.class);
        when(firstResponseItem.getStatus()).thenReturn(buybackStatus);
        List<ExternalAssetOwnerTransfer> response = List.of(firstResponseItem);
        when(externalAssetOwnerTransferRepository.findAll(any(Specification.class), eq(Sort.by(Sort.Direction.ASC, "id"))))
                .thenReturn(response);
        // secondResponseItem plays the currently-active transfer being bought back.
        when(externalAssetOwnerTransferRepository.findOne(any(Specification.class))).thenReturn(Optional.of(secondResponseItem));
        ArgumentCaptor<ExternalAssetOwnerTransfer> externalAssetOwnerTransferArgumentCaptor = ArgumentCaptor
                .forClass(ExternalAssetOwnerTransfer.class);
        when(externalAssetOwnerTransferRepository.save(firstResponseItem)).thenReturn(firstResponseItem);
        when(externalAssetOwnerTransferRepository.save(secondResponseItem)).thenReturn(secondResponseItem);
        // when
        final Loan processedLoan = underTest.execute(loanForProcessing);
        // then
        verifyNoInteractions(loanTransferabilityService);
        verify(externalAssetOwnerTransferRepository, times(1)).findAll(any(Specification.class),
                eq(Sort.by(Sort.Direction.ASC, "id")));
        verify(firstResponseItem).setEffectiveDateTo(actualDate);
        verify(externalAssetOwnerTransferRepository, times(2)).save(externalAssetOwnerTransferArgumentCaptor.capture());
        verify(secondResponseItem).setEffectiveDateTo(actualDate);
        verify(externalAssetOwnerTransferLoanMappingRepository, times(1)).deleteByLoanIdAndOwnerTransfer(1L, secondResponseItem);
        assertEquals(processedLoan, loanForProcessing);
        verify(loanJournalEntryPoster).postJournalEntriesForExternalOwnerTransfer(loanForProcessing, firstResponseItem, null);
        verifyNoMoreInteractions(loanJournalEntryPoster);
        ArgumentCaptor<BusinessEvent<?>> businessEventArgumentCaptor = verifyBusinessEvents(2);
        verifyLoanTransferBusinessEvent(businessEventArgumentCaptor, 0, loanForProcessing, firstResponseItem);
        verifyLoanAccountSnapshotBusinessEvent(businessEventArgumentCaptor, 1, loanForProcessing);
    }

    // (delayed-settlement flag, incoming pending status, expected resulting active status)
    private static Stream<Arguments> loanSaleTransferableDataProvider() {
        return Stream.of(Arguments.of(false, ExternalTransferStatus.PENDING, ExternalTransferStatus.ACTIVE),
                Arguments.of(true, ExternalTransferStatus.PENDING_INTERMEDIATE, ExternalTransferStatus.ACTIVE_INTERMEDIATE));
    }

    /**
     * Transferable sale: the pending transfer is closed today and a new ACTIVE(_INTERMEDIATE)
     * transfer is created, effective from tomorrow until the open-ended 9999-12-31 date;
     * the loan mapping is saved, journal entries posted, and two business events raised.
     */
    @ParameterizedTest
    @MethodSource("loanSaleTransferableDataProvider")
    public void givenLoanSaleTransferable(final boolean isDelayedSettlementEnabled, final ExternalTransferStatus pendingStatus,
            final ExternalTransferStatus expectedActiveStatus) {
        // given
        final LoanProduct loanProduct = Mockito.mock(LoanProduct.class);
        when(loanProduct.getId()).thenReturn(LOAN_PRODUCT_ID);
        final Loan loanForProcessing = Mockito.mock(Loan.class);
        when(loanForProcessing.getId()).thenReturn(1L);
        when(loanForProcessing.getLoanProduct()).thenReturn(loanProduct);
        when(delayedSettlementAttributeService.isEnabled(LOAN_PRODUCT_ID)).thenReturn(isDelayedSettlementEnabled);
        LoanSummary loanSummary = Mockito.mock(LoanSummary.class);
        when(loanForProcessing.getSummary()).thenReturn(loanSummary);
        ExternalAssetOwnerTransfer pendingTransfer = new ExternalAssetOwnerTransfer();
        pendingTransfer.setStatus(pendingStatus);
        List<ExternalAssetOwnerTransfer> response = List.of(pendingTransfer);
        when(externalAssetOwnerTransferRepository.findAll(any(Specification.class), eq(Sort.by(Sort.Direction.ASC, "id"))))
                .thenReturn(response);
        when(loanTransferabilityService.isTransferable(loanForProcessing, pendingTransfer)).thenReturn(true);
        ExternalAssetOwnerTransfer savedNewTransfer = new ExternalAssetOwnerTransfer();
        savedNewTransfer.setStatus(expectedActiveStatus);
        // Order-sensitive: first save() closes the pending transfer, second creates the active one.
        when(externalAssetOwnerTransferRepository.save(any())).thenReturn(pendingTransfer).thenReturn(savedNewTransfer);
        // when
        final Loan processedLoan = underTest.execute(loanForProcessing);
        // then
        verify(loanTransferabilityService).isTransferable(loanForProcessing, pendingTransfer);
        verifyNoMoreInteractions(loanTransferabilityService);
        verify(externalAssetOwnerTransferRepository).findAll(any(Specification.class), eq(Sort.by(Sort.Direction.ASC, "id")));
        ArgumentCaptor<ExternalAssetOwnerTransfer> externalAssetOwnerTransferArgumentCaptor = ArgumentCaptor
                .forClass(ExternalAssetOwnerTransfer.class);
        verify(externalAssetOwnerTransferRepository, times(2)).save(externalAssetOwnerTransferArgumentCaptor.capture());
        ExternalAssetOwnerTransfer capturedPendingTransfer = externalAssetOwnerTransferArgumentCaptor.getAllValues().get(0);
        ExternalAssetOwnerTransfer capturedActiveTransfer = externalAssetOwnerTransferArgumentCaptor.getAllValues().get(1);
        assertEquals(actualDate, capturedPendingTransfer.getEffectiveDateTo());
        assertCommonFieldsOfPendingAndActiveTransfers(capturedPendingTransfer, capturedActiveTransfer);
        assertEquals(expectedActiveStatus, capturedActiveTransfer.getStatus());
        assertEquals(actualDate, capturedActiveTransfer.getSettlementDate());
        assertEquals(actualDate.plusDays(1), capturedActiveTransfer.getEffectiveDateFrom());
        assertEquals(FUTURE_DATE_9999_12_31, capturedActiveTransfer.getEffectiveDateTo());
        ArgumentCaptor<ExternalAssetOwnerTransferLoanMapping> externalAssetOwnerTransferLoanMappingArgumentCaptor = ArgumentCaptor
                .forClass(ExternalAssetOwnerTransferLoanMapping.class);
        verify(externalAssetOwnerTransferLoanMappingRepository).save(externalAssetOwnerTransferLoanMappingArgumentCaptor.capture());
        assertEquals(1L, externalAssetOwnerTransferLoanMappingArgumentCaptor.getValue().getLoanId());
        assertEquals(savedNewTransfer, externalAssetOwnerTransferLoanMappingArgumentCaptor.getValue().getOwnerTransfer());
        assertEquals(processedLoan, loanForProcessing);
        // NOTE(review): this repeats the mapping-repository verify from a few lines above;
        // it is harmless (Mockito allows re-verification) but redundant and could be removed.
        verify(externalAssetOwnerTransferLoanMappingRepository).save(externalAssetOwnerTransferLoanMappingArgumentCaptor.capture());
        verify(loanJournalEntryPoster).postJournalEntriesForExternalOwnerTransfer(loanForProcessing, savedNewTransfer, null);
        verifyNoMoreInteractions(loanJournalEntryPoster);
        ArgumentCaptor<BusinessEvent<?>> businessEventArgumentCaptor = verifyBusinessEvents(2);
        verifyLoanTransferBusinessEvent(businessEventArgumentCaptor, 0, loanForProcessing, savedNewTransfer);
        verifyLoanAccountSnapshotBusinessEvent(businessEventArgumentCaptor, 1, loanForProcessing);
    }

    // (incoming pending status, expected declined sub-status) combinations.
    private static Stream<Arguments> loanSaleNotTransferableDataProvider() {
        return Stream.of(Arguments.of(ExternalTransferStatus.PENDING, ExternalTransferSubStatus.BALANCE_ZERO),
                Arguments.of(ExternalTransferStatus.PENDING, ExternalTransferSubStatus.BALANCE_NEGATIVE),
                Arguments.of(ExternalTransferStatus.PENDING_INTERMEDIATE, ExternalTransferSubStatus.BALANCE_ZERO),
                Arguments.of(ExternalTransferStatus.PENDING_INTERMEDIATE, ExternalTransferSubStatus.BALANCE_NEGATIVE));
    }

    /**
     * Non-transferable sale: the pending transfer is closed and replaced with a DECLINED
     * transfer carrying the sub-status supplied by LoanTransferabilityService; all effective
     * dates collapse to today, no journal entries are posted, and one event is raised.
     */
    @ParameterizedTest
    @MethodSource("loanSaleNotTransferableDataProvider")
    public void givenLoanSaleNotTransferable(final ExternalTransferStatus pendingStatus,
            final ExternalTransferSubStatus expectedSubStatus) {
        // given
        final Loan loanForProcessing = Mockito.mock(Loan.class);
        when(loanForProcessing.getId()).thenReturn(1L);
        ExternalAssetOwnerTransfer pendingTransfer = new ExternalAssetOwnerTransfer();
        pendingTransfer.setStatus(pendingStatus);
        List<ExternalAssetOwnerTransfer> response = List.of(pendingTransfer);
        when(externalAssetOwnerTransferRepository.findAll(any(Specification.class), eq(Sort.by(Sort.Direction.ASC, "id"))))
                .thenReturn(response);
        when(loanTransferabilityService.isTransferable(loanForProcessing, pendingTransfer)).thenReturn(false);
        when(loanTransferabilityService.getDeclinedSubStatus(loanForProcessing)).thenReturn(expectedSubStatus);
        ExternalAssetOwnerTransfer savedNewTransfer = Mockito.mock(ExternalAssetOwnerTransfer.class);
        when(savedNewTransfer.getStatus()).thenReturn(ExternalTransferStatus.DECLINED);
        when(externalAssetOwnerTransferRepository.save(any())).thenReturn(pendingTransfer).thenReturn(savedNewTransfer);
        // when
        final Loan processedLoan = underTest.execute(loanForProcessing);
        // then
        verify(loanTransferabilityService).isTransferable(loanForProcessing, pendingTransfer);
        verify(externalAssetOwnerTransferRepository, times(1)).findAll(any(Specification.class),
                eq(Sort.by(Sort.Direction.ASC, "id")));
        ArgumentCaptor<ExternalAssetOwnerTransfer> externalAssetOwnerTransferArgumentCaptor = ArgumentCaptor
                .forClass(ExternalAssetOwnerTransfer.class);
        verify(externalAssetOwnerTransferRepository, times(2)).save(externalAssetOwnerTransferArgumentCaptor.capture());
        ExternalAssetOwnerTransfer capturedPendingTransfer = externalAssetOwnerTransferArgumentCaptor.getAllValues().get(0);
        ExternalAssetOwnerTransfer capturedActiveTransfer = externalAssetOwnerTransferArgumentCaptor.getAllValues().get(1);
        assertEquals(actualDate, capturedPendingTransfer.getEffectiveDateTo());
        assertCommonFieldsOfPendingAndActiveTransfers(capturedPendingTransfer, capturedActiveTransfer);
        assertEquals(ExternalTransferStatus.DECLINED, capturedActiveTransfer.getStatus());
        assertEquals(expectedSubStatus, capturedActiveTransfer.getSubStatus());
        assertEquals(actualDate, capturedActiveTransfer.getSettlementDate());
        assertEquals(actualDate, capturedActiveTransfer.getEffectiveDateFrom());
        assertEquals(actualDate, capturedActiveTransfer.getEffectiveDateTo());
        assertEquals(processedLoan, loanForProcessing);
        verifyNoInteractions(loanJournalEntryPoster);
        ArgumentCaptor<BusinessEvent<?>> businessEventArgumentCaptor = verifyBusinessEvents(1);
        verifyLoanTransferBusinessEvent(businessEventArgumentCaptor, 0, loanForProcessing, savedNewTransfer);
    }

    /**
     * Delayed-settlement second leg: intermediate owner -> investor. The ACTIVE_INTERMEDIATE
     * transfer is closed (save #1), the pending transfer closed (save #2), a new ACTIVE
     * transfer created (save #3), and journal entries are posted with the previous
     * (intermediate) owner so accounting can move the asset between owners.
     */
    @Test
    public void testSaleLoanWithDelayedSettlementFromIntermediateToInvestor() {
        // given
        final LoanProduct loanProduct = Mockito.mock(LoanProduct.class);
        when(loanProduct.getId()).thenReturn(LOAN_PRODUCT_ID);
        final Loan loanForProcessing = Mockito.mock(Loan.class);
        when(loanForProcessing.getId()).thenReturn(1L);
        when(loanForProcessing.getLoanProduct()).thenReturn(loanProduct);
        when(delayedSettlementAttributeService.isEnabled(LOAN_PRODUCT_ID)).thenReturn(true);
        LoanSummary loanSummary = Mockito.mock(LoanSummary.class);
        when(loanForProcessing.getSummary()).thenReturn(loanSummary);
        ExternalAssetOwner previousOwner = new ExternalAssetOwner();
        ExternalAssetOwnerTransfer activeIntermediateTransfer = new ExternalAssetOwnerTransfer();
        activeIntermediateTransfer.setOwner(previousOwner);
        activeIntermediateTransfer.setStatus(ExternalTransferStatus.ACTIVE_INTERMEDIATE);
        when(externalAssetOwnerTransferRepository.findOne(any(Specification.class))).thenReturn(Optional.of(activeIntermediateTransfer));
        ExternalAssetOwnerTransfer pendingTransfer = new ExternalAssetOwnerTransfer();
        pendingTransfer.setStatus(ExternalTransferStatus.PENDING);
        List<ExternalAssetOwnerTransfer> response = List.of(pendingTransfer);
        when(externalAssetOwnerTransferRepository.findAll(any(Specification.class), eq(Sort.by(Sort.Direction.ASC, "id"))))
                .thenReturn(response);
        when(loanTransferabilityService.isTransferable(loanForProcessing, pendingTransfer)).thenReturn(true);
        ExternalAssetOwnerTransfer savedNewTransfer = new ExternalAssetOwnerTransfer();
        savedNewTransfer.setStatus(ExternalTransferStatus.ACTIVE);
        when(externalAssetOwnerTransferRepository.save(any())).thenReturn(pendingTransfer).thenReturn(savedNewTransfer);
        // when
        final Loan processedLoan = underTest.execute(loanForProcessing);
        // then
        verify(loanTransferabilityService).isTransferable(loanForProcessing, pendingTransfer);
        verifyNoMoreInteractions(loanTransferabilityService);
        verify(externalAssetOwnerTransferRepository).findAll(any(Specification.class), eq(Sort.by(Sort.Direction.ASC, "id")));
        verify(externalAssetOwnerTransferRepository).findOne(any(Specification.class));
        ArgumentCaptor<ExternalAssetOwnerTransfer> externalAssetOwnerTransferArgumentCaptor = ArgumentCaptor
                .forClass(ExternalAssetOwnerTransfer.class);
        verify(externalAssetOwnerTransferRepository, times(3)).save(externalAssetOwnerTransferArgumentCaptor.capture());
        ExternalAssetOwnerTransfer capturedActiveIntermediateTransfer = externalAssetOwnerTransferArgumentCaptor.getAllValues().get(0);
        ExternalAssetOwnerTransfer capturedPendingTransfer = externalAssetOwnerTransferArgumentCaptor.getAllValues().get(1);
        ExternalAssetOwnerTransfer capturedActiveTransfer = externalAssetOwnerTransferArgumentCaptor.getAllValues().get(2);
        assertEquals(actualDate, capturedActiveIntermediateTransfer.getEffectiveDateTo());
        assertEquals(actualDate, capturedPendingTransfer.getEffectiveDateTo());
        assertCommonFieldsOfPendingAndActiveTransfers(capturedPendingTransfer, capturedActiveTransfer);
        assertEquals(ExternalTransferStatus.ACTIVE, capturedActiveTransfer.getStatus());
        assertEquals(actualDate, capturedActiveTransfer.getSettlementDate());
        assertEquals(actualDate.plusDays(1), capturedActiveTransfer.getEffectiveDateFrom());
        assertEquals(FUTURE_DATE_9999_12_31, capturedActiveTransfer.getEffectiveDateTo());
        ArgumentCaptor<ExternalAssetOwnerTransferLoanMapping> externalAssetOwnerTransferLoanMappingArgumentCaptor = ArgumentCaptor
                .forClass(ExternalAssetOwnerTransferLoanMapping.class);
        verify(externalAssetOwnerTransferLoanMappingRepository, times(1))
                .save(externalAssetOwnerTransferLoanMappingArgumentCaptor.capture());
        assertEquals(1L, externalAssetOwnerTransferLoanMappingArgumentCaptor.getValue().getLoanId());
        assertEquals(savedNewTransfer, externalAssetOwnerTransferLoanMappingArgumentCaptor.getValue().getOwnerTransfer());
        assertEquals(processedLoan, loanForProcessing);
        verify(loanJournalEntryPoster).postJournalEntriesForExternalOwnerTransfer(loanForProcessing, savedNewTransfer, previousOwner);
        verifyNoMoreInteractions(loanJournalEntryPoster);
        ArgumentCaptor<BusinessEvent<?>> businessEventArgumentCaptor = verifyBusinessEvents(2);
        verifyLoanTransferBusinessEvent(businessEventArgumentCaptor, 0, loanForProcessing, savedNewTransfer);
        verifyLoanAccountSnapshotBusinessEvent(businessEventArgumentCaptor, 1, loanForProcessing);
    }

    /**
     * Delayed-settlement second leg with no ACTIVE_INTERMEDIATE transfer on record: the step
     * must abort with an IllegalStateException before saving anything or raising any event.
     */
    @Test
    public void testSaleLoanWithDelayedSettlementFromIntermediateToInvestorActiveIntermediateTransferNotFound() {
        // given
        final LoanProduct loanProduct = Mockito.mock(LoanProduct.class);
        when(loanProduct.getId()).thenReturn(LOAN_PRODUCT_ID);
        final Loan loanForProcessing = Mockito.mock(Loan.class);
        when(loanForProcessing.getId()).thenReturn(1L);
        when(loanForProcessing.getLoanProduct()).thenReturn(loanProduct);
        when(delayedSettlementAttributeService.isEnabled(LOAN_PRODUCT_ID)).thenReturn(true);
        when(externalAssetOwnerTransferRepository.findOne(any(Specification.class))).thenReturn(Optional.empty());
        ExternalAssetOwnerTransfer pendingTransfer = new ExternalAssetOwnerTransfer();
        pendingTransfer.setStatus(ExternalTransferStatus.PENDING);
        List<ExternalAssetOwnerTransfer> response = List.of(pendingTransfer);
        when(externalAssetOwnerTransferRepository.findAll(any(Specification.class), eq(Sort.by(Sort.Direction.ASC, "id"))))
                .thenReturn(response);
        when(loanTransferabilityService.isTransferable(loanForProcessing, pendingTransfer)).thenReturn(true);
        // when
        IllegalStateException exception = assertThrows(IllegalStateException.class, () -> underTest.execute(loanForProcessing));
        // then
        assertEquals("Expected a effective transfer of ACTIVE_INTERMEDIATE status to be present.", exception.getMessage());
        verify(loanTransferabilityService).isTransferable(loanForProcessing, pendingTransfer);
        verifyNoMoreInteractions(loanTransferabilityService);
        verifyNoInteractions(loanJournalEntryPoster);
        verify(externalAssetOwnerTransferRepository).findAll(any(Specification.class), eq(Sort.by(Sort.Direction.ASC, "id")));
        verify(externalAssetOwnerTransferRepository).findOne(any(Specification.class));
        verify(externalAssetOwnerTransferRepository, never()).save(any(ExternalAssetOwnerTransfer.class));
        verifyNoInteractions(externalAssetOwnerTransferLoanMappingRepository);
        verifyBusinessEvents(0);
    }

    /** The step reports its machine-readable (enum-style) name. */
    @Test
    public void testGetEnumStyledNameSuccessScenario() {
        final String actualEnumName = underTest.getEnumStyledName();
        assertNotNull(actualEnumName);
        assertEquals("EXTERNAL_ASSET_OWNER_TRANSFER", actualEnumName);
    }

    /** The step reports its human-readable display name. */
    @Test
    public void testGetHumanReadableNameSuccessScenario() {
        final String actualEnumName = underTest.getHumanReadableName();
        assertNotNull(actualEnumName);
        assertEquals("Execute external asset owner transfer", actualEnumName);
    }

    /**
     * Verifies exactly {@code expectedBusinessEvents} events were published and returns the
     * captor so callers can assert on individual events by index.
     */
    @NonNull
    private ArgumentCaptor<BusinessEvent<?>> verifyBusinessEvents(int expectedBusinessEvents) {
        @SuppressWarnings("unchecked")
        ArgumentCaptor<BusinessEvent<?>> businessEventArgumentCaptor = ArgumentCaptor.forClass(BusinessEvent.class);
        verify(businessEventNotifierService, times(expectedBusinessEvents)).notifyPostBusinessEvent(businessEventArgumentCaptor.capture());
        return businessEventArgumentCaptor;
    }

    /** Asserts that the captured event at {@code index} is an ownership-transfer event with the expected payload. */
    private void verifyLoanTransferBusinessEvent(ArgumentCaptor<BusinessEvent<?>> businessEventArgumentCaptor, int index,
            Loan expectedLoan, ExternalAssetOwnerTransfer expectedAssetOwnerTransfer) {
        assertTrue(businessEventArgumentCaptor.getAllValues().get(index) instanceof LoanOwnershipTransferBusinessEvent);
        assertEquals(expectedLoan, ((LoanOwnershipTransferBusinessEvent) businessEventArgumentCaptor.getAllValues().get(index)).getLoan());
        assertEquals(expectedAssetOwnerTransfer,
                ((LoanOwnershipTransferBusinessEvent) businessEventArgumentCaptor.getAllValues().get(index)).get());
    }

    /** Asserts that the captured event at {@code index} is an account-snapshot event for the expected loan. */
    private void verifyLoanAccountSnapshotBusinessEvent(ArgumentCaptor<BusinessEvent<?>> businessEventArgumentCaptor, int index,
            Loan expectedLoan) {
        assertTrue(businessEventArgumentCaptor.getAllValues().get(index) instanceof LoanAccountSnapshotBusinessEvent);
        assertEquals(expectedLoan, ((LoanAccountSnapshotBusinessEvent) businessEventArgumentCaptor.getAllValues().get(index)).get());
    }

    /** Asserts the fields the replacement transfer must copy verbatim from the pending transfer. */
    private void assertCommonFieldsOfPendingAndActiveTransfers(final ExternalAssetOwnerTransfer pendingTransfer,
            final ExternalAssetOwnerTransfer activeTransfer) {
        assertEquals(pendingTransfer.getOwner(), activeTransfer.getOwner());
        assertEquals(pendingTransfer.getExternalId(), activeTransfer.getExternalId());
        assertEquals(pendingTransfer.getLoanId(), activeTransfer.getLoanId());
        assertEquals(pendingTransfer.getPurchasePriceRatio(), activeTransfer.getPurchasePriceRatio());
    }
}
apache/poi
36,756
poi/src/main/java/org/apache/poi/poifs/filesystem/POIFSFileSystem.java
/* ==================================================================== Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==================================================================== */ package org.apache.poi.poifs.filesystem; import java.io.Closeable; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.nio.ByteBuffer; import java.nio.channels.Channels; import java.nio.channels.FileChannel; import java.nio.channels.ReadableByteChannel; import java.nio.file.Files; import java.nio.file.Paths; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.List; import org.apache.commons.io.output.UnsynchronizedByteArrayOutputStream; import org.apache.logging.log4j.Logger; import org.apache.poi.logging.PoiLogManager; import org.apache.poi.EmptyFileException; import org.apache.poi.poifs.common.POIFSBigBlockSize; import org.apache.poi.poifs.common.POIFSConstants; import org.apache.poi.poifs.dev.POIFSViewable; import org.apache.poi.poifs.nio.ByteArrayBackedDataSource; import org.apache.poi.poifs.nio.DataSource; import org.apache.poi.poifs.nio.FileBackedDataSource; import org.apache.poi.poifs.property.DirectoryProperty; import org.apache.poi.poifs.property.DocumentProperty; 
import org.apache.poi.poifs.property.PropertyTable; import org.apache.poi.poifs.storage.BATBlock; import org.apache.poi.poifs.storage.BATBlock.BATBlockAndIndex; import org.apache.poi.poifs.storage.HeaderBlock; import org.apache.poi.util.IOUtils; import org.apache.poi.util.Internal; /** * <p>This is the main class of the POIFS system; it manages the entire * life cycle of the filesystem.</p> * <p>This is the new NIO version, which uses less memory</p> */ public class POIFSFileSystem extends BlockStore implements POIFSViewable, Closeable { //arbitrarily selected; may need to increase private static final int DEFAULT_MAX_RECORD_LENGTH = 100_000; private static int MAX_RECORD_LENGTH = DEFAULT_MAX_RECORD_LENGTH; private static final int MAX_ALLOCATION_SIZE = 250_000_000; private static final Logger LOG = PoiLogManager.getLogger(POIFSFileSystem.class); /** * Maximum number size (in blocks) of the allocation table as supported by * POI. * <p> * This constant has been chosen to help POI identify corrupted data in the * header block (rather than crash immediately with {@link OutOfMemoryError} * ). It's not clear if the compound document format actually specifies any * upper limits. For files with 512 byte blocks, having an allocation table * of 65,335 blocks would correspond to a total file size of 4GB. Needless * to say, POI probably cannot handle files anywhere near that size. */ private static final int MAX_BLOCK_COUNT = 65535; private POIFSMiniStore _mini_store; private PropertyTable _property_table; private final List<BATBlock> _xbat_blocks; private final List<BATBlock> _bat_blocks; private HeaderBlock _header; private DirectoryNode _root; protected DataSource _data; /** * What big block size the file uses. 
Most files
 * use 512 bytes, but a few use 4096
 */
private POIFSBigBlockSize bigBlockSize = POIFSConstants.SMALLER_BIG_BLOCK_SIZE_DETAILS;

/**
 * Overrides the allocation sanity-check limit used by this class.
 *
 * @param length the max record length allowed for POIFSFileSystem
 */
public static void setMaxRecordLength(int length) {
    MAX_RECORD_LENGTH = length;
}

/**
 * @return the max record length allowed for POIFSFileSystem
 */
public static int getMaxRecordLength() {
    return MAX_RECORD_LENGTH;
}

/**
 * Shared constructor core: builds an empty header, property table and
 * mini store, and empty BAT/XBAT lists.
 *
 * @param newFS whether to also allocate a fresh in-memory data source
 *              (true when creating a brand-new filesystem for writing;
 *              false when the caller will attach its own data source)
 */
private POIFSFileSystem(boolean newFS) {
    _header = new HeaderBlock(bigBlockSize);
    _property_table = new PropertyTable(_header);
    _mini_store = new POIFSMiniStore(this, _property_table.getRoot(), new ArrayList<>(), _header);
    _xbat_blocks = new ArrayList<>();
    _bat_blocks = new ArrayList<>();
    // Root directory node is created lazily on first getRoot() call
    _root = null;

    if (newFS) {
        createNewDataSource();
    }
}

protected void createNewDataSource() {
    // Data needs to initially hold just the header block,
    // a single bat block, and an empty properties section
    // (3 big blocks total; multiplyExact guards against overflow)
    long blockSize = Math.multiplyExact(bigBlockSize.getBigBlockSize(), 3L);
    _data = new ByteArrayBackedDataSource(IOUtils.safelyAllocate(blockSize, MAX_RECORD_LENGTH));
}

/**
 * Constructor, intended for writing
 */
public POIFSFileSystem() {
    this(true);

    // Reserve block 0 for the start of the Properties Table
    // Create a single empty BAT, and pop that at offset 1
    _header.setBATCount(1);
    _header.setBATArray(new int[]{1});
    BATBlock bb = BATBlock.createEmptyBATBlock(bigBlockSize, false);
    bb.setOurBlockIndex(1);
    _bat_blocks.add(bb);

    // Block 0 terminates the property-table chain; block 1 is the FAT itself
    setNextBlock(0, POIFSConstants.END_OF_CHAIN);
    setNextBlock(1, POIFSConstants.FAT_SECTOR_BLOCK);

    _property_table.setStartBlock(0);
}

/**
 * <p>Creates a POIFSFileSystem from a {@code File}. This uses less memory than
 * creating from an {@code InputStream}.
The File will be opened read-only</p> * * <p>Note that with this constructor, you will need to call {@link #close()} * when you're done to have the underlying file closed, as the file is * kept open during normal operation to read the data out.</p> * * @param file the File from which to read the data * @throws IOException on errors reading, or on invalid data */ public POIFSFileSystem(File file) throws IOException { this(file, true); } /** * <p>Creates a POIFSFileSystem from a {@code File}. This uses less memory than * creating from an {@code InputStream}.</p> * * <p>Note that with this constructor, you will need to call {@link #close()} * when you're done to have the underlying file closed, as the file is * kept open during normal operation to read the data out.</p> * * @param file the File from which to read or read/write the data * @param readOnly whether the POIFileSystem will only be used in read-only mode * @throws IOException on errors reading, or on invalid data */ public POIFSFileSystem(File file, boolean readOnly) throws IOException { this(null, file, readOnly, true, true); } /** * <p>Creates a POIFSFileSystem from an open {@code FileChannel}. This uses * less memory than creating from an {@code InputStream}. The stream will * be used in read-only mode.</p> * * <p>Note that with this constructor, you will need to call {@link #close()} * when you're done to have the underlying Channel closed, as the channel is * kept open during normal operation to read the data out. 
For legacy reasons, * the channel is not closed if there is an error creating the POIFSFileSystem.</p> * * @param channel the FileChannel from which to read the data * @throws IOException on errors reading, or on invalid data * @see #POIFSFileSystem(FileChannel, boolean, boolean) this constructor gives more control over whether to * close the provided channel */ public POIFSFileSystem(FileChannel channel) throws IOException { this(channel, true); } /** * <p>Creates a POIFSFileSystem from an open {@code FileChannel}. This uses * less memory than creating from an {@code InputStream}.</p> * * <p>Note that with this constructor, you will need to call {@link #close()} * when you're done to have the underlying Channel closed, as the channel is * kept open during normal operation to read the data out. For legacy reasons, * the channel is not closed if there is an error creating the POIFSFileSystem.</p> * * @param channel the FileChannel from which to read or read/write the data * @param readOnly whether the POIFileSystem will only be used in read-only mode * @throws IOException on errors reading, or on invalid data * @see #POIFSFileSystem(FileChannel, boolean, boolean) this constructor gives more control over whether to * close the provided channel */ public POIFSFileSystem(FileChannel channel, boolean readOnly) throws IOException { this(channel, null, readOnly, false, true); } /** * <p>Creates a POIFSFileSystem from an open {@code FileChannel}. This uses * less memory than creating from an {@code InputStream}.</p> * * <p>Note that with this constructor, you will need to call {@link #close()} * when you're done to have the underlying resources closed. 
The <code>closeChannel</code> * parameter controls whether the provided channel is closed.</p> * * @param channel the FileChannel from which to read or read/write the data * @param readOnly whether the POIFileSystem will only be used in read-only mode * @param closeChannel whether the provided FileChannel should be closed when * {@link #close()} is called, or when this constructor throws * an exception * @throws IOException on errors reading, or on invalid data * @since POI 5.1.0 */ public POIFSFileSystem(FileChannel channel, boolean readOnly, boolean closeChannel) throws IOException { this(channel, null, readOnly, closeChannel, closeChannel); } @SuppressWarnings("java:S2095") private POIFSFileSystem(FileChannel channel, File srcFile, boolean readOnly, boolean closeChannelOnError, boolean closeChannelOnClose) throws IOException { this(false); try { // Initialize the datasource if (srcFile != null) { if (srcFile.length() == 0) throw new EmptyFileException(srcFile); FileBackedDataSource d = new FileBackedDataSource(srcFile, readOnly); channel = d.getChannel(); _data = d; } else { _data = new FileBackedDataSource(channel, readOnly, closeChannelOnClose); } // Get the header ByteBuffer headerBuffer = ByteBuffer.allocate(POIFSConstants.SMALLER_BIG_BLOCK_SIZE); IOUtils.readFully(channel, headerBuffer); // Have the header processed _header = new HeaderBlock(headerBuffer); // Now process the various entries readCoreContents(); } catch (IOException | RuntimeException e) { // Comes from Iterators etc. // TODO Decide if we can handle these better whilst // still sticking to the iterator contract if (closeChannelOnError && channel != null) { channel.close(); } throw e; } } /** * Create a POIFSFileSystem from an {@code InputStream}. Normally the stream is read until * EOF. The stream is always closed. * <p> * Some streams are usable after reaching EOF (typically those that return {@code true} * for {@code markSupported()}). 
In the unlikely case that the caller has such a stream * <i>and</i> needs to use it after this constructor completes, a work around is to wrap the * stream in order to trap the {@code close()} call. A convenience method ( * {@code createNonClosingInputStream()}) has been provided for this purpose: * <pre> * InputStream wrappedStream = POIFSFileSystem.createNonClosingInputStream(is); * HSSFWorkbook wb = new HSSFWorkbook(wrappedStream); * is.reset(); * doSomethingElse(is); * </pre> * Note also the special case of {@code ByteArrayInputStream} for which the {@code close()} * method does nothing. * <pre> * ByteArrayInputStream bais = ... * HSSFWorkbook wb = new HSSFWorkbook(bais); // calls bais.close() ! * bais.reset(); // no problem * doSomethingElse(bais); * </pre> * * @param stream the InputStream from which to read the data * @throws IOException on errors reading, or on invalid data */ public POIFSFileSystem(InputStream stream) throws IOException { this(false); boolean success = false; try (ReadableByteChannel channel = Channels.newChannel(stream)) { // Turn our InputStream into something NIO based // Get the header ByteBuffer headerBuffer = ByteBuffer.allocate(POIFSConstants.SMALLER_BIG_BLOCK_SIZE); IOUtils.readFully(channel, headerBuffer); // Have the header processed _header = new HeaderBlock(headerBuffer); // Sanity check the block count sanityCheckBlockCount(_header.getBATCount()); // We need to buffer the whole file into memory when // working with an InputStream. 
// The max possible size is when each BAT block entry is used long maxSize = BATBlock.calculateMaximumSize(_header); if (maxSize > Integer.MAX_VALUE) { throw new IllegalArgumentException("Unable read a >2gb file via an InputStream"); } // don't allow huge allocations with invalid header-values IOUtils.safelyAllocateCheck(maxSize, MAX_ALLOCATION_SIZE); ByteBuffer data = ByteBuffer.allocate((int) maxSize); // Copy in the header headerBuffer.position(0); data.put(headerBuffer); data.position(headerBuffer.capacity()); // Now read the rest of the stream IOUtils.readFully(channel, data); success = true; // Turn it into a DataSource _data = new ByteArrayBackedDataSource(data.array(), data.position()); } finally { // As per the constructor contract, always close the stream closeInputStream(stream, success); } // Now process the various entries readCoreContents(); } /** * @param stream the stream to be closed * @param success {@code false} if an exception is currently being thrown in the calling method */ private void closeInputStream(InputStream stream, boolean success) { try { stream.close(); } catch (IOException e) { if (success) { throw new IllegalStateException(e); } // else not success? Try block did not complete normally // just print stack trace and leave original ex to be thrown LOG.atError().withThrowable(e).log("can't close input stream"); } } /** * Read and process the PropertiesTable and the * FAT / XFAT blocks, so that we're ready to * work with the file */ private void readCoreContents() throws IOException { // Grab the block size bigBlockSize = _header.getBigBlockSize(); // Each block should only ever be used by one of the // FAT, XFAT or Property Table. 
Ensure it does ChainLoopDetector loopDetector = getChainLoopDetector(); // Read the FAT blocks for (int fatAt : _header.getBATArray()) { readBAT(fatAt, loopDetector); } // Work out how many FAT blocks remain in the XFATs int remainingFATs = _header.getBATCount() - _header.getBATArray().length; // Now read the XFAT blocks, and the FATs within them BATBlock xfat; int nextAt = _header.getXBATIndex(); for (int i = 0; i < _header.getXBATCount(); i++) { loopDetector.claim(nextAt); ByteBuffer fatData = getBlockAt(nextAt); xfat = BATBlock.createBATBlock(bigBlockSize, fatData); xfat.setOurBlockIndex(nextAt); nextAt = xfat.getValueAt(bigBlockSize.getXBATEntriesPerBlock()); _xbat_blocks.add(xfat); // Process all the (used) FATs from this XFAT int xbatFATs = Math.min(remainingFATs, bigBlockSize.getXBATEntriesPerBlock()); for (int j = 0; j < xbatFATs; j++) { int fatAt = xfat.getValueAt(j); if (fatAt == POIFSConstants.UNUSED_BLOCK || fatAt == POIFSConstants.END_OF_CHAIN) break; readBAT(fatAt, loopDetector); } remainingFATs -= xbatFATs; } // We're now able to load steams // Use this to read in the properties _property_table = new PropertyTable(_header, this); // Finally read the Small Stream FAT (SBAT) blocks BATBlock sfat; List<BATBlock> sbats = new ArrayList<>(); _mini_store = new POIFSMiniStore(this, _property_table.getRoot(), sbats, _header); nextAt = _header.getSBATStart(); for (int i = 0; i < _header.getSBATCount() && nextAt != POIFSConstants.END_OF_CHAIN; i++) { loopDetector.claim(nextAt); ByteBuffer fatData = getBlockAt(nextAt); sfat = BATBlock.createBATBlock(bigBlockSize, fatData); sfat.setOurBlockIndex(nextAt); sbats.add(sfat); nextAt = getNextBlock(nextAt); } } private void readBAT(int batAt, ChainLoopDetector loopDetector) throws IOException { loopDetector.claim(batAt); ByteBuffer fatData = getBlockAt(batAt); BATBlock bat = BATBlock.createBATBlock(bigBlockSize, fatData); bat.setOurBlockIndex(batAt); _bat_blocks.add(bat); } private BATBlock createBAT(int offset, 
boolean isBAT) throws IOException {
    // Create a new BATBlock (an XBAT when isBAT is false)
    BATBlock newBAT = BATBlock.createEmptyBATBlock(bigBlockSize, !isBAT);
    newBAT.setOurBlockIndex(offset);
    // Ensure there's a spot in the file for it
    ByteBuffer buffer = ByteBuffer.allocate(bigBlockSize.getBigBlockSize());
    // Header isn't in BATs, hence the +1 block offset; multiplyExact guards overflow
    long writeTo = Math.multiplyExact(1L + offset, bigBlockSize.getBigBlockSize());
    _data.write(buffer, writeTo);
    // All done
    return newBAT;
}

/**
 * Load the block at the given offset.
 *
 * @param offset the zero-based block number (header block excluded)
 * @throws IOException on read errors from the underlying data source
 */
@Override
protected ByteBuffer getBlockAt(final int offset) throws IOException {
    // The header block doesn't count, so add one
    long blockWanted = offset + 1L;
    long startAt = blockWanted * bigBlockSize.getBigBlockSize();
    try {
        return _data.read(bigBlockSize.getBigBlockSize(), startAt);
    } catch (IndexOutOfBoundsException e) {
        // Re-throw with a clearer block-level message, preserving the cause
        IndexOutOfBoundsException wrapped = new IndexOutOfBoundsException("Block " + offset + " not found");
        wrapped.initCause(e);
        throw wrapped;
    }
}

/**
 * Load the block at the given offset,
 * extending the file if needed
 */
@Override
protected ByteBuffer createBlockIfNeeded(final int offset) throws IOException {
    try {
        return getBlockAt(offset);
    } catch (IndexOutOfBoundsException e) {
        // Block lies past the current end of the data source:
        // The header block doesn't count, so add one
        long startAt = (offset + 1L) * bigBlockSize.getBigBlockSize();
        // Allocate and write an empty block to grow the file
        ByteBuffer buffer = ByteBuffer.allocate(getBigBlockSize());
        _data.write(buffer, startAt);
        // Retrieve the properly backed block
        return getBlockAt(offset);
    }
}

/**
 * Returns the BATBlock that handles the specified offset,
 * and the relative index within it
 */
@Override
protected BATBlockAndIndex getBATBlockAndIndex(final int offset) {
    return BATBlock.getBATBlockAndIndex(
            offset, _header, _bat_blocks
    );
}

/**
 * Works out what block follows the specified one.
*/ @Override protected int getNextBlock(final int offset) { BATBlockAndIndex bai = getBATBlockAndIndex(offset); return bai.getBlock().getValueAt(bai.getIndex()); } /** * Changes the record of what block follows the specified one. */ @Override protected void setNextBlock(final int offset, final int nextBlock) { BATBlockAndIndex bai = getBATBlockAndIndex(offset); bai.getBlock().setValueAt( bai.getIndex(), nextBlock ); } /** * Finds a free block, and returns its offset. * This method will extend the file if needed, and if doing * so, allocate new FAT blocks to address the extra space. */ @Override protected int getFreeBlock() throws IOException { int numSectors = bigBlockSize.getBATEntriesPerBlock(); // First up, do we have any spare ones? int offset = 0; for (BATBlock bat : _bat_blocks) { if (bat.hasFreeSectors()) { // Claim one of them and return it for (int j = 0; j < numSectors; j++) { int batValue = bat.getValueAt(j); if (batValue == POIFSConstants.UNUSED_BLOCK) { // Bingo return offset + j; } } } // Move onto the next BAT offset += numSectors; } // If we get here, then there aren't any free sectors // in any of the BATs, so we need another BAT BATBlock bat = createBAT(offset, true); bat.setValueAt(0, POIFSConstants.FAT_SECTOR_BLOCK); _bat_blocks.add(bat); // Now store a reference to the BAT in the required place if (_header.getBATCount() >= 109) { // Needs to come from an XBAT BATBlock xbat = null; for (BATBlock x : _xbat_blocks) { if (x.hasFreeSectors()) { xbat = x; break; } } if (xbat == null) { // Oh joy, we need a new XBAT too... 
xbat = createBAT(offset + 1, false); // Allocate our new BAT as the first block in the XBAT xbat.setValueAt(0, offset); // And allocate the XBAT in the BAT bat.setValueAt(1, POIFSConstants.DIFAT_SECTOR_BLOCK); // Will go one place higher as XBAT added in offset++; // Chain it if (_xbat_blocks.isEmpty()) { _header.setXBATStart(offset); } else { _xbat_blocks.get(_xbat_blocks.size() - 1).setValueAt( bigBlockSize.getXBATEntriesPerBlock(), offset ); } _xbat_blocks.add(xbat); _header.setXBATCount(_xbat_blocks.size()); } else { // Allocate our BAT in the existing XBAT with space for (int i = 0; i < bigBlockSize.getXBATEntriesPerBlock(); i++) { if (xbat.getValueAt(i) == POIFSConstants.UNUSED_BLOCK) { xbat.setValueAt(i, offset); break; } } } } else { // Store us in the header int[] newBATs = new int[_header.getBATCount() + 1]; System.arraycopy(_header.getBATArray(), 0, newBATs, 0, newBATs.length - 1); newBATs[newBATs.length - 1] = offset; _header.setBATArray(newBATs); } _header.setBATCount(_bat_blocks.size()); // The current offset stores us, but the next one is free return offset + 1; } protected long size() throws IOException { return _data.size(); } @Override protected ChainLoopDetector getChainLoopDetector() throws IOException { return new ChainLoopDetector(_data.size()); } /** * For unit testing only! Returns the underlying * properties table */ PropertyTable _get_property_table() { return _property_table; } /** * Returns the MiniStore, which performs a similar low * level function to this, except for the small blocks. 
*/ POIFSMiniStore getMiniStore() { return _mini_store; } /** * add a new POIFSDocument to the FileSytem * * @param document the POIFSDocument being added */ void addDocument(final POIFSDocument document) { _property_table.addProperty(document.getDocumentProperty()); } /** * add a new DirectoryProperty to the FileSystem * * @param directory the DirectoryProperty being added */ void addDirectory(final DirectoryProperty directory) { _property_table.addProperty(directory); } /** * Create a new document to be added to the root directory * * @param stream the InputStream from which the document's data * will be obtained * @param name the name of the new POIFSDocument * @return the new DocumentEntry * @throws IOException on error creating the new POIFSDocument */ public DocumentEntry createDocument(final InputStream stream, final String name) throws IOException { return getRoot().createDocument(name, stream); } /** * create a new DocumentEntry in the root entry; the data will be * provided later * * @param name the name of the new DocumentEntry * @param size the size of the new DocumentEntry * @param writer the writer of the new DocumentEntry * @return the new DocumentEntry * @throws IOException if the writer exceeds the given size */ public DocumentEntry createDocument(final String name, final int size, final POIFSWriterListener writer) throws IOException { return getRoot().createDocument(name, size, writer); } /** * create a new DirectoryEntry in the root directory * * @param name the name of the new DirectoryEntry * @return the new DirectoryEntry * @throws IOException on name duplication */ public DirectoryEntry createDirectory(final String name) throws IOException { return getRoot().createDirectory(name); } /** * Set the contents of a document in the root directory, * creating if needed, otherwise updating * * @param stream the InputStream from which the document's data * will be obtained * @param name the name of the new or existing POIFSDocument * @return the new or 
updated DocumentEntry * @throws IOException on error populating the POIFSDocument */ @SuppressWarnings("UnusedReturnValue") public DocumentEntry createOrUpdateDocument(final InputStream stream, final String name) throws IOException { return getRoot().createOrUpdateDocument(name, stream); } /** * Does the filesystem support an in-place write via * {@link #writeFilesystem()} ? If false, only writing out to * a brand new file via {@link #writeFilesystem(OutputStream)} * is supported. */ public boolean isInPlaceWriteable() { return (_data instanceof FileBackedDataSource) && ((FileBackedDataSource) _data).isWriteable(); } /** * Write the filesystem out to the open file. Will thrown an * {@link IllegalArgumentException} if opened from an * {@link InputStream}. * * @throws IOException thrown on errors writing to the stream */ public void writeFilesystem() throws IOException { if (!(_data instanceof FileBackedDataSource)) { throw new IllegalArgumentException( "POIFS opened from an inputstream, so writeFilesystem() may " + "not be called. Use writeFilesystem(OutputStream) instead" ); } if (!((FileBackedDataSource) _data).isWriteable()) { throw new IllegalArgumentException( "POIFS opened in read only mode, so writeFilesystem() may " + "not be called. 
Open the FileSystem in read-write mode first" ); } syncWithDataSource(); } /** * Write the filesystem out * * @param stream the OutputStream to which the filesystem will be * written * @throws IOException thrown on errors writing to the stream */ public void writeFilesystem(final OutputStream stream) throws IOException { // Have the datasource updated syncWithDataSource(); // Now copy the contents to the stream _data.copyTo(stream); } /** * Has our in-memory objects write their state * to their backing blocks */ private void syncWithDataSource() throws IOException { // Mini Stream + SBATs first, as mini-stream details have // to be stored in the Root Property _mini_store.syncWithDataSource(); // Properties POIFSStream propStream = new POIFSStream(this, _header.getPropertyStart()); _property_table.preWrite(); _property_table.write(propStream); // _header.setPropertyStart has been updated on write ... // HeaderBlock UnsynchronizedByteArrayOutputStream baos = UnsynchronizedByteArrayOutputStream.builder().setBufferSize( _header.getBigBlockSize().getBigBlockSize() ).get(); _header.writeData(baos); getBlockAt(-1).put(baos.toByteArray()); // BATs for (BATBlock bat : _bat_blocks) { ByteBuffer block = getBlockAt(bat.getOurBlockIndex()); bat.writeData(block); } // XBats for (BATBlock bat : _xbat_blocks) { ByteBuffer block = getBlockAt(bat.getOurBlockIndex()); bat.writeData(block); } } /** * Closes the FileSystem, freeing any underlying files, streams * and buffers. After this, you will be unable to read or * write from the FileSystem. 
*/ @Override public void close() throws IOException { _data.close(); } /** * read in a file and write it back out again * * @param args names of the files; arg[ 0 ] is the input file, * arg[ 1 ] is the output file */ public static void main(String[] args) throws IOException { if (args.length != 2) { System.err.println( "two arguments required: input filename and output filename"); System.exit(1); } try (InputStream istream = Files.newInputStream(Paths.get(args[0]))) { try (OutputStream ostream = Files.newOutputStream(Paths.get(args[1]))) { try (POIFSFileSystem fs = new POIFSFileSystem(istream)) { fs.writeFilesystem(ostream); } } } } /** * Get the root entry * * @return the root entry */ public DirectoryNode getRoot() { if (_root == null) { _root = new DirectoryNode(_property_table.getRoot(), this, null); } return _root; } /** * open a document in the root entry's list of entries * * @param documentName the name of the document to be opened * @return a newly opened DocumentInputStream * @throws IOException if the document does not exist or the * name is that of a DirectoryEntry */ public DocumentInputStream createDocumentInputStream( final String documentName) throws IOException { return getRoot().createDocumentInputStream(documentName); } /** * remove an entry * * @param entry to be removed */ void remove(EntryNode entry) throws IOException { // If it's a document, free the blocks if (entry instanceof DocumentEntry) { POIFSDocument doc = new POIFSDocument((DocumentProperty) entry.getProperty(), this); doc.free(); } // Now zap it from the properties list _property_table.removeProperty(entry.getProperty()); } /* ********** START begin implementation of POIFSViewable ********** */ /** * Get an array of objects, some of which may implement * POIFSViewable * * @return an array of Object; may not be null, but may be empty */ @Override public Object[] getViewableArray() { if (preferArray()) { return getRoot().getViewableArray(); } return new Object[0]; } /** * Get an 
Iterator of objects, some of which may implement
 * POIFSViewable
 *
 * @return an Iterator; may not be null, but may have an empty
 *         back end store
 */
@Override
public Iterator<Object> getViewableIterator() {
    if (!preferArray()) {
        return getRoot().getViewableIterator();
    }
    return Collections.emptyIterator();
}

/**
 * Give viewers a hint as to whether to call getViewableArray or
 * getViewableIterator
 *
 * @return true if a viewer should call getViewableArray, false if
 *         a viewer should call getViewableIterator
 */
@Override
public boolean preferArray() {
    // Delegates the decision to the root directory node
    return getRoot().preferArray();
}

/**
 * Provides a short description of the object, to be used when a
 * POIFSViewable object has not provided its contents.
 *
 * @return short description
 */
@Override
public String getShortDescription() {
    return "POIFS FileSystem";
}

/* **********  END begin implementation of POIFSViewable  ********** */

/**
 * @return The Big Block size, normally 512 bytes, sometimes 4096 bytes
 */
public int getBigBlockSize() {
    return bigBlockSize.getBigBlockSize();
}

/**
 * @return The Big Block size, normally 512 bytes, sometimes 4096 bytes
 */
@SuppressWarnings("WeakerAccess")
public POIFSBigBlockSize getBigBlockSizeDetails() {
    return bigBlockSize;
}

/**
 * Creates a new POIFSFileSystem in a new {@link File}.
 * Use {@link #POIFSFileSystem(File)} to open an existing File,
 * this should only be used to create a new empty filesystem.
 *
 * @param file The file to create and open
 * @return The created and opened POIFSFileSystem
 */
public static POIFSFileSystem create(File file) throws IOException {
    // Create a new empty POIFS in the file
    try (POIFSFileSystem tmp = new POIFSFileSystem();
         OutputStream out = Files.newOutputStream(file.toPath())) {
        tmp.writeFilesystem(out);
    }
    // Open it up again backed by the file
    return new POIFSFileSystem(file, false);
}

@Override
protected int getBlockStoreBlockSize() {
    return getBigBlockSize();
}

// For internal POI use only: exposes the low-level property table.
@Internal
public PropertyTable getPropertyTable() {
    return _property_table;
}

// For internal POI use only: exposes the parsed header block.
@Internal
public HeaderBlock getHeaderBlock() {
    return _header;
}

@Override
protected void releaseBuffer(ByteBuffer buffer) {
    // Only file-backed sources hold buffers that need explicit release;
    // in-memory sources have nothing to free here
    if (_data instanceof FileBackedDataSource) {
        ((FileBackedDataSource)_data).releaseBuffer(buffer);
    }
}

/**
 * Validates the BAT block count read from a header, rejecting
 * non-positive values and counts above the POI-supported maximum.
 *
 * @param block_count the count read from the header block
 * @throws IOException if the count is out of the supported range
 */
private static void sanityCheckBlockCount(int block_count) throws IOException {
    if (block_count <= 0) {
        throw new IOException(
                "Illegal block count; minimum count is 1, got " + block_count + " instead"
        );
    }
    if (block_count > MAX_BLOCK_COUNT) {
        throw new IOException(
                "Block count " + block_count + " is too high. POI maximum is " + MAX_BLOCK_COUNT + "."
        );
    }
}
}
googleads/google-ads-java
36,596
google-ads-stubs-v19/src/main/java/com/google/ads/googleads/v19/resources/AssetOrBuilder.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v19/resources/asset.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v19.resources; public interface AssetOrBuilder extends // @@protoc_insertion_point(interface_extends:google.ads.googleads.v19.resources.Asset) com.google.protobuf.MessageOrBuilder { /** * <pre> * Immutable. The resource name of the asset. * Asset resource names have the form: * * `customers/{customer_id}/assets/{asset_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The resourceName. */ java.lang.String getResourceName(); /** * <pre> * Immutable. The resource name of the asset. * Asset resource names have the form: * * `customers/{customer_id}/assets/{asset_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The bytes for resourceName. */ com.google.protobuf.ByteString getResourceNameBytes(); /** * <pre> * Output only. The ID of the asset. * </pre> * * <code>optional int64 id = 11 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return Whether the id field is set. */ boolean hasId(); /** * <pre> * Output only. The ID of the asset. * </pre> * * <code>optional int64 id = 11 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return The id. */ long getId(); /** * <pre> * Optional name of the asset. * </pre> * * <code>optional string name = 12;</code> * @return Whether the name field is set. */ boolean hasName(); /** * <pre> * Optional name of the asset. * </pre> * * <code>optional string name = 12;</code> * @return The name. */ java.lang.String getName(); /** * <pre> * Optional name of the asset. * </pre> * * <code>optional string name = 12;</code> * @return The bytes for name. */ com.google.protobuf.ByteString getNameBytes(); /** * <pre> * Output only. Type of the asset. 
* </pre> * * <code>.google.ads.googleads.v19.enums.AssetTypeEnum.AssetType type = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return The enum numeric value on the wire for type. */ int getTypeValue(); /** * <pre> * Output only. Type of the asset. * </pre> * * <code>.google.ads.googleads.v19.enums.AssetTypeEnum.AssetType type = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return The type. */ com.google.ads.googleads.v19.enums.AssetTypeEnum.AssetType getType(); /** * <pre> * A list of possible final URLs after all cross domain redirects. * </pre> * * <code>repeated string final_urls = 14;</code> * @return A list containing the finalUrls. */ java.util.List<java.lang.String> getFinalUrlsList(); /** * <pre> * A list of possible final URLs after all cross domain redirects. * </pre> * * <code>repeated string final_urls = 14;</code> * @return The count of finalUrls. */ int getFinalUrlsCount(); /** * <pre> * A list of possible final URLs after all cross domain redirects. * </pre> * * <code>repeated string final_urls = 14;</code> * @param index The index of the element to return. * @return The finalUrls at the given index. */ java.lang.String getFinalUrls(int index); /** * <pre> * A list of possible final URLs after all cross domain redirects. * </pre> * * <code>repeated string final_urls = 14;</code> * @param index The index of the value to return. * @return The bytes of the finalUrls at the given index. */ com.google.protobuf.ByteString getFinalUrlsBytes(int index); /** * <pre> * A list of possible final mobile URLs after all cross domain redirects. * </pre> * * <code>repeated string final_mobile_urls = 16;</code> * @return A list containing the finalMobileUrls. */ java.util.List<java.lang.String> getFinalMobileUrlsList(); /** * <pre> * A list of possible final mobile URLs after all cross domain redirects. * </pre> * * <code>repeated string final_mobile_urls = 16;</code> * @return The count of finalMobileUrls. 
*/ int getFinalMobileUrlsCount(); /** * <pre> * A list of possible final mobile URLs after all cross domain redirects. * </pre> * * <code>repeated string final_mobile_urls = 16;</code> * @param index The index of the element to return. * @return The finalMobileUrls at the given index. */ java.lang.String getFinalMobileUrls(int index); /** * <pre> * A list of possible final mobile URLs after all cross domain redirects. * </pre> * * <code>repeated string final_mobile_urls = 16;</code> * @param index The index of the value to return. * @return The bytes of the finalMobileUrls at the given index. */ com.google.protobuf.ByteString getFinalMobileUrlsBytes(int index); /** * <pre> * URL template for constructing a tracking URL. * </pre> * * <code>optional string tracking_url_template = 17;</code> * @return Whether the trackingUrlTemplate field is set. */ boolean hasTrackingUrlTemplate(); /** * <pre> * URL template for constructing a tracking URL. * </pre> * * <code>optional string tracking_url_template = 17;</code> * @return The trackingUrlTemplate. */ java.lang.String getTrackingUrlTemplate(); /** * <pre> * URL template for constructing a tracking URL. * </pre> * * <code>optional string tracking_url_template = 17;</code> * @return The bytes for trackingUrlTemplate. */ com.google.protobuf.ByteString getTrackingUrlTemplateBytes(); /** * <pre> * A list of mappings to be used for substituting URL custom parameter tags in * the tracking_url_template, final_urls, and/or final_mobile_urls. * </pre> * * <code>repeated .google.ads.googleads.v19.common.CustomParameter url_custom_parameters = 18;</code> */ java.util.List<com.google.ads.googleads.v19.common.CustomParameter> getUrlCustomParametersList(); /** * <pre> * A list of mappings to be used for substituting URL custom parameter tags in * the tracking_url_template, final_urls, and/or final_mobile_urls. 
* </pre> * * <code>repeated .google.ads.googleads.v19.common.CustomParameter url_custom_parameters = 18;</code> */ com.google.ads.googleads.v19.common.CustomParameter getUrlCustomParameters(int index); /** * <pre> * A list of mappings to be used for substituting URL custom parameter tags in * the tracking_url_template, final_urls, and/or final_mobile_urls. * </pre> * * <code>repeated .google.ads.googleads.v19.common.CustomParameter url_custom_parameters = 18;</code> */ int getUrlCustomParametersCount(); /** * <pre> * A list of mappings to be used for substituting URL custom parameter tags in * the tracking_url_template, final_urls, and/or final_mobile_urls. * </pre> * * <code>repeated .google.ads.googleads.v19.common.CustomParameter url_custom_parameters = 18;</code> */ java.util.List<? extends com.google.ads.googleads.v19.common.CustomParameterOrBuilder> getUrlCustomParametersOrBuilderList(); /** * <pre> * A list of mappings to be used for substituting URL custom parameter tags in * the tracking_url_template, final_urls, and/or final_mobile_urls. * </pre> * * <code>repeated .google.ads.googleads.v19.common.CustomParameter url_custom_parameters = 18;</code> */ com.google.ads.googleads.v19.common.CustomParameterOrBuilder getUrlCustomParametersOrBuilder( int index); /** * <pre> * URL template for appending params to landing page URLs served with parallel * tracking. * </pre> * * <code>optional string final_url_suffix = 19;</code> * @return Whether the finalUrlSuffix field is set. */ boolean hasFinalUrlSuffix(); /** * <pre> * URL template for appending params to landing page URLs served with parallel * tracking. * </pre> * * <code>optional string final_url_suffix = 19;</code> * @return The finalUrlSuffix. */ java.lang.String getFinalUrlSuffix(); /** * <pre> * URL template for appending params to landing page URLs served with parallel * tracking. * </pre> * * <code>optional string final_url_suffix = 19;</code> * @return The bytes for finalUrlSuffix. 
*/ com.google.protobuf.ByteString getFinalUrlSuffixBytes(); /** * <pre> * Output only. Source of the asset. * </pre> * * <code>.google.ads.googleads.v19.enums.AssetSourceEnum.AssetSource source = 38 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return The enum numeric value on the wire for source. */ int getSourceValue(); /** * <pre> * Output only. Source of the asset. * </pre> * * <code>.google.ads.googleads.v19.enums.AssetSourceEnum.AssetSource source = 38 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return The source. */ com.google.ads.googleads.v19.enums.AssetSourceEnum.AssetSource getSource(); /** * <pre> * Output only. Policy information for the asset. * </pre> * * <code>.google.ads.googleads.v19.resources.AssetPolicySummary policy_summary = 13 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return Whether the policySummary field is set. */ boolean hasPolicySummary(); /** * <pre> * Output only. Policy information for the asset. * </pre> * * <code>.google.ads.googleads.v19.resources.AssetPolicySummary policy_summary = 13 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return The policySummary. */ com.google.ads.googleads.v19.resources.AssetPolicySummary getPolicySummary(); /** * <pre> * Output only. Policy information for the asset. * </pre> * * <code>.google.ads.googleads.v19.resources.AssetPolicySummary policy_summary = 13 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> */ com.google.ads.googleads.v19.resources.AssetPolicySummaryOrBuilder getPolicySummaryOrBuilder(); /** * <pre> * Output only. Policy information for the asset for each FieldType. * </pre> * * <code>repeated .google.ads.googleads.v19.resources.AssetFieldTypePolicySummary field_type_policy_summaries = 40 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> */ java.util.List<com.google.ads.googleads.v19.resources.AssetFieldTypePolicySummary> getFieldTypePolicySummariesList(); /** * <pre> * Output only. Policy information for the asset for each FieldType. 
* </pre> * * <code>repeated .google.ads.googleads.v19.resources.AssetFieldTypePolicySummary field_type_policy_summaries = 40 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> */ com.google.ads.googleads.v19.resources.AssetFieldTypePolicySummary getFieldTypePolicySummaries(int index); /** * <pre> * Output only. Policy information for the asset for each FieldType. * </pre> * * <code>repeated .google.ads.googleads.v19.resources.AssetFieldTypePolicySummary field_type_policy_summaries = 40 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> */ int getFieldTypePolicySummariesCount(); /** * <pre> * Output only. Policy information for the asset for each FieldType. * </pre> * * <code>repeated .google.ads.googleads.v19.resources.AssetFieldTypePolicySummary field_type_policy_summaries = 40 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> */ java.util.List<? extends com.google.ads.googleads.v19.resources.AssetFieldTypePolicySummaryOrBuilder> getFieldTypePolicySummariesOrBuilderList(); /** * <pre> * Output only. Policy information for the asset for each FieldType. * </pre> * * <code>repeated .google.ads.googleads.v19.resources.AssetFieldTypePolicySummary field_type_policy_summaries = 40 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> */ com.google.ads.googleads.v19.resources.AssetFieldTypePolicySummaryOrBuilder getFieldTypePolicySummariesOrBuilder( int index); /** * <pre> * Immutable. A YouTube video asset. * </pre> * * <code>.google.ads.googleads.v19.common.YoutubeVideoAsset youtube_video_asset = 5 [(.google.api.field_behavior) = IMMUTABLE];</code> * @return Whether the youtubeVideoAsset field is set. */ boolean hasYoutubeVideoAsset(); /** * <pre> * Immutable. A YouTube video asset. * </pre> * * <code>.google.ads.googleads.v19.common.YoutubeVideoAsset youtube_video_asset = 5 [(.google.api.field_behavior) = IMMUTABLE];</code> * @return The youtubeVideoAsset. */ com.google.ads.googleads.v19.common.YoutubeVideoAsset getYoutubeVideoAsset(); /** * <pre> * Immutable. 
A YouTube video asset. * </pre> * * <code>.google.ads.googleads.v19.common.YoutubeVideoAsset youtube_video_asset = 5 [(.google.api.field_behavior) = IMMUTABLE];</code> */ com.google.ads.googleads.v19.common.YoutubeVideoAssetOrBuilder getYoutubeVideoAssetOrBuilder(); /** * <pre> * Immutable. A media bundle asset. * </pre> * * <code>.google.ads.googleads.v19.common.MediaBundleAsset media_bundle_asset = 6 [(.google.api.field_behavior) = IMMUTABLE];</code> * @return Whether the mediaBundleAsset field is set. */ boolean hasMediaBundleAsset(); /** * <pre> * Immutable. A media bundle asset. * </pre> * * <code>.google.ads.googleads.v19.common.MediaBundleAsset media_bundle_asset = 6 [(.google.api.field_behavior) = IMMUTABLE];</code> * @return The mediaBundleAsset. */ com.google.ads.googleads.v19.common.MediaBundleAsset getMediaBundleAsset(); /** * <pre> * Immutable. A media bundle asset. * </pre> * * <code>.google.ads.googleads.v19.common.MediaBundleAsset media_bundle_asset = 6 [(.google.api.field_behavior) = IMMUTABLE];</code> */ com.google.ads.googleads.v19.common.MediaBundleAssetOrBuilder getMediaBundleAssetOrBuilder(); /** * <pre> * Output only. An image asset. * </pre> * * <code>.google.ads.googleads.v19.common.ImageAsset image_asset = 7 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return Whether the imageAsset field is set. */ boolean hasImageAsset(); /** * <pre> * Output only. An image asset. * </pre> * * <code>.google.ads.googleads.v19.common.ImageAsset image_asset = 7 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return The imageAsset. */ com.google.ads.googleads.v19.common.ImageAsset getImageAsset(); /** * <pre> * Output only. An image asset. * </pre> * * <code>.google.ads.googleads.v19.common.ImageAsset image_asset = 7 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> */ com.google.ads.googleads.v19.common.ImageAssetOrBuilder getImageAssetOrBuilder(); /** * <pre> * Immutable. A text asset. 
* </pre> * * <code>.google.ads.googleads.v19.common.TextAsset text_asset = 8 [(.google.api.field_behavior) = IMMUTABLE];</code> * @return Whether the textAsset field is set. */ boolean hasTextAsset(); /** * <pre> * Immutable. A text asset. * </pre> * * <code>.google.ads.googleads.v19.common.TextAsset text_asset = 8 [(.google.api.field_behavior) = IMMUTABLE];</code> * @return The textAsset. */ com.google.ads.googleads.v19.common.TextAsset getTextAsset(); /** * <pre> * Immutable. A text asset. * </pre> * * <code>.google.ads.googleads.v19.common.TextAsset text_asset = 8 [(.google.api.field_behavior) = IMMUTABLE];</code> */ com.google.ads.googleads.v19.common.TextAssetOrBuilder getTextAssetOrBuilder(); /** * <pre> * A lead form asset. * </pre> * * <code>.google.ads.googleads.v19.common.LeadFormAsset lead_form_asset = 9;</code> * @return Whether the leadFormAsset field is set. */ boolean hasLeadFormAsset(); /** * <pre> * A lead form asset. * </pre> * * <code>.google.ads.googleads.v19.common.LeadFormAsset lead_form_asset = 9;</code> * @return The leadFormAsset. */ com.google.ads.googleads.v19.common.LeadFormAsset getLeadFormAsset(); /** * <pre> * A lead form asset. * </pre> * * <code>.google.ads.googleads.v19.common.LeadFormAsset lead_form_asset = 9;</code> */ com.google.ads.googleads.v19.common.LeadFormAssetOrBuilder getLeadFormAssetOrBuilder(); /** * <pre> * A book on google asset. * </pre> * * <code>.google.ads.googleads.v19.common.BookOnGoogleAsset book_on_google_asset = 10;</code> * @return Whether the bookOnGoogleAsset field is set. */ boolean hasBookOnGoogleAsset(); /** * <pre> * A book on google asset. * </pre> * * <code>.google.ads.googleads.v19.common.BookOnGoogleAsset book_on_google_asset = 10;</code> * @return The bookOnGoogleAsset. */ com.google.ads.googleads.v19.common.BookOnGoogleAsset getBookOnGoogleAsset(); /** * <pre> * A book on google asset. 
* </pre> * * <code>.google.ads.googleads.v19.common.BookOnGoogleAsset book_on_google_asset = 10;</code> */ com.google.ads.googleads.v19.common.BookOnGoogleAssetOrBuilder getBookOnGoogleAssetOrBuilder(); /** * <pre> * A promotion asset. * </pre> * * <code>.google.ads.googleads.v19.common.PromotionAsset promotion_asset = 15;</code> * @return Whether the promotionAsset field is set. */ boolean hasPromotionAsset(); /** * <pre> * A promotion asset. * </pre> * * <code>.google.ads.googleads.v19.common.PromotionAsset promotion_asset = 15;</code> * @return The promotionAsset. */ com.google.ads.googleads.v19.common.PromotionAsset getPromotionAsset(); /** * <pre> * A promotion asset. * </pre> * * <code>.google.ads.googleads.v19.common.PromotionAsset promotion_asset = 15;</code> */ com.google.ads.googleads.v19.common.PromotionAssetOrBuilder getPromotionAssetOrBuilder(); /** * <pre> * A callout asset. * </pre> * * <code>.google.ads.googleads.v19.common.CalloutAsset callout_asset = 20;</code> * @return Whether the calloutAsset field is set. */ boolean hasCalloutAsset(); /** * <pre> * A callout asset. * </pre> * * <code>.google.ads.googleads.v19.common.CalloutAsset callout_asset = 20;</code> * @return The calloutAsset. */ com.google.ads.googleads.v19.common.CalloutAsset getCalloutAsset(); /** * <pre> * A callout asset. * </pre> * * <code>.google.ads.googleads.v19.common.CalloutAsset callout_asset = 20;</code> */ com.google.ads.googleads.v19.common.CalloutAssetOrBuilder getCalloutAssetOrBuilder(); /** * <pre> * A structured snippet asset. * </pre> * * <code>.google.ads.googleads.v19.common.StructuredSnippetAsset structured_snippet_asset = 21;</code> * @return Whether the structuredSnippetAsset field is set. */ boolean hasStructuredSnippetAsset(); /** * <pre> * A structured snippet asset. * </pre> * * <code>.google.ads.googleads.v19.common.StructuredSnippetAsset structured_snippet_asset = 21;</code> * @return The structuredSnippetAsset. 
*/ com.google.ads.googleads.v19.common.StructuredSnippetAsset getStructuredSnippetAsset(); /** * <pre> * A structured snippet asset. * </pre> * * <code>.google.ads.googleads.v19.common.StructuredSnippetAsset structured_snippet_asset = 21;</code> */ com.google.ads.googleads.v19.common.StructuredSnippetAssetOrBuilder getStructuredSnippetAssetOrBuilder(); /** * <pre> * A sitelink asset. * </pre> * * <code>.google.ads.googleads.v19.common.SitelinkAsset sitelink_asset = 22;</code> * @return Whether the sitelinkAsset field is set. */ boolean hasSitelinkAsset(); /** * <pre> * A sitelink asset. * </pre> * * <code>.google.ads.googleads.v19.common.SitelinkAsset sitelink_asset = 22;</code> * @return The sitelinkAsset. */ com.google.ads.googleads.v19.common.SitelinkAsset getSitelinkAsset(); /** * <pre> * A sitelink asset. * </pre> * * <code>.google.ads.googleads.v19.common.SitelinkAsset sitelink_asset = 22;</code> */ com.google.ads.googleads.v19.common.SitelinkAssetOrBuilder getSitelinkAssetOrBuilder(); /** * <pre> * A page feed asset. * </pre> * * <code>.google.ads.googleads.v19.common.PageFeedAsset page_feed_asset = 23;</code> * @return Whether the pageFeedAsset field is set. */ boolean hasPageFeedAsset(); /** * <pre> * A page feed asset. * </pre> * * <code>.google.ads.googleads.v19.common.PageFeedAsset page_feed_asset = 23;</code> * @return The pageFeedAsset. */ com.google.ads.googleads.v19.common.PageFeedAsset getPageFeedAsset(); /** * <pre> * A page feed asset. * </pre> * * <code>.google.ads.googleads.v19.common.PageFeedAsset page_feed_asset = 23;</code> */ com.google.ads.googleads.v19.common.PageFeedAssetOrBuilder getPageFeedAssetOrBuilder(); /** * <pre> * A dynamic education asset. * </pre> * * <code>.google.ads.googleads.v19.common.DynamicEducationAsset dynamic_education_asset = 24;</code> * @return Whether the dynamicEducationAsset field is set. */ boolean hasDynamicEducationAsset(); /** * <pre> * A dynamic education asset. 
* </pre> * * <code>.google.ads.googleads.v19.common.DynamicEducationAsset dynamic_education_asset = 24;</code> * @return The dynamicEducationAsset. */ com.google.ads.googleads.v19.common.DynamicEducationAsset getDynamicEducationAsset(); /** * <pre> * A dynamic education asset. * </pre> * * <code>.google.ads.googleads.v19.common.DynamicEducationAsset dynamic_education_asset = 24;</code> */ com.google.ads.googleads.v19.common.DynamicEducationAssetOrBuilder getDynamicEducationAssetOrBuilder(); /** * <pre> * A mobile app asset. * </pre> * * <code>.google.ads.googleads.v19.common.MobileAppAsset mobile_app_asset = 25;</code> * @return Whether the mobileAppAsset field is set. */ boolean hasMobileAppAsset(); /** * <pre> * A mobile app asset. * </pre> * * <code>.google.ads.googleads.v19.common.MobileAppAsset mobile_app_asset = 25;</code> * @return The mobileAppAsset. */ com.google.ads.googleads.v19.common.MobileAppAsset getMobileAppAsset(); /** * <pre> * A mobile app asset. * </pre> * * <code>.google.ads.googleads.v19.common.MobileAppAsset mobile_app_asset = 25;</code> */ com.google.ads.googleads.v19.common.MobileAppAssetOrBuilder getMobileAppAssetOrBuilder(); /** * <pre> * A hotel callout asset. * </pre> * * <code>.google.ads.googleads.v19.common.HotelCalloutAsset hotel_callout_asset = 26;</code> * @return Whether the hotelCalloutAsset field is set. */ boolean hasHotelCalloutAsset(); /** * <pre> * A hotel callout asset. * </pre> * * <code>.google.ads.googleads.v19.common.HotelCalloutAsset hotel_callout_asset = 26;</code> * @return The hotelCalloutAsset. */ com.google.ads.googleads.v19.common.HotelCalloutAsset getHotelCalloutAsset(); /** * <pre> * A hotel callout asset. * </pre> * * <code>.google.ads.googleads.v19.common.HotelCalloutAsset hotel_callout_asset = 26;</code> */ com.google.ads.googleads.v19.common.HotelCalloutAssetOrBuilder getHotelCalloutAssetOrBuilder(); /** * <pre> * A call asset. 
* </pre> * * <code>.google.ads.googleads.v19.common.CallAsset call_asset = 27;</code> * @return Whether the callAsset field is set. */ boolean hasCallAsset(); /** * <pre> * A call asset. * </pre> * * <code>.google.ads.googleads.v19.common.CallAsset call_asset = 27;</code> * @return The callAsset. */ com.google.ads.googleads.v19.common.CallAsset getCallAsset(); /** * <pre> * A call asset. * </pre> * * <code>.google.ads.googleads.v19.common.CallAsset call_asset = 27;</code> */ com.google.ads.googleads.v19.common.CallAssetOrBuilder getCallAssetOrBuilder(); /** * <pre> * A price asset. * </pre> * * <code>.google.ads.googleads.v19.common.PriceAsset price_asset = 28;</code> * @return Whether the priceAsset field is set. */ boolean hasPriceAsset(); /** * <pre> * A price asset. * </pre> * * <code>.google.ads.googleads.v19.common.PriceAsset price_asset = 28;</code> * @return The priceAsset. */ com.google.ads.googleads.v19.common.PriceAsset getPriceAsset(); /** * <pre> * A price asset. * </pre> * * <code>.google.ads.googleads.v19.common.PriceAsset price_asset = 28;</code> */ com.google.ads.googleads.v19.common.PriceAssetOrBuilder getPriceAssetOrBuilder(); /** * <pre> * Immutable. A call to action asset. * </pre> * * <code>.google.ads.googleads.v19.common.CallToActionAsset call_to_action_asset = 29 [(.google.api.field_behavior) = IMMUTABLE];</code> * @return Whether the callToActionAsset field is set. */ boolean hasCallToActionAsset(); /** * <pre> * Immutable. A call to action asset. * </pre> * * <code>.google.ads.googleads.v19.common.CallToActionAsset call_to_action_asset = 29 [(.google.api.field_behavior) = IMMUTABLE];</code> * @return The callToActionAsset. */ com.google.ads.googleads.v19.common.CallToActionAsset getCallToActionAsset(); /** * <pre> * Immutable. A call to action asset. 
* </pre> * * <code>.google.ads.googleads.v19.common.CallToActionAsset call_to_action_asset = 29 [(.google.api.field_behavior) = IMMUTABLE];</code> */ com.google.ads.googleads.v19.common.CallToActionAssetOrBuilder getCallToActionAssetOrBuilder(); /** * <pre> * A dynamic real estate asset. * </pre> * * <code>.google.ads.googleads.v19.common.DynamicRealEstateAsset dynamic_real_estate_asset = 30;</code> * @return Whether the dynamicRealEstateAsset field is set. */ boolean hasDynamicRealEstateAsset(); /** * <pre> * A dynamic real estate asset. * </pre> * * <code>.google.ads.googleads.v19.common.DynamicRealEstateAsset dynamic_real_estate_asset = 30;</code> * @return The dynamicRealEstateAsset. */ com.google.ads.googleads.v19.common.DynamicRealEstateAsset getDynamicRealEstateAsset(); /** * <pre> * A dynamic real estate asset. * </pre> * * <code>.google.ads.googleads.v19.common.DynamicRealEstateAsset dynamic_real_estate_asset = 30;</code> */ com.google.ads.googleads.v19.common.DynamicRealEstateAssetOrBuilder getDynamicRealEstateAssetOrBuilder(); /** * <pre> * A dynamic custom asset. * </pre> * * <code>.google.ads.googleads.v19.common.DynamicCustomAsset dynamic_custom_asset = 31;</code> * @return Whether the dynamicCustomAsset field is set. */ boolean hasDynamicCustomAsset(); /** * <pre> * A dynamic custom asset. * </pre> * * <code>.google.ads.googleads.v19.common.DynamicCustomAsset dynamic_custom_asset = 31;</code> * @return The dynamicCustomAsset. */ com.google.ads.googleads.v19.common.DynamicCustomAsset getDynamicCustomAsset(); /** * <pre> * A dynamic custom asset. * </pre> * * <code>.google.ads.googleads.v19.common.DynamicCustomAsset dynamic_custom_asset = 31;</code> */ com.google.ads.googleads.v19.common.DynamicCustomAssetOrBuilder getDynamicCustomAssetOrBuilder(); /** * <pre> * A dynamic hotels and rentals asset. 
* </pre> * * <code>.google.ads.googleads.v19.common.DynamicHotelsAndRentalsAsset dynamic_hotels_and_rentals_asset = 32;</code> * @return Whether the dynamicHotelsAndRentalsAsset field is set. */ boolean hasDynamicHotelsAndRentalsAsset(); /** * <pre> * A dynamic hotels and rentals asset. * </pre> * * <code>.google.ads.googleads.v19.common.DynamicHotelsAndRentalsAsset dynamic_hotels_and_rentals_asset = 32;</code> * @return The dynamicHotelsAndRentalsAsset. */ com.google.ads.googleads.v19.common.DynamicHotelsAndRentalsAsset getDynamicHotelsAndRentalsAsset(); /** * <pre> * A dynamic hotels and rentals asset. * </pre> * * <code>.google.ads.googleads.v19.common.DynamicHotelsAndRentalsAsset dynamic_hotels_and_rentals_asset = 32;</code> */ com.google.ads.googleads.v19.common.DynamicHotelsAndRentalsAssetOrBuilder getDynamicHotelsAndRentalsAssetOrBuilder(); /** * <pre> * A dynamic flights asset. * </pre> * * <code>.google.ads.googleads.v19.common.DynamicFlightsAsset dynamic_flights_asset = 33;</code> * @return Whether the dynamicFlightsAsset field is set. */ boolean hasDynamicFlightsAsset(); /** * <pre> * A dynamic flights asset. * </pre> * * <code>.google.ads.googleads.v19.common.DynamicFlightsAsset dynamic_flights_asset = 33;</code> * @return The dynamicFlightsAsset. */ com.google.ads.googleads.v19.common.DynamicFlightsAsset getDynamicFlightsAsset(); /** * <pre> * A dynamic flights asset. * </pre> * * <code>.google.ads.googleads.v19.common.DynamicFlightsAsset dynamic_flights_asset = 33;</code> */ com.google.ads.googleads.v19.common.DynamicFlightsAssetOrBuilder getDynamicFlightsAssetOrBuilder(); /** * <pre> * Immutable. A Demand Gen carousel card asset. * </pre> * * <code>.google.ads.googleads.v19.common.DemandGenCarouselCardAsset demand_gen_carousel_card_asset = 50 [(.google.api.field_behavior) = IMMUTABLE];</code> * @return Whether the demandGenCarouselCardAsset field is set. */ boolean hasDemandGenCarouselCardAsset(); /** * <pre> * Immutable. 
A Demand Gen carousel card asset. * </pre> * * <code>.google.ads.googleads.v19.common.DemandGenCarouselCardAsset demand_gen_carousel_card_asset = 50 [(.google.api.field_behavior) = IMMUTABLE];</code> * @return The demandGenCarouselCardAsset. */ com.google.ads.googleads.v19.common.DemandGenCarouselCardAsset getDemandGenCarouselCardAsset(); /** * <pre> * Immutable. A Demand Gen carousel card asset. * </pre> * * <code>.google.ads.googleads.v19.common.DemandGenCarouselCardAsset demand_gen_carousel_card_asset = 50 [(.google.api.field_behavior) = IMMUTABLE];</code> */ com.google.ads.googleads.v19.common.DemandGenCarouselCardAssetOrBuilder getDemandGenCarouselCardAssetOrBuilder(); /** * <pre> * A dynamic travel asset. * </pre> * * <code>.google.ads.googleads.v19.common.DynamicTravelAsset dynamic_travel_asset = 35;</code> * @return Whether the dynamicTravelAsset field is set. */ boolean hasDynamicTravelAsset(); /** * <pre> * A dynamic travel asset. * </pre> * * <code>.google.ads.googleads.v19.common.DynamicTravelAsset dynamic_travel_asset = 35;</code> * @return The dynamicTravelAsset. */ com.google.ads.googleads.v19.common.DynamicTravelAsset getDynamicTravelAsset(); /** * <pre> * A dynamic travel asset. * </pre> * * <code>.google.ads.googleads.v19.common.DynamicTravelAsset dynamic_travel_asset = 35;</code> */ com.google.ads.googleads.v19.common.DynamicTravelAssetOrBuilder getDynamicTravelAssetOrBuilder(); /** * <pre> * A dynamic local asset. * </pre> * * <code>.google.ads.googleads.v19.common.DynamicLocalAsset dynamic_local_asset = 36;</code> * @return Whether the dynamicLocalAsset field is set. */ boolean hasDynamicLocalAsset(); /** * <pre> * A dynamic local asset. * </pre> * * <code>.google.ads.googleads.v19.common.DynamicLocalAsset dynamic_local_asset = 36;</code> * @return The dynamicLocalAsset. */ com.google.ads.googleads.v19.common.DynamicLocalAsset getDynamicLocalAsset(); /** * <pre> * A dynamic local asset. 
* </pre> * * <code>.google.ads.googleads.v19.common.DynamicLocalAsset dynamic_local_asset = 36;</code> */ com.google.ads.googleads.v19.common.DynamicLocalAssetOrBuilder getDynamicLocalAssetOrBuilder(); /** * <pre> * A dynamic jobs asset. * </pre> * * <code>.google.ads.googleads.v19.common.DynamicJobsAsset dynamic_jobs_asset = 37;</code> * @return Whether the dynamicJobsAsset field is set. */ boolean hasDynamicJobsAsset(); /** * <pre> * A dynamic jobs asset. * </pre> * * <code>.google.ads.googleads.v19.common.DynamicJobsAsset dynamic_jobs_asset = 37;</code> * @return The dynamicJobsAsset. */ com.google.ads.googleads.v19.common.DynamicJobsAsset getDynamicJobsAsset(); /** * <pre> * A dynamic jobs asset. * </pre> * * <code>.google.ads.googleads.v19.common.DynamicJobsAsset dynamic_jobs_asset = 37;</code> */ com.google.ads.googleads.v19.common.DynamicJobsAssetOrBuilder getDynamicJobsAssetOrBuilder(); /** * <pre> * Output only. A location asset. * </pre> * * <code>.google.ads.googleads.v19.common.LocationAsset location_asset = 39 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return Whether the locationAsset field is set. */ boolean hasLocationAsset(); /** * <pre> * Output only. A location asset. * </pre> * * <code>.google.ads.googleads.v19.common.LocationAsset location_asset = 39 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return The locationAsset. */ com.google.ads.googleads.v19.common.LocationAsset getLocationAsset(); /** * <pre> * Output only. A location asset. * </pre> * * <code>.google.ads.googleads.v19.common.LocationAsset location_asset = 39 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> */ com.google.ads.googleads.v19.common.LocationAssetOrBuilder getLocationAssetOrBuilder(); /** * <pre> * Immutable. A hotel property asset. * </pre> * * <code>.google.ads.googleads.v19.common.HotelPropertyAsset hotel_property_asset = 41 [(.google.api.field_behavior) = IMMUTABLE];</code> * @return Whether the hotelPropertyAsset field is set. 
*/ boolean hasHotelPropertyAsset(); /** * <pre> * Immutable. A hotel property asset. * </pre> * * <code>.google.ads.googleads.v19.common.HotelPropertyAsset hotel_property_asset = 41 [(.google.api.field_behavior) = IMMUTABLE];</code> * @return The hotelPropertyAsset. */ com.google.ads.googleads.v19.common.HotelPropertyAsset getHotelPropertyAsset(); /** * <pre> * Immutable. A hotel property asset. * </pre> * * <code>.google.ads.googleads.v19.common.HotelPropertyAsset hotel_property_asset = 41 [(.google.api.field_behavior) = IMMUTABLE];</code> */ com.google.ads.googleads.v19.common.HotelPropertyAssetOrBuilder getHotelPropertyAssetOrBuilder(); /** * <pre> * A business message asset. * </pre> * * <code>.google.ads.googleads.v19.common.BusinessMessageAsset business_message_asset = 51;</code> * @return Whether the businessMessageAsset field is set. */ boolean hasBusinessMessageAsset(); /** * <pre> * A business message asset. * </pre> * * <code>.google.ads.googleads.v19.common.BusinessMessageAsset business_message_asset = 51;</code> * @return The businessMessageAsset. */ com.google.ads.googleads.v19.common.BusinessMessageAsset getBusinessMessageAsset(); /** * <pre> * A business message asset. * </pre> * * <code>.google.ads.googleads.v19.common.BusinessMessageAsset business_message_asset = 51;</code> */ com.google.ads.googleads.v19.common.BusinessMessageAssetOrBuilder getBusinessMessageAssetOrBuilder(); /** * <pre> * Immutable. An app deep link asset. * </pre> * * <code>.google.ads.googleads.v19.common.AppDeepLinkAsset app_deep_link_asset = 52 [(.google.api.field_behavior) = IMMUTABLE];</code> * @return Whether the appDeepLinkAsset field is set. */ boolean hasAppDeepLinkAsset(); /** * <pre> * Immutable. An app deep link asset. * </pre> * * <code>.google.ads.googleads.v19.common.AppDeepLinkAsset app_deep_link_asset = 52 [(.google.api.field_behavior) = IMMUTABLE];</code> * @return The appDeepLinkAsset. 
*/ com.google.ads.googleads.v19.common.AppDeepLinkAsset getAppDeepLinkAsset(); /** * <pre> * Immutable. An app deep link asset. * </pre> * * <code>.google.ads.googleads.v19.common.AppDeepLinkAsset app_deep_link_asset = 52 [(.google.api.field_behavior) = IMMUTABLE];</code> */ com.google.ads.googleads.v19.common.AppDeepLinkAssetOrBuilder getAppDeepLinkAssetOrBuilder(); com.google.ads.googleads.v19.resources.Asset.AssetDataCase getAssetDataCase(); }
googleads/google-ads-java
36,596
google-ads-stubs-v20/src/main/java/com/google/ads/googleads/v20/resources/AssetOrBuilder.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v20/resources/asset.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v20.resources; public interface AssetOrBuilder extends // @@protoc_insertion_point(interface_extends:google.ads.googleads.v20.resources.Asset) com.google.protobuf.MessageOrBuilder { /** * <pre> * Immutable. The resource name of the asset. * Asset resource names have the form: * * `customers/{customer_id}/assets/{asset_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The resourceName. */ java.lang.String getResourceName(); /** * <pre> * Immutable. The resource name of the asset. * Asset resource names have the form: * * `customers/{customer_id}/assets/{asset_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The bytes for resourceName. */ com.google.protobuf.ByteString getResourceNameBytes(); /** * <pre> * Output only. The ID of the asset. * </pre> * * <code>optional int64 id = 11 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return Whether the id field is set. */ boolean hasId(); /** * <pre> * Output only. The ID of the asset. * </pre> * * <code>optional int64 id = 11 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return The id. */ long getId(); /** * <pre> * Optional name of the asset. * </pre> * * <code>optional string name = 12;</code> * @return Whether the name field is set. */ boolean hasName(); /** * <pre> * Optional name of the asset. * </pre> * * <code>optional string name = 12;</code> * @return The name. */ java.lang.String getName(); /** * <pre> * Optional name of the asset. * </pre> * * <code>optional string name = 12;</code> * @return The bytes for name. */ com.google.protobuf.ByteString getNameBytes(); /** * <pre> * Output only. Type of the asset. 
* </pre> * * <code>.google.ads.googleads.v20.enums.AssetTypeEnum.AssetType type = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return The enum numeric value on the wire for type. */ int getTypeValue(); /** * <pre> * Output only. Type of the asset. * </pre> * * <code>.google.ads.googleads.v20.enums.AssetTypeEnum.AssetType type = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return The type. */ com.google.ads.googleads.v20.enums.AssetTypeEnum.AssetType getType(); /** * <pre> * A list of possible final URLs after all cross domain redirects. * </pre> * * <code>repeated string final_urls = 14;</code> * @return A list containing the finalUrls. */ java.util.List<java.lang.String> getFinalUrlsList(); /** * <pre> * A list of possible final URLs after all cross domain redirects. * </pre> * * <code>repeated string final_urls = 14;</code> * @return The count of finalUrls. */ int getFinalUrlsCount(); /** * <pre> * A list of possible final URLs after all cross domain redirects. * </pre> * * <code>repeated string final_urls = 14;</code> * @param index The index of the element to return. * @return The finalUrls at the given index. */ java.lang.String getFinalUrls(int index); /** * <pre> * A list of possible final URLs after all cross domain redirects. * </pre> * * <code>repeated string final_urls = 14;</code> * @param index The index of the value to return. * @return The bytes of the finalUrls at the given index. */ com.google.protobuf.ByteString getFinalUrlsBytes(int index); /** * <pre> * A list of possible final mobile URLs after all cross domain redirects. * </pre> * * <code>repeated string final_mobile_urls = 16;</code> * @return A list containing the finalMobileUrls. */ java.util.List<java.lang.String> getFinalMobileUrlsList(); /** * <pre> * A list of possible final mobile URLs after all cross domain redirects. * </pre> * * <code>repeated string final_mobile_urls = 16;</code> * @return The count of finalMobileUrls. 
*/ int getFinalMobileUrlsCount(); /** * <pre> * A list of possible final mobile URLs after all cross domain redirects. * </pre> * * <code>repeated string final_mobile_urls = 16;</code> * @param index The index of the element to return. * @return The finalMobileUrls at the given index. */ java.lang.String getFinalMobileUrls(int index); /** * <pre> * A list of possible final mobile URLs after all cross domain redirects. * </pre> * * <code>repeated string final_mobile_urls = 16;</code> * @param index The index of the value to return. * @return The bytes of the finalMobileUrls at the given index. */ com.google.protobuf.ByteString getFinalMobileUrlsBytes(int index); /** * <pre> * URL template for constructing a tracking URL. * </pre> * * <code>optional string tracking_url_template = 17;</code> * @return Whether the trackingUrlTemplate field is set. */ boolean hasTrackingUrlTemplate(); /** * <pre> * URL template for constructing a tracking URL. * </pre> * * <code>optional string tracking_url_template = 17;</code> * @return The trackingUrlTemplate. */ java.lang.String getTrackingUrlTemplate(); /** * <pre> * URL template for constructing a tracking URL. * </pre> * * <code>optional string tracking_url_template = 17;</code> * @return The bytes for trackingUrlTemplate. */ com.google.protobuf.ByteString getTrackingUrlTemplateBytes(); /** * <pre> * A list of mappings to be used for substituting URL custom parameter tags in * the tracking_url_template, final_urls, and/or final_mobile_urls. * </pre> * * <code>repeated .google.ads.googleads.v20.common.CustomParameter url_custom_parameters = 18;</code> */ java.util.List<com.google.ads.googleads.v20.common.CustomParameter> getUrlCustomParametersList(); /** * <pre> * A list of mappings to be used for substituting URL custom parameter tags in * the tracking_url_template, final_urls, and/or final_mobile_urls. 
* </pre> * * <code>repeated .google.ads.googleads.v20.common.CustomParameter url_custom_parameters = 18;</code> */ com.google.ads.googleads.v20.common.CustomParameter getUrlCustomParameters(int index); /** * <pre> * A list of mappings to be used for substituting URL custom parameter tags in * the tracking_url_template, final_urls, and/or final_mobile_urls. * </pre> * * <code>repeated .google.ads.googleads.v20.common.CustomParameter url_custom_parameters = 18;</code> */ int getUrlCustomParametersCount(); /** * <pre> * A list of mappings to be used for substituting URL custom parameter tags in * the tracking_url_template, final_urls, and/or final_mobile_urls. * </pre> * * <code>repeated .google.ads.googleads.v20.common.CustomParameter url_custom_parameters = 18;</code> */ java.util.List<? extends com.google.ads.googleads.v20.common.CustomParameterOrBuilder> getUrlCustomParametersOrBuilderList(); /** * <pre> * A list of mappings to be used for substituting URL custom parameter tags in * the tracking_url_template, final_urls, and/or final_mobile_urls. * </pre> * * <code>repeated .google.ads.googleads.v20.common.CustomParameter url_custom_parameters = 18;</code> */ com.google.ads.googleads.v20.common.CustomParameterOrBuilder getUrlCustomParametersOrBuilder( int index); /** * <pre> * URL template for appending params to landing page URLs served with parallel * tracking. * </pre> * * <code>optional string final_url_suffix = 19;</code> * @return Whether the finalUrlSuffix field is set. */ boolean hasFinalUrlSuffix(); /** * <pre> * URL template for appending params to landing page URLs served with parallel * tracking. * </pre> * * <code>optional string final_url_suffix = 19;</code> * @return The finalUrlSuffix. */ java.lang.String getFinalUrlSuffix(); /** * <pre> * URL template for appending params to landing page URLs served with parallel * tracking. * </pre> * * <code>optional string final_url_suffix = 19;</code> * @return The bytes for finalUrlSuffix. 
*/ com.google.protobuf.ByteString getFinalUrlSuffixBytes(); /** * <pre> * Output only. Source of the asset. * </pre> * * <code>.google.ads.googleads.v20.enums.AssetSourceEnum.AssetSource source = 38 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return The enum numeric value on the wire for source. */ int getSourceValue(); /** * <pre> * Output only. Source of the asset. * </pre> * * <code>.google.ads.googleads.v20.enums.AssetSourceEnum.AssetSource source = 38 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return The source. */ com.google.ads.googleads.v20.enums.AssetSourceEnum.AssetSource getSource(); /** * <pre> * Output only. Policy information for the asset. * </pre> * * <code>.google.ads.googleads.v20.resources.AssetPolicySummary policy_summary = 13 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return Whether the policySummary field is set. */ boolean hasPolicySummary(); /** * <pre> * Output only. Policy information for the asset. * </pre> * * <code>.google.ads.googleads.v20.resources.AssetPolicySummary policy_summary = 13 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return The policySummary. */ com.google.ads.googleads.v20.resources.AssetPolicySummary getPolicySummary(); /** * <pre> * Output only. Policy information for the asset. * </pre> * * <code>.google.ads.googleads.v20.resources.AssetPolicySummary policy_summary = 13 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> */ com.google.ads.googleads.v20.resources.AssetPolicySummaryOrBuilder getPolicySummaryOrBuilder(); /** * <pre> * Output only. Policy information for the asset for each FieldType. * </pre> * * <code>repeated .google.ads.googleads.v20.resources.AssetFieldTypePolicySummary field_type_policy_summaries = 40 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> */ java.util.List<com.google.ads.googleads.v20.resources.AssetFieldTypePolicySummary> getFieldTypePolicySummariesList(); /** * <pre> * Output only. Policy information for the asset for each FieldType. 
* </pre> * * <code>repeated .google.ads.googleads.v20.resources.AssetFieldTypePolicySummary field_type_policy_summaries = 40 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> */ com.google.ads.googleads.v20.resources.AssetFieldTypePolicySummary getFieldTypePolicySummaries(int index); /** * <pre> * Output only. Policy information for the asset for each FieldType. * </pre> * * <code>repeated .google.ads.googleads.v20.resources.AssetFieldTypePolicySummary field_type_policy_summaries = 40 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> */ int getFieldTypePolicySummariesCount(); /** * <pre> * Output only. Policy information for the asset for each FieldType. * </pre> * * <code>repeated .google.ads.googleads.v20.resources.AssetFieldTypePolicySummary field_type_policy_summaries = 40 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> */ java.util.List<? extends com.google.ads.googleads.v20.resources.AssetFieldTypePolicySummaryOrBuilder> getFieldTypePolicySummariesOrBuilderList(); /** * <pre> * Output only. Policy information for the asset for each FieldType. * </pre> * * <code>repeated .google.ads.googleads.v20.resources.AssetFieldTypePolicySummary field_type_policy_summaries = 40 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> */ com.google.ads.googleads.v20.resources.AssetFieldTypePolicySummaryOrBuilder getFieldTypePolicySummariesOrBuilder( int index); /** * <pre> * Immutable. A YouTube video asset. * </pre> * * <code>.google.ads.googleads.v20.common.YoutubeVideoAsset youtube_video_asset = 5 [(.google.api.field_behavior) = IMMUTABLE];</code> * @return Whether the youtubeVideoAsset field is set. */ boolean hasYoutubeVideoAsset(); /** * <pre> * Immutable. A YouTube video asset. * </pre> * * <code>.google.ads.googleads.v20.common.YoutubeVideoAsset youtube_video_asset = 5 [(.google.api.field_behavior) = IMMUTABLE];</code> * @return The youtubeVideoAsset. */ com.google.ads.googleads.v20.common.YoutubeVideoAsset getYoutubeVideoAsset(); /** * <pre> * Immutable. 
A YouTube video asset. * </pre> * * <code>.google.ads.googleads.v20.common.YoutubeVideoAsset youtube_video_asset = 5 [(.google.api.field_behavior) = IMMUTABLE];</code> */ com.google.ads.googleads.v20.common.YoutubeVideoAssetOrBuilder getYoutubeVideoAssetOrBuilder(); /** * <pre> * Immutable. A media bundle asset. * </pre> * * <code>.google.ads.googleads.v20.common.MediaBundleAsset media_bundle_asset = 6 [(.google.api.field_behavior) = IMMUTABLE];</code> * @return Whether the mediaBundleAsset field is set. */ boolean hasMediaBundleAsset(); /** * <pre> * Immutable. A media bundle asset. * </pre> * * <code>.google.ads.googleads.v20.common.MediaBundleAsset media_bundle_asset = 6 [(.google.api.field_behavior) = IMMUTABLE];</code> * @return The mediaBundleAsset. */ com.google.ads.googleads.v20.common.MediaBundleAsset getMediaBundleAsset(); /** * <pre> * Immutable. A media bundle asset. * </pre> * * <code>.google.ads.googleads.v20.common.MediaBundleAsset media_bundle_asset = 6 [(.google.api.field_behavior) = IMMUTABLE];</code> */ com.google.ads.googleads.v20.common.MediaBundleAssetOrBuilder getMediaBundleAssetOrBuilder(); /** * <pre> * Output only. An image asset. * </pre> * * <code>.google.ads.googleads.v20.common.ImageAsset image_asset = 7 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return Whether the imageAsset field is set. */ boolean hasImageAsset(); /** * <pre> * Output only. An image asset. * </pre> * * <code>.google.ads.googleads.v20.common.ImageAsset image_asset = 7 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return The imageAsset. */ com.google.ads.googleads.v20.common.ImageAsset getImageAsset(); /** * <pre> * Output only. An image asset. * </pre> * * <code>.google.ads.googleads.v20.common.ImageAsset image_asset = 7 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> */ com.google.ads.googleads.v20.common.ImageAssetOrBuilder getImageAssetOrBuilder(); /** * <pre> * Immutable. A text asset. 
* </pre> * * <code>.google.ads.googleads.v20.common.TextAsset text_asset = 8 [(.google.api.field_behavior) = IMMUTABLE];</code> * @return Whether the textAsset field is set. */ boolean hasTextAsset(); /** * <pre> * Immutable. A text asset. * </pre> * * <code>.google.ads.googleads.v20.common.TextAsset text_asset = 8 [(.google.api.field_behavior) = IMMUTABLE];</code> * @return The textAsset. */ com.google.ads.googleads.v20.common.TextAsset getTextAsset(); /** * <pre> * Immutable. A text asset. * </pre> * * <code>.google.ads.googleads.v20.common.TextAsset text_asset = 8 [(.google.api.field_behavior) = IMMUTABLE];</code> */ com.google.ads.googleads.v20.common.TextAssetOrBuilder getTextAssetOrBuilder(); /** * <pre> * A lead form asset. * </pre> * * <code>.google.ads.googleads.v20.common.LeadFormAsset lead_form_asset = 9;</code> * @return Whether the leadFormAsset field is set. */ boolean hasLeadFormAsset(); /** * <pre> * A lead form asset. * </pre> * * <code>.google.ads.googleads.v20.common.LeadFormAsset lead_form_asset = 9;</code> * @return The leadFormAsset. */ com.google.ads.googleads.v20.common.LeadFormAsset getLeadFormAsset(); /** * <pre> * A lead form asset. * </pre> * * <code>.google.ads.googleads.v20.common.LeadFormAsset lead_form_asset = 9;</code> */ com.google.ads.googleads.v20.common.LeadFormAssetOrBuilder getLeadFormAssetOrBuilder(); /** * <pre> * A book on google asset. * </pre> * * <code>.google.ads.googleads.v20.common.BookOnGoogleAsset book_on_google_asset = 10;</code> * @return Whether the bookOnGoogleAsset field is set. */ boolean hasBookOnGoogleAsset(); /** * <pre> * A book on google asset. * </pre> * * <code>.google.ads.googleads.v20.common.BookOnGoogleAsset book_on_google_asset = 10;</code> * @return The bookOnGoogleAsset. */ com.google.ads.googleads.v20.common.BookOnGoogleAsset getBookOnGoogleAsset(); /** * <pre> * A book on google asset. 
* </pre> * * <code>.google.ads.googleads.v20.common.BookOnGoogleAsset book_on_google_asset = 10;</code> */ com.google.ads.googleads.v20.common.BookOnGoogleAssetOrBuilder getBookOnGoogleAssetOrBuilder(); /** * <pre> * A promotion asset. * </pre> * * <code>.google.ads.googleads.v20.common.PromotionAsset promotion_asset = 15;</code> * @return Whether the promotionAsset field is set. */ boolean hasPromotionAsset(); /** * <pre> * A promotion asset. * </pre> * * <code>.google.ads.googleads.v20.common.PromotionAsset promotion_asset = 15;</code> * @return The promotionAsset. */ com.google.ads.googleads.v20.common.PromotionAsset getPromotionAsset(); /** * <pre> * A promotion asset. * </pre> * * <code>.google.ads.googleads.v20.common.PromotionAsset promotion_asset = 15;</code> */ com.google.ads.googleads.v20.common.PromotionAssetOrBuilder getPromotionAssetOrBuilder(); /** * <pre> * A callout asset. * </pre> * * <code>.google.ads.googleads.v20.common.CalloutAsset callout_asset = 20;</code> * @return Whether the calloutAsset field is set. */ boolean hasCalloutAsset(); /** * <pre> * A callout asset. * </pre> * * <code>.google.ads.googleads.v20.common.CalloutAsset callout_asset = 20;</code> * @return The calloutAsset. */ com.google.ads.googleads.v20.common.CalloutAsset getCalloutAsset(); /** * <pre> * A callout asset. * </pre> * * <code>.google.ads.googleads.v20.common.CalloutAsset callout_asset = 20;</code> */ com.google.ads.googleads.v20.common.CalloutAssetOrBuilder getCalloutAssetOrBuilder(); /** * <pre> * A structured snippet asset. * </pre> * * <code>.google.ads.googleads.v20.common.StructuredSnippetAsset structured_snippet_asset = 21;</code> * @return Whether the structuredSnippetAsset field is set. */ boolean hasStructuredSnippetAsset(); /** * <pre> * A structured snippet asset. * </pre> * * <code>.google.ads.googleads.v20.common.StructuredSnippetAsset structured_snippet_asset = 21;</code> * @return The structuredSnippetAsset. 
*/ com.google.ads.googleads.v20.common.StructuredSnippetAsset getStructuredSnippetAsset(); /** * <pre> * A structured snippet asset. * </pre> * * <code>.google.ads.googleads.v20.common.StructuredSnippetAsset structured_snippet_asset = 21;</code> */ com.google.ads.googleads.v20.common.StructuredSnippetAssetOrBuilder getStructuredSnippetAssetOrBuilder(); /** * <pre> * A sitelink asset. * </pre> * * <code>.google.ads.googleads.v20.common.SitelinkAsset sitelink_asset = 22;</code> * @return Whether the sitelinkAsset field is set. */ boolean hasSitelinkAsset(); /** * <pre> * A sitelink asset. * </pre> * * <code>.google.ads.googleads.v20.common.SitelinkAsset sitelink_asset = 22;</code> * @return The sitelinkAsset. */ com.google.ads.googleads.v20.common.SitelinkAsset getSitelinkAsset(); /** * <pre> * A sitelink asset. * </pre> * * <code>.google.ads.googleads.v20.common.SitelinkAsset sitelink_asset = 22;</code> */ com.google.ads.googleads.v20.common.SitelinkAssetOrBuilder getSitelinkAssetOrBuilder(); /** * <pre> * A page feed asset. * </pre> * * <code>.google.ads.googleads.v20.common.PageFeedAsset page_feed_asset = 23;</code> * @return Whether the pageFeedAsset field is set. */ boolean hasPageFeedAsset(); /** * <pre> * A page feed asset. * </pre> * * <code>.google.ads.googleads.v20.common.PageFeedAsset page_feed_asset = 23;</code> * @return The pageFeedAsset. */ com.google.ads.googleads.v20.common.PageFeedAsset getPageFeedAsset(); /** * <pre> * A page feed asset. * </pre> * * <code>.google.ads.googleads.v20.common.PageFeedAsset page_feed_asset = 23;</code> */ com.google.ads.googleads.v20.common.PageFeedAssetOrBuilder getPageFeedAssetOrBuilder(); /** * <pre> * A dynamic education asset. * </pre> * * <code>.google.ads.googleads.v20.common.DynamicEducationAsset dynamic_education_asset = 24;</code> * @return Whether the dynamicEducationAsset field is set. */ boolean hasDynamicEducationAsset(); /** * <pre> * A dynamic education asset. 
* </pre> * * <code>.google.ads.googleads.v20.common.DynamicEducationAsset dynamic_education_asset = 24;</code> * @return The dynamicEducationAsset. */ com.google.ads.googleads.v20.common.DynamicEducationAsset getDynamicEducationAsset(); /** * <pre> * A dynamic education asset. * </pre> * * <code>.google.ads.googleads.v20.common.DynamicEducationAsset dynamic_education_asset = 24;</code> */ com.google.ads.googleads.v20.common.DynamicEducationAssetOrBuilder getDynamicEducationAssetOrBuilder(); /** * <pre> * A mobile app asset. * </pre> * * <code>.google.ads.googleads.v20.common.MobileAppAsset mobile_app_asset = 25;</code> * @return Whether the mobileAppAsset field is set. */ boolean hasMobileAppAsset(); /** * <pre> * A mobile app asset. * </pre> * * <code>.google.ads.googleads.v20.common.MobileAppAsset mobile_app_asset = 25;</code> * @return The mobileAppAsset. */ com.google.ads.googleads.v20.common.MobileAppAsset getMobileAppAsset(); /** * <pre> * A mobile app asset. * </pre> * * <code>.google.ads.googleads.v20.common.MobileAppAsset mobile_app_asset = 25;</code> */ com.google.ads.googleads.v20.common.MobileAppAssetOrBuilder getMobileAppAssetOrBuilder(); /** * <pre> * A hotel callout asset. * </pre> * * <code>.google.ads.googleads.v20.common.HotelCalloutAsset hotel_callout_asset = 26;</code> * @return Whether the hotelCalloutAsset field is set. */ boolean hasHotelCalloutAsset(); /** * <pre> * A hotel callout asset. * </pre> * * <code>.google.ads.googleads.v20.common.HotelCalloutAsset hotel_callout_asset = 26;</code> * @return The hotelCalloutAsset. */ com.google.ads.googleads.v20.common.HotelCalloutAsset getHotelCalloutAsset(); /** * <pre> * A hotel callout asset. * </pre> * * <code>.google.ads.googleads.v20.common.HotelCalloutAsset hotel_callout_asset = 26;</code> */ com.google.ads.googleads.v20.common.HotelCalloutAssetOrBuilder getHotelCalloutAssetOrBuilder(); /** * <pre> * A call asset. 
* </pre> * * <code>.google.ads.googleads.v20.common.CallAsset call_asset = 27;</code> * @return Whether the callAsset field is set. */ boolean hasCallAsset(); /** * <pre> * A call asset. * </pre> * * <code>.google.ads.googleads.v20.common.CallAsset call_asset = 27;</code> * @return The callAsset. */ com.google.ads.googleads.v20.common.CallAsset getCallAsset(); /** * <pre> * A call asset. * </pre> * * <code>.google.ads.googleads.v20.common.CallAsset call_asset = 27;</code> */ com.google.ads.googleads.v20.common.CallAssetOrBuilder getCallAssetOrBuilder(); /** * <pre> * A price asset. * </pre> * * <code>.google.ads.googleads.v20.common.PriceAsset price_asset = 28;</code> * @return Whether the priceAsset field is set. */ boolean hasPriceAsset(); /** * <pre> * A price asset. * </pre> * * <code>.google.ads.googleads.v20.common.PriceAsset price_asset = 28;</code> * @return The priceAsset. */ com.google.ads.googleads.v20.common.PriceAsset getPriceAsset(); /** * <pre> * A price asset. * </pre> * * <code>.google.ads.googleads.v20.common.PriceAsset price_asset = 28;</code> */ com.google.ads.googleads.v20.common.PriceAssetOrBuilder getPriceAssetOrBuilder(); /** * <pre> * Immutable. A call to action asset. * </pre> * * <code>.google.ads.googleads.v20.common.CallToActionAsset call_to_action_asset = 29 [(.google.api.field_behavior) = IMMUTABLE];</code> * @return Whether the callToActionAsset field is set. */ boolean hasCallToActionAsset(); /** * <pre> * Immutable. A call to action asset. * </pre> * * <code>.google.ads.googleads.v20.common.CallToActionAsset call_to_action_asset = 29 [(.google.api.field_behavior) = IMMUTABLE];</code> * @return The callToActionAsset. */ com.google.ads.googleads.v20.common.CallToActionAsset getCallToActionAsset(); /** * <pre> * Immutable. A call to action asset. 
* </pre> * * <code>.google.ads.googleads.v20.common.CallToActionAsset call_to_action_asset = 29 [(.google.api.field_behavior) = IMMUTABLE];</code> */ com.google.ads.googleads.v20.common.CallToActionAssetOrBuilder getCallToActionAssetOrBuilder(); /** * <pre> * A dynamic real estate asset. * </pre> * * <code>.google.ads.googleads.v20.common.DynamicRealEstateAsset dynamic_real_estate_asset = 30;</code> * @return Whether the dynamicRealEstateAsset field is set. */ boolean hasDynamicRealEstateAsset(); /** * <pre> * A dynamic real estate asset. * </pre> * * <code>.google.ads.googleads.v20.common.DynamicRealEstateAsset dynamic_real_estate_asset = 30;</code> * @return The dynamicRealEstateAsset. */ com.google.ads.googleads.v20.common.DynamicRealEstateAsset getDynamicRealEstateAsset(); /** * <pre> * A dynamic real estate asset. * </pre> * * <code>.google.ads.googleads.v20.common.DynamicRealEstateAsset dynamic_real_estate_asset = 30;</code> */ com.google.ads.googleads.v20.common.DynamicRealEstateAssetOrBuilder getDynamicRealEstateAssetOrBuilder(); /** * <pre> * A dynamic custom asset. * </pre> * * <code>.google.ads.googleads.v20.common.DynamicCustomAsset dynamic_custom_asset = 31;</code> * @return Whether the dynamicCustomAsset field is set. */ boolean hasDynamicCustomAsset(); /** * <pre> * A dynamic custom asset. * </pre> * * <code>.google.ads.googleads.v20.common.DynamicCustomAsset dynamic_custom_asset = 31;</code> * @return The dynamicCustomAsset. */ com.google.ads.googleads.v20.common.DynamicCustomAsset getDynamicCustomAsset(); /** * <pre> * A dynamic custom asset. * </pre> * * <code>.google.ads.googleads.v20.common.DynamicCustomAsset dynamic_custom_asset = 31;</code> */ com.google.ads.googleads.v20.common.DynamicCustomAssetOrBuilder getDynamicCustomAssetOrBuilder(); /** * <pre> * A dynamic hotels and rentals asset. 
* </pre> * * <code>.google.ads.googleads.v20.common.DynamicHotelsAndRentalsAsset dynamic_hotels_and_rentals_asset = 32;</code> * @return Whether the dynamicHotelsAndRentalsAsset field is set. */ boolean hasDynamicHotelsAndRentalsAsset(); /** * <pre> * A dynamic hotels and rentals asset. * </pre> * * <code>.google.ads.googleads.v20.common.DynamicHotelsAndRentalsAsset dynamic_hotels_and_rentals_asset = 32;</code> * @return The dynamicHotelsAndRentalsAsset. */ com.google.ads.googleads.v20.common.DynamicHotelsAndRentalsAsset getDynamicHotelsAndRentalsAsset(); /** * <pre> * A dynamic hotels and rentals asset. * </pre> * * <code>.google.ads.googleads.v20.common.DynamicHotelsAndRentalsAsset dynamic_hotels_and_rentals_asset = 32;</code> */ com.google.ads.googleads.v20.common.DynamicHotelsAndRentalsAssetOrBuilder getDynamicHotelsAndRentalsAssetOrBuilder(); /** * <pre> * A dynamic flights asset. * </pre> * * <code>.google.ads.googleads.v20.common.DynamicFlightsAsset dynamic_flights_asset = 33;</code> * @return Whether the dynamicFlightsAsset field is set. */ boolean hasDynamicFlightsAsset(); /** * <pre> * A dynamic flights asset. * </pre> * * <code>.google.ads.googleads.v20.common.DynamicFlightsAsset dynamic_flights_asset = 33;</code> * @return The dynamicFlightsAsset. */ com.google.ads.googleads.v20.common.DynamicFlightsAsset getDynamicFlightsAsset(); /** * <pre> * A dynamic flights asset. * </pre> * * <code>.google.ads.googleads.v20.common.DynamicFlightsAsset dynamic_flights_asset = 33;</code> */ com.google.ads.googleads.v20.common.DynamicFlightsAssetOrBuilder getDynamicFlightsAssetOrBuilder(); /** * <pre> * Immutable. A Demand Gen carousel card asset. * </pre> * * <code>.google.ads.googleads.v20.common.DemandGenCarouselCardAsset demand_gen_carousel_card_asset = 50 [(.google.api.field_behavior) = IMMUTABLE];</code> * @return Whether the demandGenCarouselCardAsset field is set. */ boolean hasDemandGenCarouselCardAsset(); /** * <pre> * Immutable. 
A Demand Gen carousel card asset. * </pre> * * <code>.google.ads.googleads.v20.common.DemandGenCarouselCardAsset demand_gen_carousel_card_asset = 50 [(.google.api.field_behavior) = IMMUTABLE];</code> * @return The demandGenCarouselCardAsset. */ com.google.ads.googleads.v20.common.DemandGenCarouselCardAsset getDemandGenCarouselCardAsset(); /** * <pre> * Immutable. A Demand Gen carousel card asset. * </pre> * * <code>.google.ads.googleads.v20.common.DemandGenCarouselCardAsset demand_gen_carousel_card_asset = 50 [(.google.api.field_behavior) = IMMUTABLE];</code> */ com.google.ads.googleads.v20.common.DemandGenCarouselCardAssetOrBuilder getDemandGenCarouselCardAssetOrBuilder(); /** * <pre> * A dynamic travel asset. * </pre> * * <code>.google.ads.googleads.v20.common.DynamicTravelAsset dynamic_travel_asset = 35;</code> * @return Whether the dynamicTravelAsset field is set. */ boolean hasDynamicTravelAsset(); /** * <pre> * A dynamic travel asset. * </pre> * * <code>.google.ads.googleads.v20.common.DynamicTravelAsset dynamic_travel_asset = 35;</code> * @return The dynamicTravelAsset. */ com.google.ads.googleads.v20.common.DynamicTravelAsset getDynamicTravelAsset(); /** * <pre> * A dynamic travel asset. * </pre> * * <code>.google.ads.googleads.v20.common.DynamicTravelAsset dynamic_travel_asset = 35;</code> */ com.google.ads.googleads.v20.common.DynamicTravelAssetOrBuilder getDynamicTravelAssetOrBuilder(); /** * <pre> * A dynamic local asset. * </pre> * * <code>.google.ads.googleads.v20.common.DynamicLocalAsset dynamic_local_asset = 36;</code> * @return Whether the dynamicLocalAsset field is set. */ boolean hasDynamicLocalAsset(); /** * <pre> * A dynamic local asset. * </pre> * * <code>.google.ads.googleads.v20.common.DynamicLocalAsset dynamic_local_asset = 36;</code> * @return The dynamicLocalAsset. */ com.google.ads.googleads.v20.common.DynamicLocalAsset getDynamicLocalAsset(); /** * <pre> * A dynamic local asset. 
* </pre> * * <code>.google.ads.googleads.v20.common.DynamicLocalAsset dynamic_local_asset = 36;</code> */ com.google.ads.googleads.v20.common.DynamicLocalAssetOrBuilder getDynamicLocalAssetOrBuilder(); /** * <pre> * A dynamic jobs asset. * </pre> * * <code>.google.ads.googleads.v20.common.DynamicJobsAsset dynamic_jobs_asset = 37;</code> * @return Whether the dynamicJobsAsset field is set. */ boolean hasDynamicJobsAsset(); /** * <pre> * A dynamic jobs asset. * </pre> * * <code>.google.ads.googleads.v20.common.DynamicJobsAsset dynamic_jobs_asset = 37;</code> * @return The dynamicJobsAsset. */ com.google.ads.googleads.v20.common.DynamicJobsAsset getDynamicJobsAsset(); /** * <pre> * A dynamic jobs asset. * </pre> * * <code>.google.ads.googleads.v20.common.DynamicJobsAsset dynamic_jobs_asset = 37;</code> */ com.google.ads.googleads.v20.common.DynamicJobsAssetOrBuilder getDynamicJobsAssetOrBuilder(); /** * <pre> * Output only. A location asset. * </pre> * * <code>.google.ads.googleads.v20.common.LocationAsset location_asset = 39 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return Whether the locationAsset field is set. */ boolean hasLocationAsset(); /** * <pre> * Output only. A location asset. * </pre> * * <code>.google.ads.googleads.v20.common.LocationAsset location_asset = 39 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return The locationAsset. */ com.google.ads.googleads.v20.common.LocationAsset getLocationAsset(); /** * <pre> * Output only. A location asset. * </pre> * * <code>.google.ads.googleads.v20.common.LocationAsset location_asset = 39 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> */ com.google.ads.googleads.v20.common.LocationAssetOrBuilder getLocationAssetOrBuilder(); /** * <pre> * Immutable. A hotel property asset. * </pre> * * <code>.google.ads.googleads.v20.common.HotelPropertyAsset hotel_property_asset = 41 [(.google.api.field_behavior) = IMMUTABLE];</code> * @return Whether the hotelPropertyAsset field is set. 
*/ boolean hasHotelPropertyAsset(); /** * <pre> * Immutable. A hotel property asset. * </pre> * * <code>.google.ads.googleads.v20.common.HotelPropertyAsset hotel_property_asset = 41 [(.google.api.field_behavior) = IMMUTABLE];</code> * @return The hotelPropertyAsset. */ com.google.ads.googleads.v20.common.HotelPropertyAsset getHotelPropertyAsset(); /** * <pre> * Immutable. A hotel property asset. * </pre> * * <code>.google.ads.googleads.v20.common.HotelPropertyAsset hotel_property_asset = 41 [(.google.api.field_behavior) = IMMUTABLE];</code> */ com.google.ads.googleads.v20.common.HotelPropertyAssetOrBuilder getHotelPropertyAssetOrBuilder(); /** * <pre> * A business message asset. * </pre> * * <code>.google.ads.googleads.v20.common.BusinessMessageAsset business_message_asset = 51;</code> * @return Whether the businessMessageAsset field is set. */ boolean hasBusinessMessageAsset(); /** * <pre> * A business message asset. * </pre> * * <code>.google.ads.googleads.v20.common.BusinessMessageAsset business_message_asset = 51;</code> * @return The businessMessageAsset. */ com.google.ads.googleads.v20.common.BusinessMessageAsset getBusinessMessageAsset(); /** * <pre> * A business message asset. * </pre> * * <code>.google.ads.googleads.v20.common.BusinessMessageAsset business_message_asset = 51;</code> */ com.google.ads.googleads.v20.common.BusinessMessageAssetOrBuilder getBusinessMessageAssetOrBuilder(); /** * <pre> * Immutable. An app deep link asset. * </pre> * * <code>.google.ads.googleads.v20.common.AppDeepLinkAsset app_deep_link_asset = 52 [(.google.api.field_behavior) = IMMUTABLE];</code> * @return Whether the appDeepLinkAsset field is set. */ boolean hasAppDeepLinkAsset(); /** * <pre> * Immutable. An app deep link asset. * </pre> * * <code>.google.ads.googleads.v20.common.AppDeepLinkAsset app_deep_link_asset = 52 [(.google.api.field_behavior) = IMMUTABLE];</code> * @return The appDeepLinkAsset. 
*/ com.google.ads.googleads.v20.common.AppDeepLinkAsset getAppDeepLinkAsset(); /** * <pre> * Immutable. An app deep link asset. * </pre> * * <code>.google.ads.googleads.v20.common.AppDeepLinkAsset app_deep_link_asset = 52 [(.google.api.field_behavior) = IMMUTABLE];</code> */ com.google.ads.googleads.v20.common.AppDeepLinkAssetOrBuilder getAppDeepLinkAssetOrBuilder(); com.google.ads.googleads.v20.resources.Asset.AssetDataCase getAssetDataCase(); }
googleapis/google-cloud-java
36,624
java-video-intelligence/proto-google-cloud-video-intelligence-v1p3beta1/src/main/java/com/google/cloud/videointelligence/v1p3beta1/CelebrityRecognitionAnnotation.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/videointelligence/v1p3beta1/video_intelligence.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.videointelligence.v1p3beta1; /** * * * <pre> * Celebrity recognition annotation per video. * </pre> * * Protobuf type {@code google.cloud.videointelligence.v1p3beta1.CelebrityRecognitionAnnotation} */ public final class CelebrityRecognitionAnnotation extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.videointelligence.v1p3beta1.CelebrityRecognitionAnnotation) CelebrityRecognitionAnnotationOrBuilder { private static final long serialVersionUID = 0L; // Use CelebrityRecognitionAnnotation.newBuilder() to construct. 
private CelebrityRecognitionAnnotation( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CelebrityRecognitionAnnotation() { celebrityTracks_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new CelebrityRecognitionAnnotation(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.videointelligence.v1p3beta1.VideoIntelligenceServiceProto .internal_static_google_cloud_videointelligence_v1p3beta1_CelebrityRecognitionAnnotation_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.videointelligence.v1p3beta1.VideoIntelligenceServiceProto .internal_static_google_cloud_videointelligence_v1p3beta1_CelebrityRecognitionAnnotation_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.videointelligence.v1p3beta1.CelebrityRecognitionAnnotation.class, com.google.cloud.videointelligence.v1p3beta1.CelebrityRecognitionAnnotation.Builder .class); } public static final int CELEBRITY_TRACKS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.videointelligence.v1p3beta1.CelebrityTrack> celebrityTracks_; /** * * * <pre> * The tracks detected from the input video, including recognized celebrities * and other detected faces in the video. * </pre> * * <code>repeated .google.cloud.videointelligence.v1p3beta1.CelebrityTrack celebrity_tracks = 1; * </code> */ @java.lang.Override public java.util.List<com.google.cloud.videointelligence.v1p3beta1.CelebrityTrack> getCelebrityTracksList() { return celebrityTracks_; } /** * * * <pre> * The tracks detected from the input video, including recognized celebrities * and other detected faces in the video. 
* </pre> * * <code>repeated .google.cloud.videointelligence.v1p3beta1.CelebrityTrack celebrity_tracks = 1; * </code> */ @java.lang.Override public java.util.List< ? extends com.google.cloud.videointelligence.v1p3beta1.CelebrityTrackOrBuilder> getCelebrityTracksOrBuilderList() { return celebrityTracks_; } /** * * * <pre> * The tracks detected from the input video, including recognized celebrities * and other detected faces in the video. * </pre> * * <code>repeated .google.cloud.videointelligence.v1p3beta1.CelebrityTrack celebrity_tracks = 1; * </code> */ @java.lang.Override public int getCelebrityTracksCount() { return celebrityTracks_.size(); } /** * * * <pre> * The tracks detected from the input video, including recognized celebrities * and other detected faces in the video. * </pre> * * <code>repeated .google.cloud.videointelligence.v1p3beta1.CelebrityTrack celebrity_tracks = 1; * </code> */ @java.lang.Override public com.google.cloud.videointelligence.v1p3beta1.CelebrityTrack getCelebrityTracks(int index) { return celebrityTracks_.get(index); } /** * * * <pre> * The tracks detected from the input video, including recognized celebrities * and other detected faces in the video. 
* </pre> * * <code>repeated .google.cloud.videointelligence.v1p3beta1.CelebrityTrack celebrity_tracks = 1; * </code> */ @java.lang.Override public com.google.cloud.videointelligence.v1p3beta1.CelebrityTrackOrBuilder getCelebrityTracksOrBuilder(int index) { return celebrityTracks_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < celebrityTracks_.size(); i++) { output.writeMessage(1, celebrityTracks_.get(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < celebrityTracks_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, celebrityTracks_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.videointelligence.v1p3beta1.CelebrityRecognitionAnnotation)) { return super.equals(obj); } com.google.cloud.videointelligence.v1p3beta1.CelebrityRecognitionAnnotation other = (com.google.cloud.videointelligence.v1p3beta1.CelebrityRecognitionAnnotation) obj; if (!getCelebrityTracksList().equals(other.getCelebrityTracksList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getCelebrityTracksCount() > 0) { hash = (37 * hash) + CELEBRITY_TRACKS_FIELD_NUMBER; hash = (53 * 
hash) + getCelebrityTracksList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.videointelligence.v1p3beta1.CelebrityRecognitionAnnotation parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.videointelligence.v1p3beta1.CelebrityRecognitionAnnotation parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.videointelligence.v1p3beta1.CelebrityRecognitionAnnotation parseFrom(com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.videointelligence.v1p3beta1.CelebrityRecognitionAnnotation parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.videointelligence.v1p3beta1.CelebrityRecognitionAnnotation parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.videointelligence.v1p3beta1.CelebrityRecognitionAnnotation parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.videointelligence.v1p3beta1.CelebrityRecognitionAnnotation parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static 
com.google.cloud.videointelligence.v1p3beta1.CelebrityRecognitionAnnotation parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.videointelligence.v1p3beta1.CelebrityRecognitionAnnotation parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.videointelligence.v1p3beta1.CelebrityRecognitionAnnotation parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.videointelligence.v1p3beta1.CelebrityRecognitionAnnotation parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.videointelligence.v1p3beta1.CelebrityRecognitionAnnotation parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.videointelligence.v1p3beta1.CelebrityRecognitionAnnotation prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Celebrity recognition annotation per video. * </pre> * * Protobuf type {@code google.cloud.videointelligence.v1p3beta1.CelebrityRecognitionAnnotation} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.videointelligence.v1p3beta1.CelebrityRecognitionAnnotation) com.google.cloud.videointelligence.v1p3beta1.CelebrityRecognitionAnnotationOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.videointelligence.v1p3beta1.VideoIntelligenceServiceProto .internal_static_google_cloud_videointelligence_v1p3beta1_CelebrityRecognitionAnnotation_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.videointelligence.v1p3beta1.VideoIntelligenceServiceProto .internal_static_google_cloud_videointelligence_v1p3beta1_CelebrityRecognitionAnnotation_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.videointelligence.v1p3beta1.CelebrityRecognitionAnnotation.class, com.google.cloud.videointelligence.v1p3beta1.CelebrityRecognitionAnnotation.Builder .class); } // Construct using // com.google.cloud.videointelligence.v1p3beta1.CelebrityRecognitionAnnotation.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (celebrityTracksBuilder_ == null) { celebrityTracks_ = java.util.Collections.emptyList(); } else { celebrityTracks_ = null; celebrityTracksBuilder_.clear(); } bitField0_ = (bitField0_ & 
~0x00000001); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.videointelligence.v1p3beta1.VideoIntelligenceServiceProto .internal_static_google_cloud_videointelligence_v1p3beta1_CelebrityRecognitionAnnotation_descriptor; } @java.lang.Override public com.google.cloud.videointelligence.v1p3beta1.CelebrityRecognitionAnnotation getDefaultInstanceForType() { return com.google.cloud.videointelligence.v1p3beta1.CelebrityRecognitionAnnotation .getDefaultInstance(); } @java.lang.Override public com.google.cloud.videointelligence.v1p3beta1.CelebrityRecognitionAnnotation build() { com.google.cloud.videointelligence.v1p3beta1.CelebrityRecognitionAnnotation result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.videointelligence.v1p3beta1.CelebrityRecognitionAnnotation buildPartial() { com.google.cloud.videointelligence.v1p3beta1.CelebrityRecognitionAnnotation result = new com.google.cloud.videointelligence.v1p3beta1.CelebrityRecognitionAnnotation(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.videointelligence.v1p3beta1.CelebrityRecognitionAnnotation result) { if (celebrityTracksBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { celebrityTracks_ = java.util.Collections.unmodifiableList(celebrityTracks_); bitField0_ = (bitField0_ & ~0x00000001); } result.celebrityTracks_ = celebrityTracks_; } else { result.celebrityTracks_ = celebrityTracksBuilder_.build(); } } private void buildPartial0( com.google.cloud.videointelligence.v1p3beta1.CelebrityRecognitionAnnotation result) { int from_bitField0_ = bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( 
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.videointelligence.v1p3beta1.CelebrityRecognitionAnnotation) { return mergeFrom( (com.google.cloud.videointelligence.v1p3beta1.CelebrityRecognitionAnnotation) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.videointelligence.v1p3beta1.CelebrityRecognitionAnnotation other) { if (other == com.google.cloud.videointelligence.v1p3beta1.CelebrityRecognitionAnnotation .getDefaultInstance()) return this; if (celebrityTracksBuilder_ == null) { if (!other.celebrityTracks_.isEmpty()) { if (celebrityTracks_.isEmpty()) { celebrityTracks_ = other.celebrityTracks_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureCelebrityTracksIsMutable(); celebrityTracks_.addAll(other.celebrityTracks_); } onChanged(); } } else { if (!other.celebrityTracks_.isEmpty()) { if (celebrityTracksBuilder_.isEmpty()) { celebrityTracksBuilder_.dispose(); celebrityTracksBuilder_ = null; celebrityTracks_ = other.celebrityTracks_; bitField0_ = (bitField0_ & ~0x00000001); celebrityTracksBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getCelebrityTracksFieldBuilder() : null; } else { celebrityTracksBuilder_.addAllMessages(other.celebrityTracks_); } } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.videointelligence.v1p3beta1.CelebrityTrack m = input.readMessage( com.google.cloud.videointelligence.v1p3beta1.CelebrityTrack.parser(), extensionRegistry); if (celebrityTracksBuilder_ == null) { ensureCelebrityTracksIsMutable(); celebrityTracks_.add(m); } else { celebrityTracksBuilder_.addMessage(m); } break; } // case 10 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.videointelligence.v1p3beta1.CelebrityTrack> celebrityTracks_ = java.util.Collections.emptyList(); private void ensureCelebrityTracksIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { celebrityTracks_ = new java.util.ArrayList<com.google.cloud.videointelligence.v1p3beta1.CelebrityTrack>( celebrityTracks_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.videointelligence.v1p3beta1.CelebrityTrack, com.google.cloud.videointelligence.v1p3beta1.CelebrityTrack.Builder, com.google.cloud.videointelligence.v1p3beta1.CelebrityTrackOrBuilder> celebrityTracksBuilder_; 
/** * * * <pre> * The tracks detected from the input video, including recognized celebrities * and other detected faces in the video. * </pre> * * <code>repeated .google.cloud.videointelligence.v1p3beta1.CelebrityTrack celebrity_tracks = 1; * </code> */ public java.util.List<com.google.cloud.videointelligence.v1p3beta1.CelebrityTrack> getCelebrityTracksList() { if (celebrityTracksBuilder_ == null) { return java.util.Collections.unmodifiableList(celebrityTracks_); } else { return celebrityTracksBuilder_.getMessageList(); } } /** * * * <pre> * The tracks detected from the input video, including recognized celebrities * and other detected faces in the video. * </pre> * * <code>repeated .google.cloud.videointelligence.v1p3beta1.CelebrityTrack celebrity_tracks = 1; * </code> */ public int getCelebrityTracksCount() { if (celebrityTracksBuilder_ == null) { return celebrityTracks_.size(); } else { return celebrityTracksBuilder_.getCount(); } } /** * * * <pre> * The tracks detected from the input video, including recognized celebrities * and other detected faces in the video. * </pre> * * <code>repeated .google.cloud.videointelligence.v1p3beta1.CelebrityTrack celebrity_tracks = 1; * </code> */ public com.google.cloud.videointelligence.v1p3beta1.CelebrityTrack getCelebrityTracks( int index) { if (celebrityTracksBuilder_ == null) { return celebrityTracks_.get(index); } else { return celebrityTracksBuilder_.getMessage(index); } } /** * * * <pre> * The tracks detected from the input video, including recognized celebrities * and other detected faces in the video. 
* </pre> * * <code>repeated .google.cloud.videointelligence.v1p3beta1.CelebrityTrack celebrity_tracks = 1; * </code> */ public Builder setCelebrityTracks( int index, com.google.cloud.videointelligence.v1p3beta1.CelebrityTrack value) { if (celebrityTracksBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureCelebrityTracksIsMutable(); celebrityTracks_.set(index, value); onChanged(); } else { celebrityTracksBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The tracks detected from the input video, including recognized celebrities * and other detected faces in the video. * </pre> * * <code>repeated .google.cloud.videointelligence.v1p3beta1.CelebrityTrack celebrity_tracks = 1; * </code> */ public Builder setCelebrityTracks( int index, com.google.cloud.videointelligence.v1p3beta1.CelebrityTrack.Builder builderForValue) { if (celebrityTracksBuilder_ == null) { ensureCelebrityTracksIsMutable(); celebrityTracks_.set(index, builderForValue.build()); onChanged(); } else { celebrityTracksBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The tracks detected from the input video, including recognized celebrities * and other detected faces in the video. * </pre> * * <code>repeated .google.cloud.videointelligence.v1p3beta1.CelebrityTrack celebrity_tracks = 1; * </code> */ public Builder addCelebrityTracks( com.google.cloud.videointelligence.v1p3beta1.CelebrityTrack value) { if (celebrityTracksBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureCelebrityTracksIsMutable(); celebrityTracks_.add(value); onChanged(); } else { celebrityTracksBuilder_.addMessage(value); } return this; } /** * * * <pre> * The tracks detected from the input video, including recognized celebrities * and other detected faces in the video. 
* </pre> * * <code>repeated .google.cloud.videointelligence.v1p3beta1.CelebrityTrack celebrity_tracks = 1; * </code> */ public Builder addCelebrityTracks( int index, com.google.cloud.videointelligence.v1p3beta1.CelebrityTrack value) { if (celebrityTracksBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureCelebrityTracksIsMutable(); celebrityTracks_.add(index, value); onChanged(); } else { celebrityTracksBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The tracks detected from the input video, including recognized celebrities * and other detected faces in the video. * </pre> * * <code>repeated .google.cloud.videointelligence.v1p3beta1.CelebrityTrack celebrity_tracks = 1; * </code> */ public Builder addCelebrityTracks( com.google.cloud.videointelligence.v1p3beta1.CelebrityTrack.Builder builderForValue) { if (celebrityTracksBuilder_ == null) { ensureCelebrityTracksIsMutable(); celebrityTracks_.add(builderForValue.build()); onChanged(); } else { celebrityTracksBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The tracks detected from the input video, including recognized celebrities * and other detected faces in the video. * </pre> * * <code>repeated .google.cloud.videointelligence.v1p3beta1.CelebrityTrack celebrity_tracks = 1; * </code> */ public Builder addCelebrityTracks( int index, com.google.cloud.videointelligence.v1p3beta1.CelebrityTrack.Builder builderForValue) { if (celebrityTracksBuilder_ == null) { ensureCelebrityTracksIsMutable(); celebrityTracks_.add(index, builderForValue.build()); onChanged(); } else { celebrityTracksBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The tracks detected from the input video, including recognized celebrities * and other detected faces in the video. 
* </pre> * * <code>repeated .google.cloud.videointelligence.v1p3beta1.CelebrityTrack celebrity_tracks = 1; * </code> */ public Builder addAllCelebrityTracks( java.lang.Iterable<? extends com.google.cloud.videointelligence.v1p3beta1.CelebrityTrack> values) { if (celebrityTracksBuilder_ == null) { ensureCelebrityTracksIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, celebrityTracks_); onChanged(); } else { celebrityTracksBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The tracks detected from the input video, including recognized celebrities * and other detected faces in the video. * </pre> * * <code>repeated .google.cloud.videointelligence.v1p3beta1.CelebrityTrack celebrity_tracks = 1; * </code> */ public Builder clearCelebrityTracks() { if (celebrityTracksBuilder_ == null) { celebrityTracks_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { celebrityTracksBuilder_.clear(); } return this; } /** * * * <pre> * The tracks detected from the input video, including recognized celebrities * and other detected faces in the video. * </pre> * * <code>repeated .google.cloud.videointelligence.v1p3beta1.CelebrityTrack celebrity_tracks = 1; * </code> */ public Builder removeCelebrityTracks(int index) { if (celebrityTracksBuilder_ == null) { ensureCelebrityTracksIsMutable(); celebrityTracks_.remove(index); onChanged(); } else { celebrityTracksBuilder_.remove(index); } return this; } /** * * * <pre> * The tracks detected from the input video, including recognized celebrities * and other detected faces in the video. 
* </pre> * * <code>repeated .google.cloud.videointelligence.v1p3beta1.CelebrityTrack celebrity_tracks = 1; * </code> */ public com.google.cloud.videointelligence.v1p3beta1.CelebrityTrack.Builder getCelebrityTracksBuilder(int index) { return getCelebrityTracksFieldBuilder().getBuilder(index); } /** * * * <pre> * The tracks detected from the input video, including recognized celebrities * and other detected faces in the video. * </pre> * * <code>repeated .google.cloud.videointelligence.v1p3beta1.CelebrityTrack celebrity_tracks = 1; * </code> */ public com.google.cloud.videointelligence.v1p3beta1.CelebrityTrackOrBuilder getCelebrityTracksOrBuilder(int index) { if (celebrityTracksBuilder_ == null) { return celebrityTracks_.get(index); } else { return celebrityTracksBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The tracks detected from the input video, including recognized celebrities * and other detected faces in the video. * </pre> * * <code>repeated .google.cloud.videointelligence.v1p3beta1.CelebrityTrack celebrity_tracks = 1; * </code> */ public java.util.List< ? extends com.google.cloud.videointelligence.v1p3beta1.CelebrityTrackOrBuilder> getCelebrityTracksOrBuilderList() { if (celebrityTracksBuilder_ != null) { return celebrityTracksBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(celebrityTracks_); } } /** * * * <pre> * The tracks detected from the input video, including recognized celebrities * and other detected faces in the video. 
* </pre> * * <code>repeated .google.cloud.videointelligence.v1p3beta1.CelebrityTrack celebrity_tracks = 1; * </code> */ public com.google.cloud.videointelligence.v1p3beta1.CelebrityTrack.Builder addCelebrityTracksBuilder() { return getCelebrityTracksFieldBuilder() .addBuilder( com.google.cloud.videointelligence.v1p3beta1.CelebrityTrack.getDefaultInstance()); } /** * * * <pre> * The tracks detected from the input video, including recognized celebrities * and other detected faces in the video. * </pre> * * <code>repeated .google.cloud.videointelligence.v1p3beta1.CelebrityTrack celebrity_tracks = 1; * </code> */ public com.google.cloud.videointelligence.v1p3beta1.CelebrityTrack.Builder addCelebrityTracksBuilder(int index) { return getCelebrityTracksFieldBuilder() .addBuilder( index, com.google.cloud.videointelligence.v1p3beta1.CelebrityTrack.getDefaultInstance()); } /** * * * <pre> * The tracks detected from the input video, including recognized celebrities * and other detected faces in the video. 
* </pre> * * <code>repeated .google.cloud.videointelligence.v1p3beta1.CelebrityTrack celebrity_tracks = 1; * </code> */ public java.util.List<com.google.cloud.videointelligence.v1p3beta1.CelebrityTrack.Builder> getCelebrityTracksBuilderList() { return getCelebrityTracksFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.videointelligence.v1p3beta1.CelebrityTrack, com.google.cloud.videointelligence.v1p3beta1.CelebrityTrack.Builder, com.google.cloud.videointelligence.v1p3beta1.CelebrityTrackOrBuilder> getCelebrityTracksFieldBuilder() { if (celebrityTracksBuilder_ == null) { celebrityTracksBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.videointelligence.v1p3beta1.CelebrityTrack, com.google.cloud.videointelligence.v1p3beta1.CelebrityTrack.Builder, com.google.cloud.videointelligence.v1p3beta1.CelebrityTrackOrBuilder>( celebrityTracks_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); celebrityTracks_ = null; } return celebrityTracksBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.videointelligence.v1p3beta1.CelebrityRecognitionAnnotation) } // @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1p3beta1.CelebrityRecognitionAnnotation) private static final com.google.cloud.videointelligence.v1p3beta1.CelebrityRecognitionAnnotation DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.videointelligence.v1p3beta1.CelebrityRecognitionAnnotation(); } public static com.google.cloud.videointelligence.v1p3beta1.CelebrityRecognitionAnnotation getDefaultInstance() { return DEFAULT_INSTANCE; } private 
static final com.google.protobuf.Parser<CelebrityRecognitionAnnotation> PARSER = new com.google.protobuf.AbstractParser<CelebrityRecognitionAnnotation>() { @java.lang.Override public CelebrityRecognitionAnnotation parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<CelebrityRecognitionAnnotation> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CelebrityRecognitionAnnotation> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.videointelligence.v1p3beta1.CelebrityRecognitionAnnotation getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/druid
36,635
processing/src/main/java/org/apache/druid/segment/data/VSizeLongSerde.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.druid.segment.data;

import com.google.common.base.Preconditions;
import org.apache.druid.java.util.common.IAE;
import org.apache.druid.java.util.common.UOE;

import javax.annotation.Nullable;
import java.io.Closeable;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;

/**
 * Bit-packed serialization of non-negative long values at a fixed number of bits per value.
 * Currently only support big endian
 * <p>
 * An empty 4 bytes is written upon closing to avoid index out of bound exception for deserializers that shift bytes
 * (several deserializers read a full short/int/long around the last packed value and discard the excess bits).
 */
public class VSizeLongSerde
{
  // The only bit widths with serializer/deserializer implementations below; getBitsForMax rounds up to one of these.
  public static final int[] SUPPORTED_SIZES = {1, 2, 4, 8, 12, 16, 20, 24, 32, 40, 48, 56, 64};
  // Four zero bytes appended by every serializer's close() as over-read padding.
  public static final byte[] EMPTY = {0, 0, 0, 0};

  /**
   * Returns the smallest supported bit width capable of representing {@code value}.
   *
   * @param value largest value that must be representable; must be non-negative
   * @return one of {@link #SUPPORTED_SIZES}
   * @throws IAE if {@code value} is negative
   */
  public static int getBitsForMax(long value)
  {
    if (value < 0) {
      throw new IAE("maxValue[%s] must be positive", value);
    }
    byte numBits = 0;
    long maxValue = 1;
    for (int supportedSize : SUPPORTED_SIZES) {
      // grow maxValue up to 2^supportedSize, guarding against long overflow near 2^63
      while (numBits < supportedSize && maxValue < Long.MAX_VALUE / 2) {
        numBits++;
        maxValue *= 2;
      }
      if (value <= maxValue || maxValue >= Long.MAX_VALUE / 2) {
        return supportedSize;
      }
    }
    return 64;
  }

  /**
   * Returns the serialized size in bytes of {@code numValues} values packed at {@code bitsPerValue} bits each,
   * including the 4-byte closing pad.
   */
  public static int getSerializedSize(int bitsPerValue, int numValues)
  {
    // this value is calculated by rounding up the byte and adding the 4 closing bytes
    return (bitsPerValue * numValues + 7) / 8 + 4;
  }

  /**
   * Returns the largest power-of-2 count of values whose serialized size fits in {@code blockSize} bytes.
   * <p>
   * Block size should be power of 2, so {@link ColumnarLongs#get(int)} can be optimized using bit operators.
   */
  public static int getNumValuesPerBlock(int bitsPerValue, int blockSize)
  {
    int ret = 1;
    while (getSerializedSize(bitsPerValue, ret) <= blockSize) {
      ret *= 2;
    }
    return ret / 2;
  }

  /**
   * Returns a serializer writing {@code longSize}-bit values to {@code output}.
   *
   * @throws IAE if {@code longSize} is not one of {@link #SUPPORTED_SIZES}
   */
  public static LongSerializer getSerializer(int longSize, OutputStream output)
  {
    switch (longSize) {
      case 1:
        return new Size1Ser(output);
      case 2:
        return new Size2Ser(output);
      case 4:
        return new Mult4Ser(output, 0);
      case 8:
        return new Mult8Ser(output, 1);
      case 12:
        return new Mult4Ser(output, 1);
      case 16:
        return new Mult8Ser(output, 2);
      case 20:
        return new Mult4Ser(output, 2);
      case 24:
        return new Mult8Ser(output, 3);
      case 32:
        return new Mult8Ser(output, 4);
      case 40:
        return new Mult8Ser(output, 5);
      case 48:
        return new Mult8Ser(output, 6);
      case 56:
        return new Mult8Ser(output, 7);
      case 64:
        return new Mult8Ser(output, 8);
      default:
        throw new IAE("Unsupported size %s", longSize);
    }
  }

  /**
   * Returns a serializer writing {@code longSize}-bit values into {@code buffer} starting at {@code bufferOffset}.
   *
   * @throws IAE if {@code longSize} is not one of {@link #SUPPORTED_SIZES}
   */
  public static LongSerializer getSerializer(int longSize, ByteBuffer buffer, int bufferOffset)
  {
    switch (longSize) {
      case 1:
        return new Size1Ser(buffer, bufferOffset);
      case 2:
        return new Size2Ser(buffer, bufferOffset);
      case 4:
        return new Mult4Ser(buffer, bufferOffset, 0);
      case 8:
        return new Mult8Ser(buffer, bufferOffset, 1);
      case 12:
        return new Mult4Ser(buffer, bufferOffset, 1);
      case 16:
        return new Mult8Ser(buffer, bufferOffset, 2);
      case 20:
        return new Mult4Ser(buffer, bufferOffset, 2);
      case 24:
        return new Mult8Ser(buffer, bufferOffset, 3);
      case 32:
        return new Mult8Ser(buffer, bufferOffset, 4);
      case 40:
        return new Mult8Ser(buffer, bufferOffset, 5);
      case 48:
        return new Mult8Ser(buffer, bufferOffset, 6);
      case 56:
        return new Mult8Ser(buffer, bufferOffset, 7);
      case 64:
        return new Mult8Ser(buffer, bufferOffset, 8);
      default:
        throw new IAE("Unsupported size %s", longSize);
    }
  }

  // LongDeserializers were adapted from Apache Lucene DirectReader, see:
  // https://github.com/apache/lucene-solr/blob/master/lucene/core/src/java/org/apache/lucene/util/packed/DirectReader.java
  /**
   * Returns a deserializer reading {@code longSize}-bit values from {@code fromBuffer} starting at
   * {@code bufferOffset}.
   *
   * @throws IAE if {@code longSize} is not one of {@link #SUPPORTED_SIZES}
   */
  public static LongDeserializer getDeserializer(int longSize, ByteBuffer fromBuffer, int bufferOffset)
  {
    // The buffer needs to be duplicated since the byte order is changed
    ByteBuffer buffer = fromBuffer.duplicate().order(ByteOrder.BIG_ENDIAN);
    switch (longSize) {
      case 1:
        return new Size1Des(buffer, bufferOffset);
      case 2:
        return new Size2Des(buffer, bufferOffset);
      case 4:
        return new Size4Des(buffer, bufferOffset);
      case 8:
        return new Size8Des(buffer, bufferOffset);
      case 12:
        return new Size12Des(buffer, bufferOffset);
      case 16:
        return new Size16Des(buffer, bufferOffset);
      case 20:
        return new Size20Des(buffer, bufferOffset);
      case 24:
        return new Size24Des(buffer, bufferOffset);
      case 32:
        return new Size32Des(buffer, bufferOffset);
      case 40:
        return new Size40Des(buffer, bufferOffset);
      case 48:
        return new Size48Des(buffer, bufferOffset);
      case 56:
        return new Size56Des(buffer, bufferOffset);
      case 64:
        return new Size64Des(buffer, bufferOffset);
      default:
        throw new IAE("Unsupported size %s", longSize);
    }
  }

  /**
   * Writes bit-packed long values; {@link #close()} flushes the final partial byte and the 4-byte pad.
   */
  public interface LongSerializer extends Closeable
  {
    void write(long value) throws IOException;
  }

  /**
   * 1-bit-per-value serializer; accumulates 8 values into a byte before emitting it.
   */
  private static final class Size1Ser implements LongSerializer
  {
    @Nullable
    OutputStream output = null;        // null when writing directly to a ByteBuffer
    ByteBuffer buffer;                 // single-byte staging buffer in stream mode
    byte curByte = 0;                  // bits accumulated so far, high bit first
    int count = 0;                     // number of bits currently in curByte
    private boolean closed = false;    // makes close() idempotent

    public Size1Ser(OutputStream output)
    {
      this.output = output;
      this.buffer = ByteBuffer.allocate(1);
    }

    public Size1Ser(ByteBuffer buffer, int offset)
    {
      this.buffer = buffer;
      this.buffer.position(offset);
    }

    @Override
    public void write(long value) throws IOException
    {
      Preconditions.checkArgument(value >= 0);
      // flush the full byte before appending the 9th bit
      if (count == 8) {
        buffer.put(curByte);
        count = 0;
        if (!buffer.hasRemaining() && output != null) {
          output.write(buffer.array());
          buffer.rewind();
        }
      }
      curByte = (byte) ((curByte << 1) | (value & 1));
      count++;
    }

    @Override
    public void close() throws IOException
    {
      if (closed) {
        return;
      }
      // left-align the remaining bits in the final byte
      buffer.put((byte) (curByte << (8 - count)));
      if (output != null) {
        output.write(buffer.array());
        output.write(EMPTY);
        output.flush();
      } else {
        // buffer mode: write the 4-byte pad directly
        buffer.putInt(0);
      }
      closed = true;
    }
  }

  /**
   * 2-bits-per-value serializer; accumulates 4 values into a byte before emitting it.
   */
  private static final class Size2Ser implements LongSerializer
  {
    @Nullable
    OutputStream output = null;        // null when writing directly to a ByteBuffer
    ByteBuffer buffer;                 // single-byte staging buffer in stream mode
    byte curByte = 0;                  // bits accumulated so far, high bits first
    int count = 0;                     // number of bits currently in curByte
    private boolean closed = false;    // makes close() idempotent

    public Size2Ser(OutputStream output)
    {
      this.output = output;
      this.buffer = ByteBuffer.allocate(1);
    }

    public Size2Ser(ByteBuffer buffer, int offset)
    {
      this.buffer = buffer;
      this.buffer.position(offset);
    }

    @Override
    public void write(long value) throws IOException
    {
      Preconditions.checkArgument(value >= 0);
      // flush the full byte before appending bits that would overflow it
      if (count == 8) {
        buffer.put(curByte);
        count = 0;
        if (!buffer.hasRemaining() && output != null) {
          output.write(buffer.array());
          buffer.rewind();
        }
      }
      curByte = (byte) ((curByte << 2) | (value & 3));
      count += 2;
    }

    @Override
    public void close() throws IOException
    {
      if (closed) {
        return;
      }
      // left-align the remaining bits in the final byte
      buffer.put((byte) (curByte << (8 - count)));
      if (output != null) {
        output.write(buffer.array());
        output.write(EMPTY);
        output.flush();
      } else {
        // buffer mode: write the 4-byte pad directly
        buffer.putInt(0);
      }
      closed = true;
    }
  }

  /**
   * Serializer for widths of the form {@code numBytes * 8 + 4} bits (4, 12, 20): pairs of values share a byte
   * that holds one nibble from each, so the packing alternates between a "first" and "second" value of the pair.
   */
  private static final class Mult4Ser implements LongSerializer
  {
    @Nullable
    OutputStream output;               // null when writing directly to a ByteBuffer
    ByteBuffer buffer;                 // staging buffer sized for one value pair in stream mode
    int numBytes;                      // whole bytes per value, in addition to the shared nibble
    byte curByte = 0;                  // holds the pending nibble of the first value of a pair
    boolean first = true;              // true when the next write starts a new pair
    private boolean closed = false;    // makes close() idempotent

    public Mult4Ser(OutputStream output, int numBytes)
    {
      this.output = output;
      this.buffer = ByteBuffer.allocate(numBytes * 2 + 1);
      this.numBytes = numBytes;
    }

    public Mult4Ser(ByteBuffer buffer, int offset, int numBytes)
    {
      this.buffer = buffer;
      this.buffer.position(offset);
      this.numBytes = numBytes;
    }

    @Override
    public void write(long value) throws IOException
    {
      Preconditions.checkArgument(value >= 0);
      int shift = 0;
      if (first) {
        // stash the low nibble; its byte is completed by the second value of the pair
        shift = 4;
        curByte = (byte) value;
        first = false;
      } else {
        // combine the stashed nibble with this value's top nibble and emit the shared byte
        curByte = (byte) ((curByte << 4) | ((value >> (numBytes << 3)) & 0xF));
        buffer.put(curByte);
        first = true;
      }
      // emit the whole bytes of the value, most significant first
      for (int i = numBytes - 1; i >= 0; i--) {
        buffer.put((byte) (value >>> (i * 8 + shift)));
      }
      if (!buffer.hasRemaining() && output != null) {
        output.write(buffer.array());
        buffer.rewind();
      }
    }

    @Override
    public void close() throws IOException
    {
      if (closed) {
        return;
      }
      // an odd value count leaves a pending nibble; left-align it into a final byte
      if (!first) {
        buffer.put((byte) (curByte << 4));
      }
      if (output != null) {
        output.write(buffer.array(), 0, buffer.position());
        output.write(EMPTY);
        output.flush();
      } else {
        // buffer mode: write the 4-byte pad directly
        buffer.putInt(0);
      }
      closed = true;
    }
  }

  /**
   * Serializer for byte-aligned widths (8, 16, 24, 32, 40, 48, 56, 64 bits): each value occupies exactly
   * {@code numBytes} whole bytes, big-endian.
   */
  private static final class Mult8Ser implements LongSerializer
  {
    @Nullable
    OutputStream output;               // null when writing directly to a ByteBuffer
    ByteBuffer buffer;                 // single-byte staging buffer in stream mode
    int numBytes;                      // bytes per value
    private boolean closed = false;    // makes close() idempotent

    public Mult8Ser(OutputStream output, int numBytes)
    {
      this.output = output;
      this.buffer = ByteBuffer.allocate(1);
      this.numBytes = numBytes;
    }

    public Mult8Ser(ByteBuffer buffer, int offset, int numBytes)
    {
      this.buffer = buffer;
      this.buffer.position(offset);
      this.numBytes = numBytes;
    }

    @Override
    public void write(long value) throws IOException
    {
      if (numBytes != 8) {
        // if the value is not stored in a full long, ensure it is zero or positive
        Preconditions.checkArgument(value >= 0);
      }
      // emit bytes most significant first (big endian)
      for (int i = numBytes - 1; i >= 0; i--) {
        buffer.put((byte) (value >>> (i * 8)));
        if (output != null) {
          output.write(buffer.array());
          buffer.position(0);
        }
      }
    }

    @Override
    public void close() throws IOException
    {
      if (closed) {
        return;
      }
      if (output != null) {
        output.write(EMPTY);
        output.flush();
      } else {
        // buffer mode: write the 4-byte pad directly
        buffer.putInt(0);
      }
      closed = true;
    }
  }

  /**
   * Unpack bitpacked long values from an underlying contiguous memory block
   */
  public interface LongDeserializer
  {
    /**
     * Unpack long value at the specified row index
     */
    long get(int index);

    /**
     * Unpack a contiguous vector of long values at the specified start index of length and adjust them by the supplied
     * delta base value.
     */
    void getDelta(long[] out, int outPosition, int startIndex, int length, long base);

    /**
     * Unpack a non-contiguous vector of long values at the specified indexes and adjust them by the supplied delta base
     * value. Stops early at the first index at or beyond {@code limit} and returns the number of values written.
     */
    default int getDelta(long[] out, int outPosition, int[] indexes, int length, int indexOffset, int limit, long base)
    {
      for (int i = 0; i < length; i++) {
        int index = indexes[outPosition + i] - indexOffset;
        if (index >= limit) {
          return i;
        }
        out[outPosition + i] = base + get(index);
      }
      return length;
    }

    /**
     * Unpack a contiguous vector of long values at the specified start index of length and lookup and replace stored
     * values based on their index in the supplied value lookup 'table'
     */
    default void getTable(long[] out, int outPosition, int startIndex, int length, long[] table)
    {
      throw new UOE("Table decoding not supported for %s", this.getClass().getSimpleName());
    }

    /**
     * Unpack a contiguous vector of long values at the specified indexes and lookup and replace stored values based on
     * their index in the supplied value lookup 'table'. Stops early at the first index at or beyond {@code limit} and
     * returns the number of values written.
     */
    default int getTable(
        long[] out,
        int outPosition,
        int[] indexes,
        int length,
        int indexOffset,
        int limit,
        long[] table
    )
    {
      for (int i = 0; i < length; i++) {
        int index = indexes[outPosition + i] - indexOffset;
        if (index >= limit) {
          return i;
        }
        out[outPosition + i] = table[(int) get(index)];
      }
      return length;
    }
  }

  /**
   * 1-bit deserializer: 8 values per byte, high bit first.
   */
  private static final class Size1Des implements LongDeserializer
  {
    final ByteBuffer buffer;
    final int offset;

    public Size1Des(ByteBuffer buffer, int bufferOffset)
    {
      this.buffer = buffer;
      this.offset = bufferOffset;
    }

    @Override
    public long get(int index)
    {
      int shift = 7 - (index & 7);
      return (buffer.get(offset + (index >> 3)) >> shift) & 1;
    }

    @Override
    public void getDelta(long[] out, int outPosition, int startIndex, int length, long base)
    {
      int index = startIndex;
      int i = 0;
      // byte align
      while ((index & 0x7) != 0 && i < length) {
        out[outPosition + i++] = base + get(index++);
      }
      // unpack 8 values at a time from each whole byte
      for ( ; i + Byte.SIZE < length; index += Byte.SIZE) {
        final byte unpack = buffer.get(offset + (index >> 3));
        out[outPosition + i++] = base + ((unpack >> 7) & 1);
        out[outPosition + i++] = base + ((unpack >> 6) & 1);
        out[outPosition + i++] = base + ((unpack >> 5) & 1);
        out[outPosition + i++] = base + ((unpack >> 4) & 1);
        out[outPosition + i++] = base + ((unpack >> 3) & 1);
        out[outPosition + i++] = base + ((unpack >> 2) & 1);
        out[outPosition + i++] = base + ((unpack >> 1) & 1);
        out[outPosition + i++] = base + (unpack & 1);
      }
      // tail values that don't fill a whole byte
      while (i < length) {
        out[outPosition + i++] = base + get(index++);
      }
    }

    @Override
    public void getTable(long[] out, int outPosition, int startIndex, int length, long[] table)
    {
      int index = startIndex;
      int i = 0;
      // byte align
      while ((index & 0x7) != 0 && i < length) {
        out[outPosition + i++] = table[(int) get(index++)];
      }
      // unpack 8 values at a time from each whole byte
      for ( ; i + Byte.SIZE < length; index += Byte.SIZE) {
        final byte unpack = buffer.get(offset + (index >> 3));
        out[outPosition + i++] = table[(unpack >> 7) & 1];
        out[outPosition + i++] = table[(unpack >> 6) & 1];
        out[outPosition + i++] = table[(unpack >> 5) & 1];
        out[outPosition + i++] = table[(unpack >> 4) & 1];
        out[outPosition + i++] = table[(unpack >> 3) & 1];
        out[outPosition + i++] = table[(unpack >> 2) & 1];
        out[outPosition + i++] = table[(unpack >> 1) & 1];
        out[outPosition + i++] = table[unpack & 1];
      }
      // tail values that don't fill a whole byte
      while (i < length) {
        out[outPosition + i++] = table[(int) get(index++)];
      }
    }
  }

  /**
   * 2-bit deserializer: 4 values per byte, high bits first.
   */
  private static final class Size2Des implements LongDeserializer
  {
    final ByteBuffer buffer;
    final int offset;

    public Size2Des(ByteBuffer buffer, int bufferOffset)
    {
      this.buffer = buffer;
      this.offset = bufferOffset;
    }

    @Override
    public long get(int index)
    {
      int shift = 6 - ((index & 3) << 1);
      return (buffer.get(offset + (index >> 2)) >> shift) & 3;
    }

    @Override
    public void getDelta(long[] out, int outPosition, int startIndex, int length, long base)
    {
      int index = startIndex;
      int i = 0;
      // byte align
      while ((index & 0x3) != 0 && i < length) {
        out[outPosition + i++] = base + get(index++);
      }
      // unpack 8 values at a time from each 16-bit read
      for ( ; i + 8 < length; index += 8) {
        final short unpack = buffer.getShort(offset + (index >> 2));
        out[outPosition + i++] = base + ((unpack >> 14) & 3);
        out[outPosition + i++] = base + ((unpack >> 12) & 3);
        out[outPosition + i++] = base + ((unpack >> 10) & 3);
        out[outPosition + i++] = base + ((unpack >> 8) & 3);
        out[outPosition + i++] = base + ((unpack >> 6) & 3);
        out[outPosition + i++] = base + ((unpack >> 4) & 3);
        out[outPosition + i++] = base + ((unpack >> 2) & 3);
        out[outPosition + i++] = base + (unpack & 3);
      }
      // tail values
      while (i < length) {
        out[outPosition + i++] = base + get(index++);
      }
    }

    @Override
    public void getTable(long[] out, int outPosition, int startIndex, int length, long[] table)
    {
      int index = startIndex;
      int i = 0;
      // byte align
      while ((index & 0x3) != 0 && i < length) {
        out[outPosition + i++] = table[(int) get(index++)];
      }
      // unpack 8 values at a time from each 16-bit read
      for ( ; i + 8 < length; index += 8) {
        final short unpack = buffer.getShort(offset + (index >> 2));
        out[outPosition + i++] = table[(unpack >> 14) & 3];
        out[outPosition + i++] = table[(unpack >> 12) & 3];
        out[outPosition + i++] = table[(unpack >> 10) & 3];
        out[outPosition + i++] = table[(unpack >> 8) & 3];
        out[outPosition + i++] = table[(unpack >> 6) & 3];
        out[outPosition + i++] = table[(unpack >> 4) & 3];
        out[outPosition + i++] = table[(unpack >> 2) & 3];
        out[outPosition + i++] = table[unpack & 3];
      }
      // tail values
      while (i < length) {
        out[outPosition + i++] = table[(int) get(index++)];
      }
    }
  }

  /**
   * 4-bit deserializer: 2 values per byte, high nibble first.
   */
  private static final class Size4Des implements LongDeserializer
  {
    final ByteBuffer buffer;
    final int offset;

    public Size4Des(ByteBuffer buffer, int bufferOffset)
    {
      this.buffer = buffer;
      this.offset = bufferOffset;
    }

    @Override
    public long get(int index)
    {
      // even index -> high nibble (shift 4), odd index -> low nibble (shift 0)
      int shift = ((index + 1) & 1) << 2;
      return (buffer.get(offset + (index >> 1)) >> shift) & 0xF;
    }

    @Override
    public void getDelta(long[] out, int outPosition, int startIndex, int length, long base)
    {
      int index = startIndex;
      int i = 0;
      // byte align
      while ((index & 0x1) != 0 && i < length) {
        out[outPosition + i++] = base + (get(index++) & 0xF);
      }
      // unpack 8 values at a time from each 32-bit read
      for ( ; i + 8 < length; index += 8) {
        final int unpack = buffer.getInt(offset + (index >> 1));
        out[outPosition + i++] = base + ((unpack >> 28) & 0xF);
        out[outPosition + i++] = base + ((unpack >> 24) & 0xF);
        out[outPosition + i++] = base + ((unpack >> 20) & 0xF);
        out[outPosition + i++] = base + ((unpack >> 16) & 0xF);
        out[outPosition + i++] = base + ((unpack >> 12) & 0xF);
        out[outPosition + i++] = base + ((unpack >> 8) & 0xF);
        out[outPosition + i++] = base + ((unpack >> 4) & 0xF);
        out[outPosition + i++] = base + (unpack & 0xF);
      }
      // tail values
      while (i < length) {
        out[outPosition + i++] = base + get(index++);
      }
    }

    @Override
    public void getTable(long[] out, int outPosition, int startIndex, int length, long[] table)
    {
      int index = startIndex;
      int i = 0;
      // byte align
      while ((index & 0x1) != 0 && i < length) {
        out[outPosition + i++] = table[(int) get(index++)];
      }
      // unpack 8 values at a time from each 32-bit read
      for ( ; i + 8 < length; index += 8) {
        final int unpack = buffer.getInt(offset + (index >> 1));
        out[outPosition + i++] = table[(unpack >> 28) & 0xF];
        out[outPosition + i++] = table[(unpack >> 24) & 0xF];
        out[outPosition + i++] = table[(unpack >> 20) & 0xF];
        out[outPosition + i++] = table[(unpack >> 16) & 0xF];
        out[outPosition + i++] = table[(unpack >> 12) & 0xF];
        out[outPosition + i++] = table[(unpack >> 8) & 0xF];
        out[outPosition + i++] = table[(unpack >> 4) & 0xF];
        out[outPosition + i++] = table[unpack & 0xF];
      }
      // tail values
      while (i < length) {
        out[outPosition + i++] = table[(int) get(index++)];
      }
    }
  }

  /**
   * 8-bit deserializer: one unsigned byte per value.
   */
  private static final class Size8Des implements LongDeserializer
  {
    final ByteBuffer buffer;
    final int offset;

    public Size8Des(ByteBuffer buffer, int bufferOffset)
    {
      this.buffer = buffer;
      this.offset = bufferOffset;
    }

    @Override
    public long get(int index)
    {
      return buffer.get(offset + index) & 0xFF;
    }

    @Override
    public void getDelta(long[] out, int outPosition, int startIndex, int length, long base)
    {
      for (int i = 0, indexOffset = startIndex; i < length; i++, indexOffset++) {
        out[outPosition + i] = base + (buffer.get(offset + indexOffset) & 0xFF);
      }
    }

    @Override
    public int getDelta(long[] out, int outPosition, int[] indexes, int length, int indexOffset, int limit, long base)
    {
      for (int i = 0; i < length; i++) {
        int index = indexes[outPosition + i] - indexOffset;
        if (index >= limit) {
          return i;
        }
        out[outPosition + i] = base + (buffer.get(offset + index) & 0xFF);
      }
      return length;
    }

    @Override
    public void getTable(long[] out, int outPosition, int startIndex, int length, long[] table)
    {
      for (int i = 0, indexOffset = startIndex; i < length; i++, indexOffset++) {
        out[outPosition + i] = table[buffer.get(offset + indexOffset) & 0xFF];
      }
    }

    @Override
    public int getTable(long[] out, int outPosition, int[] indexes, int length, int indexOffset, int limit, long[] table)
    {
      for (int i = 0; i < length; i++) {
        int index = indexes[outPosition + i] - indexOffset;
        if (index >= limit) {
          return i;
        }
        out[outPosition + i] = table[buffer.get(offset + index) & 0xFF];
      }
      return length;
    }
  }

  /**
   * 12-bit deserializer: 2 values per 3 bytes; each value is read from a 16-bit window.
   */
  private static final class Size12Des implements LongDeserializer
  {
    final ByteBuffer buffer;
    final int offset;

    public Size12Des(ByteBuffer buffer, int bufferOffset)
    {
      this.buffer = buffer;
      this.offset = bufferOffset;
    }

    @Override
    public long get(int index)
    {
      // even index -> value in the top 12 bits of the short, odd index -> bottom 12 bits
      int shift = ((index + 1) & 1) << 2;
      int indexOffset = (index * 3) >> 1;
      return (buffer.getShort(offset + indexOffset) >> shift) & 0xFFF;
    }

    @Override
    public void getDelta(long[] out, int outPosition, int startIndex, int length, long base)
    {
      int i = 0;
      int index = startIndex;
      // every other value is byte aligned
      if ((index & 0x1) != 0) {
        out[outPosition + i++] = base + get(index++);
      }
      // 8 values occupy 12 bytes: one long plus one int per iteration
      final int unpackSize = Long.BYTES + Integer.BYTES;
      for (int indexOffset = (index * 3) >> 1; i + 8 < length; indexOffset += unpackSize) {
        final long unpack = buffer.getLong(offset + indexOffset);
        final int unpack2 = buffer.getInt(offset + indexOffset + Long.BYTES);
        out[outPosition + i++] = base + ((unpack >> 52) & 0xFFF);
        out[outPosition + i++] = base + ((unpack >> 40) & 0xFFF);
        out[outPosition + i++] = base + ((unpack >> 28) & 0xFFF);
        out[outPosition + i++] = base + ((unpack >> 16) & 0xFFF);
        out[outPosition + i++] = base + ((unpack >> 4) & 0xFFF);
        out[outPosition + i++] = base + (((unpack & 0xF) << 8) | ((unpack2 >>> 24) & 0xFF));
        out[outPosition + i++] = base + ((unpack2 >> 12) & 0xFFF);
        out[outPosition + i++] = base + (unpack2 & 0xFFF);
      }
      // tail values
      while (i < length) {
        out[outPosition + i] = base + get(startIndex + i);
        i++;
      }
    }
  }

  /**
   * 16-bit deserializer: one unsigned short per value.
   */
  private static final class Size16Des implements LongDeserializer
  {
    final ByteBuffer buffer;
    final int offset;

    public Size16Des(ByteBuffer buffer, int bufferOffset)
    {
      this.buffer = buffer;
      this.offset = bufferOffset;
    }

    @Override
    public long get(int index)
    {
      return buffer.getShort(offset + (index << 1)) & 0xFFFF;
    }

    @Override
    public void getDelta(long[] out, int outPosition, int startIndex, int length, long base)
    {
      for (int i = 0, indexOffset = (startIndex << 1); i < length; i++, indexOffset += Short.BYTES) {
        out[outPosition + i] = base + (buffer.getShort(offset + indexOffset) & 0xFFFF);
      }
    }

    @Override
    public int getDelta(long[] out, int outPosition, int[] indexes, int length, int indexOffset, int limit, long base)
    {
      for (int i = 0; i < length; i++) {
        int index = indexes[outPosition + i] - indexOffset;
        if (index >= limit) {
          return i;
        }
        out[outPosition + i] = base + (buffer.getShort(offset + (index << 1)) & 0xFFFF);
      }
      return length;
    }
  }

  /**
   * 20-bit deserializer: 2 values per 5 bytes; each value is read from a 32-bit window.
   */
  private static final class Size20Des implements LongDeserializer
  {
    final ByteBuffer buffer;
    final int offset;

    public Size20Des(ByteBuffer buffer, int bufferOffset)
    {
      this.buffer = buffer;
      this.offset = bufferOffset;
    }

    @Override
    public long get(int index)
    {
      // even index -> value in bits 31..12 of the int, odd index -> bits 27..8
      int shift = (((index + 1) & 1) << 2) + 8;
      int indexOffset = (index * 5) >> 1;
      return (buffer.getInt(offset + indexOffset) >> shift) & 0xFFFFF;
    }

    @Override
    public void getDelta(long[] out, int outPosition, int startIndex, int length, long base)
    {
      int i = 0;
      int index = startIndex;
      // every other value is byte aligned
      if ((index & 0x1) != 0) {
        out[outPosition + i++] = base + get(index++);
      }
      // 8 values occupy 20 bytes: two longs plus one int per iteration
      final int unpackSize = Long.BYTES + Long.BYTES + Integer.BYTES;
      for (int indexOffset = (index * 5) >> 1; i + 8 < length; indexOffset += unpackSize) {
        final long unpack = buffer.getLong(offset + indexOffset);
        final long unpack2 = buffer.getLong(offset + indexOffset + Long.BYTES);
        final int unpack3 = buffer.getInt(offset + indexOffset + Long.BYTES + Long.BYTES);
        out[outPosition + i++] = base + ((unpack >> 44) & 0xFFFFF);
        out[outPosition + i++] = base + ((unpack >> 24) & 0xFFFFF);
        out[outPosition + i++] = base + ((unpack >> 4) & 0xFFFFF);
        out[outPosition + i++] = base + (((unpack & 0xF) << 16) | ((unpack2 >>> 48) & 0xFFFF));
        out[outPosition + i++] = base + ((unpack2 >> 28) & 0xFFFFF);
        out[outPosition + i++] = base + ((unpack2 >> 8) & 0xFFFFF);
        out[outPosition + i++] = base + (((unpack2 & 0xFF) << 12) | ((unpack3 >>> 20) & 0xFFF));
        out[outPosition + i++] = base + (unpack3 & 0xFFFFF);
      }
      // tail values
      while (i < length) {
        out[outPosition + i] = base + get(startIndex + i);
        i++;
      }
    }
  }

  /**
   * 24-bit deserializer: 3 bytes per value, read through a 32-bit window.
   */
  private static final class Size24Des implements LongDeserializer
  {
    final ByteBuffer buffer;
    final int offset;

    public Size24Des(ByteBuffer buffer, int bufferOffset)
    {
      this.buffer = buffer;
      this.offset = bufferOffset;
    }

    @Override
    public long get(int index)
    {
      return buffer.getInt(offset + (index * 3)) >>> 8;
    }

    @Override
    public void getDelta(long[] out, int outPosition, int startIndex, int length, long base)
    {
      int i = 0;
      // 8 values occupy 24 bytes: three longs per iteration
      final int unpackSize = 3 * Long.BYTES;
      for (int indexOffset = startIndex * 3; i + 8 < length; indexOffset += unpackSize) {
        final long unpack = buffer.getLong(offset + indexOffset);
        final long unpack2 = buffer.getLong(offset + indexOffset + Long.BYTES);
        final long unpack3 = buffer.getLong(offset + indexOffset + Long.BYTES + Long.BYTES);
        out[outPosition + i++] = base + ((unpack >> 40) & 0xFFFFFF);
        out[outPosition + i++] = base + ((unpack >> 16) & 0xFFFFFF);
        out[outPosition + i++] = base + (((unpack & 0xFFFF) << 8) | ((unpack2 >>> 56) & 0xFF));
        out[outPosition + i++] = base + ((unpack2 >> 32) & 0xFFFFFF);
        out[outPosition + i++] = base + ((unpack2 >> 8) & 0xFFFFFF);
        out[outPosition + i++] = base + (((unpack2 & 0xFF) << 16) | ((unpack3 >>> 48) & 0xFFFF));
        out[outPosition + i++] = base + ((unpack3 >> 24) & 0xFFFFFF);
        out[outPosition + i++] = base + (unpack3 & 0xFFFFFF);
      }
      // tail values
      while (i < length) {
        out[outPosition + i] = base + get(startIndex + i);
        i++;
      }
    }
  }

  /**
   * 32-bit deserializer: one unsigned int per value.
   */
  private static final class Size32Des implements LongDeserializer
  {
    final ByteBuffer buffer;
    final int offset;

    public Size32Des(ByteBuffer buffer, int bufferOffset)
    {
      this.buffer = buffer;
      this.offset = bufferOffset;
    }

    @Override
    public long get(int index)
    {
      return buffer.getInt((offset + (index << 2))) & 0xFFFFFFFFL;
    }

    @Override
    public void getDelta(long[] out, int outPosition, int startIndex, int length, long base)
    {
      for (int i = 0, indexOffset = (startIndex << 2); i < length; i++, indexOffset += Integer.BYTES) {
        out[outPosition + i] = base + (buffer.getInt(offset + indexOffset) & 0xFFFFFFFFL);
      }
    }
  }

  /**
   * 40-bit deserializer: 5 bytes per value, read through a 64-bit window.
   */
  private static final class Size40Des implements LongDeserializer
  {
    final ByteBuffer buffer;
    final int offset;

    public Size40Des(ByteBuffer buffer, int bufferOffset)
    {
      this.buffer = buffer;
      this.offset = bufferOffset;
    }

    @Override
    public long get(int index)
    {
      return buffer.getLong(offset + (index * 5)) >>> 24;
    }

    @Override
    public void getDelta(long[] out, int outPosition, int startIndex, int length, long base)
    {
      int i = 0;
      // 8 values occupy 40 bytes: five longs per iteration
      final int unpackSize = 5 * Long.BYTES;
      for (int indexOffset = startIndex * 5; i + 8 < length; indexOffset += unpackSize) {
        final long unpack = buffer.getLong(offset + indexOffset);
        final long unpack2 = buffer.getLong(offset + indexOffset + Long.BYTES);
        final long unpack3 = buffer.getLong(offset + indexOffset + (2 * Long.BYTES));
        final long unpack4 = buffer.getLong(offset + indexOffset + (3 * Long.BYTES));
        final long unpack5 = buffer.getLong(offset + indexOffset + (4 * Long.BYTES));
        out[outPosition + i++] = base + ((unpack >>> 24) & 0xFFFFFFFFFFL);
        out[outPosition + i++] = base + (((unpack & 0xFFFFFFL) << 16) | ((unpack2 >>> 48) & 0xFFFFL));
        out[outPosition + i++] = base + ((unpack2 >>> 8) & 0xFFFFFFFFFFL);
        out[outPosition + i++] = base + (((unpack2 & 0xFFL) << 32) | ((unpack3 >>> 32) & 0xFFFFFFFFL));
        out[outPosition + i++] = base + (((unpack3 & 0xFFFFFFFFL) << 8) | ((unpack4 >>> 56) & 0xFFL));
        out[outPosition + i++] = base + ((unpack4 >>> 16) & 0xFFFFFFFFFFL);
        out[outPosition + i++] = base + (((unpack4 & 0xFFFFL) << 24) | ((unpack5 >>> 40) & 0xFFFFFFL));
        out[outPosition + i++] = base + (unpack5 & 0xFFFFFFFFFFL);
      }
      // tail values
      while (i < length) {
        out[outPosition + i] = base + get(startIndex + i);
        i++;
      }
    }
  }

  /**
   * 48-bit deserializer: 6 bytes per value, read through a 64-bit window.
   */
  private static final class Size48Des implements LongDeserializer
  {
    final ByteBuffer buffer;
    final int offset;

    public Size48Des(ByteBuffer buffer, int bufferOffset)
    {
      this.buffer = buffer;
      this.offset = bufferOffset;
    }

    @Override
    public long get(int index)
    {
      return buffer.getLong(offset + (index * 6)) >>> 16;
    }

    @Override
    public void getDelta(long[] out, int outPosition, int startIndex, int length, long base)
    {
      int i = 0;
      // 8 values occupy 48 bytes: six longs per iteration
      final int unpackSize = 6 * Long.BYTES;
      for (int indexOffset = startIndex * 6; i + 8 < length; indexOffset += unpackSize) {
        final long unpack = buffer.getLong(offset + indexOffset);
        final long unpack2 = buffer.getLong(offset + indexOffset + Long.BYTES);
        final long unpack3 = buffer.getLong(offset + indexOffset + (2 * Long.BYTES));
        final long unpack4 = buffer.getLong(offset + indexOffset + (3 * Long.BYTES));
        final long unpack5 = buffer.getLong(offset + indexOffset + (4 * Long.BYTES));
        final long unpack6 = buffer.getLong(offset + indexOffset + (5 * Long.BYTES));
        out[outPosition + i++] = base + ((unpack >>> 16) & 0xFFFFFFFFFFFFL);
        out[outPosition + i++] = base + (((unpack & 0xFFFFL) << 32) | ((unpack2 >>> 32) & 0xFFFFFFFFL));
        out[outPosition + i++] = base + (((unpack2 & 0xFFFFFFFFL) << 16) | ((unpack3 >>> 48) & 0xFFFFL));
        out[outPosition + i++] = base + (unpack3 & 0xFFFFFFFFFFFFL);
        out[outPosition + i++] = base + ((unpack4 >>> 16) & 0xFFFFFFFFFFFFL);
        out[outPosition + i++] = base + (((unpack4 & 0xFFFFL) << 32) | ((unpack5 >>> 32) & 0xFFFFFFFFL));
        out[outPosition + i++] = base + (((unpack5 & 0xFFFFFFFFL) << 16) | ((unpack6 >>> 48) & 0xFFFFL));
        out[outPosition + i++] = base + (unpack6 & 0xFFFFFFFFFFFFL);
      }
      // tail values
      while (i < length) {
        out[outPosition + i] = base + get(startIndex + i);
        i++;
      }
    }
  }

  /**
   * 56-bit deserializer: 7 bytes per value, read through a 64-bit window.
   */
  private static final class Size56Des implements LongDeserializer
  {
    final ByteBuffer buffer;
    final int offset;

    public Size56Des(ByteBuffer buffer, int bufferOffset)
    {
      this.buffer = buffer;
      this.offset = bufferOffset;
    }

    @Override
    public long get(int index)
    {
      return buffer.getLong(offset + (index * 7)) >>> 8;
    }

    @Override
    public void getDelta(long[] out, int outPosition, int startIndex, int length, long base)
    {
      int i = 0;
      // 8 values occupy 56 bytes: seven longs per iteration
      final int unpackSize = 7 * Long.BYTES;
      for (int indexOffset = startIndex * 7; i + 8 < length; indexOffset += unpackSize) {
        final long unpack = buffer.getLong(offset + indexOffset);
        final long unpack2 = buffer.getLong(offset + indexOffset + Long.BYTES);
        final long unpack3 = buffer.getLong(offset + indexOffset + (2 * Long.BYTES));
        final long unpack4 = buffer.getLong(offset + indexOffset + (3 * Long.BYTES));
        final long unpack5 = buffer.getLong(offset + indexOffset + (4 * Long.BYTES));
        final long unpack6 = buffer.getLong(offset + indexOffset + (5 * Long.BYTES));
        final long unpack7 = buffer.getLong(offset + indexOffset + (6 * Long.BYTES));
        out[outPosition + i++] = base + ((unpack >>> 8) & 0xFFFFFFFFFFFFFFL);
        out[outPosition + i++] = base + (((unpack & 0xFFL) << 48) | ((unpack2 >>> 16) & 0xFFFFFFFFFFFFL));
        out[outPosition + i++] = base + (((unpack2 & 0xFFFFL) << 40) | ((unpack3 >>> 24) & 0xFFFFFFFFFFL));
        out[outPosition + i++] = base + (((unpack3 & 0xFFFFFFL) << 32) | ((unpack4 >>> 32) & 0xFFFFFFFFL));
        out[outPosition + i++] = base + (((unpack4 & 0xFFFFFFFFL) << 24) | ((unpack5 >>> 40) & 0xFFFFFFL));
        out[outPosition + i++] = base + (((unpack5 & 0xFFFFFFFFFFL) << 16) | ((unpack6 >>> 48) & 0xFFFFL));
        out[outPosition + i++] = base + (((unpack6 & 0xFFFFFFFFFFFFL) << 8) | ((unpack7 >>> 56) & 0xFFL));
        out[outPosition + i++] = base + (unpack7 & 0xFFFFFFFFFFFFFFL);
      }
      // tail values
      while (i < length) {
        out[outPosition + i] = base + get(startIndex + i);
        i++;
      }
    }
  }

  /**
   * 64-bit deserializer: one full long per value.
   */
  private static final class Size64Des implements LongDeserializer
  {
    final ByteBuffer buffer;
    final int offset;

    public Size64Des(ByteBuffer buffer, int bufferOffset)
    {
      this.buffer = buffer;
      this.offset = bufferOffset;
    }

    @Override
    public long get(int index)
    {
      return buffer.getLong(offset + (index << 3));
    }

    @Override
    public void getDelta(long[] out, int outPosition, int startIndex, int length, long base)
    {
      for (int i = 0, indexOffset = (startIndex << 3); i < length; i++, indexOffset += Long.BYTES) {
        out[outPosition + i] = base + buffer.getLong(offset + indexOffset);
      }
    }

    @Override
    public int getDelta(long[] out, int outPosition, int[] indexes, int length, int indexOffset, int limit, long base)
    {
      for (int i = 0; i < length; i++) {
        int index = indexes[outPosition + i] - indexOffset;
        if (index >= limit) {
          return i;
        }
        out[outPosition + i] = base + buffer.getLong(offset + (index << 3));
      }
      return length;
    }
  }
}
googleapis/google-cloud-java
36,568
java-cloudbuild/proto-google-cloud-build-v2/src/main/java/com/google/cloudbuild/v2/FetchLinkableRepositoriesResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/devtools/cloudbuild/v2/repositories.proto // Protobuf Java Version: 3.25.8 package com.google.cloudbuild.v2; /** * * * <pre> * Response message for FetchLinkableRepositories. * </pre> * * Protobuf type {@code google.devtools.cloudbuild.v2.FetchLinkableRepositoriesResponse} */ public final class FetchLinkableRepositoriesResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.devtools.cloudbuild.v2.FetchLinkableRepositoriesResponse) FetchLinkableRepositoriesResponseOrBuilder { private static final long serialVersionUID = 0L; // Use FetchLinkableRepositoriesResponse.newBuilder() to construct. 
private FetchLinkableRepositoriesResponse( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private FetchLinkableRepositoriesResponse() { repositories_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new FetchLinkableRepositoriesResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloudbuild.v2.RepositoryManagerProto .internal_static_google_devtools_cloudbuild_v2_FetchLinkableRepositoriesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloudbuild.v2.RepositoryManagerProto .internal_static_google_devtools_cloudbuild_v2_FetchLinkableRepositoriesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloudbuild.v2.FetchLinkableRepositoriesResponse.class, com.google.cloudbuild.v2.FetchLinkableRepositoriesResponse.Builder.class); } public static final int REPOSITORIES_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloudbuild.v2.Repository> repositories_; /** * * * <pre> * repositories ready to be created. * </pre> * * <code>repeated .google.devtools.cloudbuild.v2.Repository repositories = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloudbuild.v2.Repository> getRepositoriesList() { return repositories_; } /** * * * <pre> * repositories ready to be created. * </pre> * * <code>repeated .google.devtools.cloudbuild.v2.Repository repositories = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloudbuild.v2.RepositoryOrBuilder> getRepositoriesOrBuilderList() { return repositories_; } /** * * * <pre> * repositories ready to be created. 
* </pre> * * <code>repeated .google.devtools.cloudbuild.v2.Repository repositories = 1;</code> */ @java.lang.Override public int getRepositoriesCount() { return repositories_.size(); } /** * * * <pre> * repositories ready to be created. * </pre> * * <code>repeated .google.devtools.cloudbuild.v2.Repository repositories = 1;</code> */ @java.lang.Override public com.google.cloudbuild.v2.Repository getRepositories(int index) { return repositories_.get(index); } /** * * * <pre> * repositories ready to be created. * </pre> * * <code>repeated .google.devtools.cloudbuild.v2.Repository repositories = 1;</code> */ @java.lang.Override public com.google.cloudbuild.v2.RepositoryOrBuilder getRepositoriesOrBuilder(int index) { return repositories_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < repositories_.size(); i++) { output.writeMessage(1, repositories_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < repositories_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, repositories_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloudbuild.v2.FetchLinkableRepositoriesResponse)) { return super.equals(obj); } com.google.cloudbuild.v2.FetchLinkableRepositoriesResponse other = (com.google.cloudbuild.v2.FetchLinkableRepositoriesResponse) obj; if (!getRepositoriesList().equals(other.getRepositoriesList())) return false; if 
(!getNextPageToken().equals(other.getNextPageToken())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getRepositoriesCount() > 0) { hash = (37 * hash) + REPOSITORIES_FIELD_NUMBER; hash = (53 * hash) + getRepositoriesList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloudbuild.v2.FetchLinkableRepositoriesResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloudbuild.v2.FetchLinkableRepositoriesResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloudbuild.v2.FetchLinkableRepositoriesResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloudbuild.v2.FetchLinkableRepositoriesResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloudbuild.v2.FetchLinkableRepositoriesResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloudbuild.v2.FetchLinkableRepositoriesResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) 
throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloudbuild.v2.FetchLinkableRepositoriesResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloudbuild.v2.FetchLinkableRepositoriesResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloudbuild.v2.FetchLinkableRepositoriesResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloudbuild.v2.FetchLinkableRepositoriesResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloudbuild.v2.FetchLinkableRepositoriesResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloudbuild.v2.FetchLinkableRepositoriesResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( 
com.google.cloudbuild.v2.FetchLinkableRepositoriesResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response message for FetchLinkableRepositories. * </pre> * * Protobuf type {@code google.devtools.cloudbuild.v2.FetchLinkableRepositoriesResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.devtools.cloudbuild.v2.FetchLinkableRepositoriesResponse) com.google.cloudbuild.v2.FetchLinkableRepositoriesResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloudbuild.v2.RepositoryManagerProto .internal_static_google_devtools_cloudbuild_v2_FetchLinkableRepositoriesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloudbuild.v2.RepositoryManagerProto .internal_static_google_devtools_cloudbuild_v2_FetchLinkableRepositoriesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloudbuild.v2.FetchLinkableRepositoriesResponse.class, com.google.cloudbuild.v2.FetchLinkableRepositoriesResponse.Builder.class); } // Construct using com.google.cloudbuild.v2.FetchLinkableRepositoriesResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (repositoriesBuilder_ == null) { repositories_ = java.util.Collections.emptyList(); } else { repositories_ = null; 
repositoriesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloudbuild.v2.RepositoryManagerProto .internal_static_google_devtools_cloudbuild_v2_FetchLinkableRepositoriesResponse_descriptor; } @java.lang.Override public com.google.cloudbuild.v2.FetchLinkableRepositoriesResponse getDefaultInstanceForType() { return com.google.cloudbuild.v2.FetchLinkableRepositoriesResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloudbuild.v2.FetchLinkableRepositoriesResponse build() { com.google.cloudbuild.v2.FetchLinkableRepositoriesResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloudbuild.v2.FetchLinkableRepositoriesResponse buildPartial() { com.google.cloudbuild.v2.FetchLinkableRepositoriesResponse result = new com.google.cloudbuild.v2.FetchLinkableRepositoriesResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloudbuild.v2.FetchLinkableRepositoriesResponse result) { if (repositoriesBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { repositories_ = java.util.Collections.unmodifiableList(repositories_); bitField0_ = (bitField0_ & ~0x00000001); } result.repositories_ = repositories_; } else { result.repositories_ = repositoriesBuilder_.build(); } } private void buildPartial0(com.google.cloudbuild.v2.FetchLinkableRepositoriesResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object 
value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloudbuild.v2.FetchLinkableRepositoriesResponse) { return mergeFrom((com.google.cloudbuild.v2.FetchLinkableRepositoriesResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloudbuild.v2.FetchLinkableRepositoriesResponse other) { if (other == com.google.cloudbuild.v2.FetchLinkableRepositoriesResponse.getDefaultInstance()) return this; if (repositoriesBuilder_ == null) { if (!other.repositories_.isEmpty()) { if (repositories_.isEmpty()) { repositories_ = other.repositories_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureRepositoriesIsMutable(); repositories_.addAll(other.repositories_); } onChanged(); } } else { if (!other.repositories_.isEmpty()) { if (repositoriesBuilder_.isEmpty()) { repositoriesBuilder_.dispose(); repositoriesBuilder_ = null; repositories_ = other.repositories_; bitField0_ = (bitField0_ & ~0x00000001); repositoriesBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getRepositoriesFieldBuilder() : null; } else { repositoriesBuilder_.addAllMessages(other.repositories_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloudbuild.v2.Repository m = input.readMessage( com.google.cloudbuild.v2.Repository.parser(), extensionRegistry); if (repositoriesBuilder_ == null) { ensureRepositoriesIsMutable(); repositories_.add(m); } else { repositoriesBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloudbuild.v2.Repository> repositories_ = java.util.Collections.emptyList(); private void ensureRepositoriesIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { repositories_ = new java.util.ArrayList<com.google.cloudbuild.v2.Repository>(repositories_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloudbuild.v2.Repository, com.google.cloudbuild.v2.Repository.Builder, com.google.cloudbuild.v2.RepositoryOrBuilder> 
repositoriesBuilder_; /** * * * <pre> * repositories ready to be created. * </pre> * * <code>repeated .google.devtools.cloudbuild.v2.Repository repositories = 1;</code> */ public java.util.List<com.google.cloudbuild.v2.Repository> getRepositoriesList() { if (repositoriesBuilder_ == null) { return java.util.Collections.unmodifiableList(repositories_); } else { return repositoriesBuilder_.getMessageList(); } } /** * * * <pre> * repositories ready to be created. * </pre> * * <code>repeated .google.devtools.cloudbuild.v2.Repository repositories = 1;</code> */ public int getRepositoriesCount() { if (repositoriesBuilder_ == null) { return repositories_.size(); } else { return repositoriesBuilder_.getCount(); } } /** * * * <pre> * repositories ready to be created. * </pre> * * <code>repeated .google.devtools.cloudbuild.v2.Repository repositories = 1;</code> */ public com.google.cloudbuild.v2.Repository getRepositories(int index) { if (repositoriesBuilder_ == null) { return repositories_.get(index); } else { return repositoriesBuilder_.getMessage(index); } } /** * * * <pre> * repositories ready to be created. * </pre> * * <code>repeated .google.devtools.cloudbuild.v2.Repository repositories = 1;</code> */ public Builder setRepositories(int index, com.google.cloudbuild.v2.Repository value) { if (repositoriesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureRepositoriesIsMutable(); repositories_.set(index, value); onChanged(); } else { repositoriesBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * repositories ready to be created. 
* </pre> * * <code>repeated .google.devtools.cloudbuild.v2.Repository repositories = 1;</code> */ public Builder setRepositories( int index, com.google.cloudbuild.v2.Repository.Builder builderForValue) { if (repositoriesBuilder_ == null) { ensureRepositoriesIsMutable(); repositories_.set(index, builderForValue.build()); onChanged(); } else { repositoriesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * repositories ready to be created. * </pre> * * <code>repeated .google.devtools.cloudbuild.v2.Repository repositories = 1;</code> */ public Builder addRepositories(com.google.cloudbuild.v2.Repository value) { if (repositoriesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureRepositoriesIsMutable(); repositories_.add(value); onChanged(); } else { repositoriesBuilder_.addMessage(value); } return this; } /** * * * <pre> * repositories ready to be created. * </pre> * * <code>repeated .google.devtools.cloudbuild.v2.Repository repositories = 1;</code> */ public Builder addRepositories(int index, com.google.cloudbuild.v2.Repository value) { if (repositoriesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureRepositoriesIsMutable(); repositories_.add(index, value); onChanged(); } else { repositoriesBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * repositories ready to be created. * </pre> * * <code>repeated .google.devtools.cloudbuild.v2.Repository repositories = 1;</code> */ public Builder addRepositories(com.google.cloudbuild.v2.Repository.Builder builderForValue) { if (repositoriesBuilder_ == null) { ensureRepositoriesIsMutable(); repositories_.add(builderForValue.build()); onChanged(); } else { repositoriesBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * repositories ready to be created. 
* </pre> * * <code>repeated .google.devtools.cloudbuild.v2.Repository repositories = 1;</code> */ public Builder addRepositories( int index, com.google.cloudbuild.v2.Repository.Builder builderForValue) { if (repositoriesBuilder_ == null) { ensureRepositoriesIsMutable(); repositories_.add(index, builderForValue.build()); onChanged(); } else { repositoriesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * repositories ready to be created. * </pre> * * <code>repeated .google.devtools.cloudbuild.v2.Repository repositories = 1;</code> */ public Builder addAllRepositories( java.lang.Iterable<? extends com.google.cloudbuild.v2.Repository> values) { if (repositoriesBuilder_ == null) { ensureRepositoriesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, repositories_); onChanged(); } else { repositoriesBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * repositories ready to be created. * </pre> * * <code>repeated .google.devtools.cloudbuild.v2.Repository repositories = 1;</code> */ public Builder clearRepositories() { if (repositoriesBuilder_ == null) { repositories_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { repositoriesBuilder_.clear(); } return this; } /** * * * <pre> * repositories ready to be created. * </pre> * * <code>repeated .google.devtools.cloudbuild.v2.Repository repositories = 1;</code> */ public Builder removeRepositories(int index) { if (repositoriesBuilder_ == null) { ensureRepositoriesIsMutable(); repositories_.remove(index); onChanged(); } else { repositoriesBuilder_.remove(index); } return this; } /** * * * <pre> * repositories ready to be created. 
* </pre> * * <code>repeated .google.devtools.cloudbuild.v2.Repository repositories = 1;</code> */ public com.google.cloudbuild.v2.Repository.Builder getRepositoriesBuilder(int index) { return getRepositoriesFieldBuilder().getBuilder(index); } /** * * * <pre> * repositories ready to be created. * </pre> * * <code>repeated .google.devtools.cloudbuild.v2.Repository repositories = 1;</code> */ public com.google.cloudbuild.v2.RepositoryOrBuilder getRepositoriesOrBuilder(int index) { if (repositoriesBuilder_ == null) { return repositories_.get(index); } else { return repositoriesBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * repositories ready to be created. * </pre> * * <code>repeated .google.devtools.cloudbuild.v2.Repository repositories = 1;</code> */ public java.util.List<? extends com.google.cloudbuild.v2.RepositoryOrBuilder> getRepositoriesOrBuilderList() { if (repositoriesBuilder_ != null) { return repositoriesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(repositories_); } } /** * * * <pre> * repositories ready to be created. * </pre> * * <code>repeated .google.devtools.cloudbuild.v2.Repository repositories = 1;</code> */ public com.google.cloudbuild.v2.Repository.Builder addRepositoriesBuilder() { return getRepositoriesFieldBuilder() .addBuilder(com.google.cloudbuild.v2.Repository.getDefaultInstance()); } /** * * * <pre> * repositories ready to be created. * </pre> * * <code>repeated .google.devtools.cloudbuild.v2.Repository repositories = 1;</code> */ public com.google.cloudbuild.v2.Repository.Builder addRepositoriesBuilder(int index) { return getRepositoriesFieldBuilder() .addBuilder(index, com.google.cloudbuild.v2.Repository.getDefaultInstance()); } /** * * * <pre> * repositories ready to be created. 
* </pre> * * <code>repeated .google.devtools.cloudbuild.v2.Repository repositories = 1;</code> */ public java.util.List<com.google.cloudbuild.v2.Repository.Builder> getRepositoriesBuilderList() { return getRepositoriesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloudbuild.v2.Repository, com.google.cloudbuild.v2.Repository.Builder, com.google.cloudbuild.v2.RepositoryOrBuilder> getRepositoriesFieldBuilder() { if (repositoriesBuilder_ == null) { repositoriesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloudbuild.v2.Repository, com.google.cloudbuild.v2.Repository.Builder, com.google.cloudbuild.v2.RepositoryOrBuilder>( repositories_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); repositories_ = null; } return repositoriesBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A token identifying a page of results the server should return. 
* </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.devtools.cloudbuild.v2.FetchLinkableRepositoriesResponse) } // @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v2.FetchLinkableRepositoriesResponse) private static final com.google.cloudbuild.v2.FetchLinkableRepositoriesResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloudbuild.v2.FetchLinkableRepositoriesResponse(); } public static com.google.cloudbuild.v2.FetchLinkableRepositoriesResponse getDefaultInstance() { return 
DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<FetchLinkableRepositoriesResponse> PARSER = new com.google.protobuf.AbstractParser<FetchLinkableRepositoriesResponse>() { @java.lang.Override public FetchLinkableRepositoriesResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<FetchLinkableRepositoriesResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<FetchLinkableRepositoriesResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloudbuild.v2.FetchLinkableRepositoriesResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/geode
35,938
geode-core/src/integrationTest/java/org/apache/geode/cache/query/internal/index/IndexedMergeEquiJoinScenariosJUnitTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * Created on Dec 2, 2005 */ package org.apache.geode.cache.query.internal.index; import static org.apache.geode.cache.Region.SEPARATOR; import static org.junit.Assert.fail; import java.util.ArrayList; import java.util.Collection; import java.util.HashSet; import java.util.Set; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; import org.apache.geode.cache.Region; import org.apache.geode.cache.query.CacheUtils; import org.apache.geode.cache.query.Index; import org.apache.geode.cache.query.IndexType; import org.apache.geode.cache.query.Query; import org.apache.geode.cache.query.QueryService; import org.apache.geode.cache.query.SelectResults; import org.apache.geode.cache.query.data.City; import org.apache.geode.cache.query.data.Country; import org.apache.geode.cache.query.data.District; import org.apache.geode.cache.query.data.Portfolio; import org.apache.geode.cache.query.data.State; import org.apache.geode.cache.query.data.Village; import org.apache.geode.cache.query.functional.StructSetOrResultsSet; import org.apache.geode.cache.query.internal.QueryObserverAdapter; import 
org.apache.geode.cache.query.internal.QueryObserverHolder;
import org.apache.geode.test.junit.categories.OQLIndexTest;

/**
 * Tests OQL merge equi-join scenarios against range indexes.
 *
 * <p>Each query in {@link #testNonNestedQueries()} is executed twice — first without any
 * indexes, then again after {@link #createIndex()} has populated the regions with
 * functional indexes (with {@code IndexManager.TEST_RANGEINDEX_ONLY} forced on). The test
 * asserts that the indexed run actually consulted an index and that both runs produce
 * identical result sets.
 */
@Category({OQLIndexTest.class})
public class IndexedMergeEquiJoinScenariosJUnitTest {

  /**
   * Starts the cache and populates the test regions:
   * <ul>
   *   <li>{@code Portfolios1} (5 entries), {@code Portfolios2} (2), {@code Portfolios3} (4),
   *       each keyed by the stringified index and holding {@code Portfolio(i)};
   *   <li>{@code Countries1..3}, each holding the same two {@link Country} object graphs
   *       (INDIA under key 1, ISRAEL under key 2) built from shared
   *       state/district/city/village sets.
   * </ul>
   */
  @Before
  public void setUp() throws java.lang.Exception {
    CacheUtils.log("Creating regions");
    CacheUtils.startCache();
    Region region1 = CacheUtils.createRegion("Portfolios1", Portfolio.class);
    for (int i = 0; i < 5; i++) {
      region1.put("" + i, new Portfolio(i));
    }
    Region region2 = CacheUtils.createRegion("Portfolios2", Portfolio.class);
    for (int i = 0; i < 2; i++) {
      region2.put("" + i, new Portfolio(i));
    }
    Region region3 = CacheUtils.createRegion("Portfolios3", Portfolio.class);
    for (int i = 0; i < 4; i++) {
      region3.put("" + i, new Portfolio(i));
    }
    CacheUtils.log("Portfolio regions created and populated");
    Region region4 = CacheUtils.createRegion("Countries1", Country.class);
    Region region5 = CacheUtils.createRegion("Countries2", Country.class);
    Region region6 = CacheUtils.createRegion("Countries3", Country.class);

    // Build the shared Country object graph; both countries reuse the same
    // states/districts/cities/villages collections.
    /* create villages */
    Village v1 = new Village("MAHARASHTRA_VILLAGE1", 123456);
    Village v2 = new Village("PUNJAB_VILLAGE1", 123789);
    Set villages = new HashSet();
    villages.add(v1);
    villages.add(v2);
    // villages.add(v3); //villages.add(v4); villages.add(v5);
    /* create cities */
    City ct1 = new City("MUMBAI", 123456);
    City ct2 = new City("PUNE", 123789);
    Set cities = new HashSet();
    cities.add(ct1);
    cities.add(ct2);
    // cities.add(ct3); cities.add(ct4);
    /* create districts */
    District d1 = new District("MUMBAIDIST", cities, villages);
    District d2 = new District("PUNEDIST", cities, villages);
    Set districts = new HashSet();
    districts.add(d1);
    districts.add(d2);
    // districts.add(d3); districts.add(d4);
    /* create states */
    State s1 = new State("MAHARASHTRA", "west", districts);
    State s2 = new State("PUNJAB", "north", districts);
    Set states = new HashSet();
    states.add(s1);
    states.add(s2);
    // states.add(s3); //states.add(s4); states.add(s5);
    /* create countries */
    Country c1 = new Country("INDIA", "asia", states);
    Country c2 = new Country("ISRAEL", "africa", states);
    // i runs 1..2, so i % 3 hits case 1 (INDIA under key 1) and case 2 (ISRAEL under
    // key 2) in each of the three Countries regions; the default branch is unreachable
    // with the current bounds.
    for (int i = 1; i < 3; i++) {
      int temp;
      temp = i % 3;
      switch (temp) {
        case 1:
          region4.put(i, c1);
          region5.put(i, c1);
          region6.put(i, c1);
          break;
        case 2:
          region4.put(i, c2);
          region5.put(i, c2);
          region6.put(i, c2);
          break;
        default:
          CacheUtils.log("Nothing to add in region for: " + temp);
          break;
      } // end of switch
    } // end of for
    CacheUtils.log("Country regions created and populated");
  }

  /**
   * Runs every query in the battery twice — once without indexes and once after
   * {@link #createIndex()} — with {@code IndexManager.TEST_RANGEINDEX_ONLY} forced on for
   * the duration, fails if the indexed execution did not use an index, and compares the
   * two result sets of each query via {@link StructSetOrResultsSet}.
   *
   * @throws Exception if query compilation/execution or index creation fails
   */
  @Test
  public void testNonNestedQueries() throws Exception {
    // NOTE(review): return value discarded — presumably only ensures the query service
    // (and cache) is initialized before the flag below is flipped; confirm it is needed.
    CacheUtils.getQueryService();
    IndexManager.TEST_RANGEINDEX_ONLY = true;
    try {
      // Queries mix equi-join conditions across regions with constant predicates and
      // literal true/false terms in varying AND/OR combinations.
      String[] queries = {
          /*
           * 1
           * "select distinct * from /Portfolios1 pf1, /Portfolios2 pf2, /Countries1 c1, /Countries2 c2 "
           * + "where pf1.status = pf2.status and c1.name = c2.name",
           */
          /* 2 */
          "select distinct * from " + SEPARATOR + "Portfolios1 pf1, pf1.positions.values pos1, "
              + SEPARATOR + "Portfolios2 pf2, " + SEPARATOR + "Countries1 c1, " + SEPARATOR
              + "Countries2 c2 "
              + "where pf1.status = pf2.status and c1.name = c2.name or pos1.secId = 'IBM'",
          /* 3 */
          "select distinct * from " + SEPARATOR + "Portfolios1 pf1, " + SEPARATOR
              + "Portfolios2 pf2, " + SEPARATOR + "Countries1 c1, " + SEPARATOR + "Countries2 c2 "
              + "where pf1.status = pf2.status or c1.name = c2.name",
          /* 4 */
          "Select distinct * " + "from " + SEPARATOR
              + "Portfolios1 pfos, pfos.positions.values Pos1, " + SEPARATOR + "Countries1 c1, "
              + SEPARATOR + "Countries2 c2, " + SEPARATOR + "Countries3 c3, " + SEPARATOR
              + "Portfolios3 pfo3 "
              + "where Pos1.secId = 'YHOO' and c1.name = c2.name or c3.name = 'INDIA' and pfo3.status != 'inactive' or pfo3.\"type\" = 'type1' and pfo3.status = pfos.status ",
          /* 5 */
          "Select distinct * " + "from " + SEPARATOR
              + "Portfolios1 pfos, pfos.positions.values Pos1, " + SEPARATOR + "Countries1 c1, "
              + SEPARATOR + "Countries2 c2, " + SEPARATOR + "Countries3 c3, " + SEPARATOR
              + "Portfolios3 pfo3 "
              + "where Pos1.secId = 'YHOO' or c1.name = c2.name or c3.name = 'INDIA' and pfo3.status != 'inactive' or pfo3.\"type\" = 'type1' and pfo3.status = pfos.status ",
          /* 6 */
          "Select distinct * " + "from " + SEPARATOR + "Portfolios1 pfos, "
              + "pfos.positions.values Pos1, " + SEPARATOR + "Countries1 c1, " + SEPARATOR
              + "Countries2 c2, " + SEPARATOR + "Countries3 c3, " + SEPARATOR + "Portfolios3 pfo3 "
              + "where Pos1.secId = 'YHOO' or " + "c1.name = c2.name or "
              + "pfo3.status != 'inactive' or " + "pfo3.status = pfos.status ",
          /* 7 */
          "Select distinct * " + "from " + SEPARATOR
              + "Countries1 c1, c1.states sts1, sts1.districts dists, dists.cities ct1, dists.villages villgs1, "
              + SEPARATOR + "Countries2 c2, " + SEPARATOR + "Countries3 c3, " + SEPARATOR
              + "Portfolios3 pfo3 " + "where " + "c1.name = c2.name or " + "ct1.name != 'PUNE' or "
              + "villgs1.name = 'MAHARASHTRA_VILLAGE1'",
          /* 8 */
          "Select distinct * " + "from " + SEPARATOR
              + "Countries1 c1, c1.states sts1, sts1.districts dists, dists.cities ct1, dists.villages villgs1, "
              + SEPARATOR + "Countries2 c2, " + SEPARATOR + "Countries3 c3 " + "where "
              + "c1.name = c2.name and " + "ct1.name != 'PUNE' and "
              + "villgs1.name = 'MAHARASHTRA_VILLAGE1'",
          /* 9 */
          "Select distinct * " + "from " + SEPARATOR
              + "Countries1 c1, c1.states sts1, sts1.districts dists1, dists1.cities ct1, dists1.villages villgs1, "
              + SEPARATOR + "Countries2 c2, c2.states s2, " + SEPARATOR
              + "Countries3 c3, c3.states sts3, sts3.districts dists3, dists3.cities ct3, dists3.villages villgs3 "
              + "where " + "c1.name = c2.name and " + "ct1.name != 'PUNE' and "
              + "villgs1.name = 'MAHARASHTRA_VILLAGE1' or " + "villgs1.name = villgs3.name or "
              + "s2.name = 'PUNJAB' or " + "ct1.name = ct3.name and "
              + "dists3.name = 'MUMBAIDIST'",
          /* 10 */
          "Select distinct * " + "from " + SEPARATOR
              + "Countries1 c1, c1.states sts1, sts1.districts dists1, dists1.cities ct1, dists1.villages villgs1, "
              + SEPARATOR + "Countries2 c2, c2.states s2, " + SEPARATOR
              + "Countries3 c3, c3.states sts3, sts3.districts dists3, dists3.cities ct3, dists3.villages villgs3 "
              + "where " + "c1.name = c2.name or " + "ct1.name != 'PUNE' and "
              + "villgs1.name = 'MAHARASHTRA_VILLAGE1' or " + "villgs1.name = villgs3.name or "
              + "s2.name = 'PUNJAB' or " + "ct1.name = ct3.name and "
              + "dists3.name = 'MUMBAIDIST'",
          /* 11 */
          "Select distinct * " + "from " + SEPARATOR
              + "Countries1 c1, c1.states sts1, sts1.districts dists1, dists1.cities ct1, dists1.villages villgs1, "
              + SEPARATOR + "Countries2 c2, c2.states s2, " + SEPARATOR
              + "Countries3 c3, c3.states sts3, sts3.districts dists3, dists3.cities ct3, dists3.villages villgs3 "
              + "where " + "c1.name = c2.name and " + "ct1.name != 'PUNE' or "
              + "villgs1.name = 'MAHARASHTRA_VILLAGE1' or " + "villgs1.name = villgs3.name or "
              + "s2.name = 'PUNJAB' or " + "ct1.name = ct3.name and "
              + "dists3.name = 'MUMBAIDIST'",
          /* 12 */
          "Select distinct * " + "from " + SEPARATOR
              + "Countries1 c1, c1.states sts1, sts1.districts dists1, dists1.cities ct1, dists1.villages villgs1, "
              + SEPARATOR + "Countries2 c2, c2.states s2, " + SEPARATOR
              + "Countries3 c3, c3.states sts3, sts3.districts dists3, dists3.cities ct3, dists3.villages villgs3 "
              + "where " + "c1.name = c2.name and " + "ct1.name != 'PUNE' or "
              + "villgs1.name = 'MAHARASHTRA_VILLAGE1' and " + "villgs1.name = villgs3.name or "
              + "s2.name = 'PUNJAB' or " + "ct1.name = ct3.name or "
              + "dists3.name = 'MUMBAIDIST'",
          /* 13 */
          "Select distinct * " + "from " + SEPARATOR
              + "Countries1 c1, c1.states sts1, sts1.districts dists1, dists1.cities ct1, dists1.villages villgs1, "
              + SEPARATOR + "Countries2 c2, c2.states s2, " + SEPARATOR
              + "Countries3 c3, c3.states sts3, sts3.districts dists3, dists3.cities ct3, dists3.villages villgs3 "
              + "where " + "c1.name = c2.name and " + "ct1.name != 'PUNE' and "
              + "villgs1.name = 'MAHARASHTRA_VILLAGE1' and " + "villgs1.name = villgs3.name or "
              + "s2.name = 'PUNJAB' and " + "ct1.name = ct3.name and "
              + "dists3.name = 'MUMBAIDIST'",
          /* 14 */
          "Select distinct * " + "from " + SEPARATOR
              + "Countries1 c1, c1.states sts1, sts1.districts dists1, dists1.cities ct1, dists1.villages villgs1, "
              + SEPARATOR + "Countries2 c2, c2.states s2, " + SEPARATOR
              + "Countries3 c3, c3.states sts3, sts3.districts dists3, dists3.cities ct3, dists3.villages villgs3 "
              + "where " + "c1.name = c2.name or " + "ct1.name != 'PUNE' or "
              + "villgs1.name = 'MAHARASHTRA_VILLAGE1' or " + "villgs1.name = villgs3.name or "
              + "s2.name = 'PUNJAB' or " + "ct1.name = ct3.name or "
              + "dists3.name = 'MUMBAIDIST'",
          /* 15 */
          "Select distinct * " + "from " + SEPARATOR
              + "Countries1 c1, c1.states sts1, sts1.districts dists1, dists1.cities ct1, dists1.villages villgs1, "
              + SEPARATOR + "Countries2 c2, c2.states s2, " + SEPARATOR
              + "Countries3 c3, c3.states sts3, sts3.districts dists3, dists3.cities ct3, dists3.villages villgs3 "
              + "where " + "c1.name = c2.name and " + "ct1.name != 'PUNE' and "
              + "villgs1.name = 'MAHARASHTRA_VILLAGE1' and " + "villgs1.name = villgs3.name and "
              + "s2.name = 'PUNJAB' and " + "ct1.name = ct3.name and "
              + "dists3.name = 'MUMBAIDIST'",
          /* 16 */
          "Select distinct * " + "from " + SEPARATOR
              + "Countries1 c1, c1.states sts1, sts1.districts dists1, dists1.cities ct1, dists1.villages villgs1, "
              + SEPARATOR + "Countries2 c2, c2.states s2, " + SEPARATOR
              + "Countries3 c3, c3.states sts3, sts3.districts dists3, dists3.cities ct3, dists3.villages villgs3 "
              + "where " + "c1.name = c2.name and " + "sts1.name != 'PUNJAB' and "
              + "ct1.name != 'PUNE' and " + "villgs1.name = 'MAHARASHTRA_VILLAGE1' and "
              + "villgs1.name = villgs3.name and " + "sts3.name != sts1.name and "
              + "s2.name = 'PUNJAB' and " + "ct1.name = ct3.name and "
              + "dists3.name = 'MUMBAIDIST' and dists3.name != s2.name",
          /* 17 */
          "Select distinct * " + "from " + SEPARATOR + "Portfolios1 pfos, "
              + "pfos.positions.values Pos1, " + SEPARATOR + "Countries1 c1, " + SEPARATOR
              + "Countries2 c2, " + SEPARATOR + "Countries3 c3, " + SEPARATOR + "Portfolios3 pfo3 "
              + "where Pos1.secId = 'YHOO' and " + "c1.name = c2.name or "
              + "pfo3.status != 'inactive' and " + "pfo3.status = pfos.status ",
          /* 18 */
          "Select distinct * " + "from " + SEPARATOR + "Portfolios1 pfos, "
              + "pfos.positions.values Pos1, " + SEPARATOR + "Countries1 c1, " + SEPARATOR
              + "Countries2 c2, " + SEPARATOR + "Countries3 c3, " + SEPARATOR + "Portfolios3 pfo3 "
              + "where Pos1.secId = 'YHOO' and " + "c1.name = c2.name or "
              + "pfo3.status != 'inactive' and " + "pfo3.status = pfos.status ",
          /* 19 */
          "Select distinct * " + "from " + SEPARATOR + "Portfolios1 pfos, "
              + "pfos.positions.values Pos1, " + SEPARATOR
              + "Countries1 c1, c1.states sts1, sts1.districts dists1, dists1.cities ct1, dists1.villages villgs1, "
              + SEPARATOR + "Countries2 c2, " + SEPARATOR + "Countries3 c3, " + SEPARATOR
              + "Portfolios3 pfo3 " + "where Pos1.secId = 'YHOO' and " + "c1.name = c2.name or "
              + "pfo3.status != 'inactive' and "
              + "pfo3.status = pfos.status and villgs1.name = 'MAHARASHTRA_VILLAGE1' ",
          /* 20 */
          "Select distinct * " + "from " + SEPARATOR + "Portfolios1 pfos, "
              + "pfos.positions.values Pos1, " + SEPARATOR
              + "Countries1 c1, c1.states sts1, sts1.districts dists1, dists1.cities ct1, dists1.villages villgs1, "
              + SEPARATOR + "Countries2 c2, " + SEPARATOR + "Countries3 c3, " + SEPARATOR
              + "Portfolios3 pfo3 " + "where Pos1.secId = 'YHOO' and " + "c1.name = c2.name or "
              + "pfo3.status != 'inactive' and "
              + "pfo3.status = pfos.status and villgs1.name = 'MAHARASHTRA_VILLAGE1' or pfos.ID != 0",
          /* 21 */
          "Select distinct * " + "from " + SEPARATOR + "Portfolios1 pfos, "
              + "pfos.positions.values Pos1, " + SEPARATOR
              + "Countries1 c1, c1.states sts1, sts1.districts dists1, dists1.cities ct1, dists1.villages villgs1, "
              + SEPARATOR + "Countries2 c2, " + SEPARATOR + "Countries3 c3, " + SEPARATOR
              + "Portfolios3 pfo3 " + "where Pos1.secId = 'YHOO' and " + "c1.name = c2.name or "
              + "pfo3.status != 'inactive' and "
              + "pfo3.status = pfos.status and villgs1.name = 'MAHARASHTRA_VILLAGE1' or pfos.ID != 0",
          /* 22 */
          "Select distinct * " + "from " + SEPARATOR + "Portfolios1 pfos, "
              + "pfos.positions.values Pos1, " + SEPARATOR
              + "Countries1 c1, c1.states sts1, sts1.districts dists1, dists1.cities ct1, dists1.villages villgs1, "
              + SEPARATOR + "Countries2 c2, " + SEPARATOR + "Countries3 c3, " + SEPARATOR
              + "Portfolios3 pfo3 " + "where Pos1.secId = 'YHOO' and " + "c1.name = c2.name or "
              + "pfo3.status != 'inactive' and "
              + "pfo3.status = pfos.status and villgs1.name = 'MAHARASHTRA_VILLAGE1' or pfos.ID != 0",
          /* 23 */
          "Select distinct * " + "from " + SEPARATOR + "Portfolios1 pfos, "
              + "pfos.positions.values Pos1, " + SEPARATOR + "Countries1 c1, " + SEPARATOR
              + "Countries2 c2, " + SEPARATOR + "Countries3 c3, " + SEPARATOR + "Portfolios3 pfo3 "
              + "where Pos1.secId = 'YHOO' and " + "(c1.name = c2.name or "
              + "pfo3.status != 'inactive') and " + "pfo3.status = pfos.status ",
          /* 24 */
          "Select distinct * " + "from " + SEPARATOR + "Portfolios1 pfos, "
              + "pfos.positions.values Pos1, " + SEPARATOR + "Countries1 c1, c1.states s1, "
              + SEPARATOR + "Countries2 c2, c2.states s2, " + SEPARATOR + "Countries3 c3, "
              + SEPARATOR + "Portfolios3 pfo3 " + "where Pos1.secId = 'YHOO' and "
              + "((c1.name = c2.name or " + "pfo3.status != 'inactive') and "
              + "pfo3.status = pfos.status) or s1.name = 'MAHARASHTRA' and s2.name != 'MAHARASHTRA'",
          /* 25 */
          "Select distinct * " + "from " + SEPARATOR + "Portfolios1 pfos, "
              + "pfos.positions.values Pos1, " + SEPARATOR
              + "Countries1 c1, c1.states sts1, sts1.districts dists1, dists1.cities ct1, dists1.villages villgs1, "
              + SEPARATOR + "Countries2 c2, " + SEPARATOR + "Countries3 c3, " + SEPARATOR
              + "Portfolios3 pfo3 " + "where (Pos1.secId = 'YHOO' and " + "c1.name = c2.name or "
              + "pfo3.status != 'inactive') and "
              + "pfo3.status = pfos.status and villgs1.name = 'MAHARASHTRA_VILLAGE1' ",
          /* 26 */
          "Select distinct * " + "from " + SEPARATOR + "Portfolios1 pfos, "
              + "pfos.positions.values Pos1, " + SEPARATOR
              + "Countries1 c1, c1.states sts1, sts1.districts dists1, dists1.cities ct1, dists1.villages villgs1, "
              + SEPARATOR + "Countries2 c2, " + SEPARATOR + "Countries3 c3, " + SEPARATOR
              + "Portfolios3 pfo3 " + "where Pos1.secId = 'YHOO' and " + "c1.name = c2.name or "
              + "pfo3.status != 'inactive' and "
              + "pfo3.status = pfos.status and (villgs1.name = 'MAHARASHTRA_VILLAGE1' or pfos.ID != 0)",
          /* 27 */
          "Select distinct * " + "from " + SEPARATOR + "Portfolios1 pfos, "
              + "pfos.positions.values Pos1, " + SEPARATOR
              + "Countries1 c1, c1.states sts1, sts1.districts dists1, dists1.cities ct1, dists1.villages villgs1, "
              + SEPARATOR + "Countries2 c2, " + SEPARATOR + "Countries3 c3, " + SEPARATOR
              + "Portfolios3 pfo3 " + "where Pos1.secId = 'YHOO' and " + "(c1.name = c2.name or "
              + "pfo3.status != 'inactive') and "
              + "pfo3.status = pfos.status and (villgs1.name = 'MAHARASHTRA_VILLAGE1' or pfos.ID != 0)",
          /* 28 */
          "Select distinct * " + "from " + SEPARATOR + "Portfolios1 pfos, "
              + "pfos.positions.values Pos1, " + SEPARATOR
              + "Countries1 c1, c1.states sts1, sts1.districts dists1, dists1.cities ct1, dists1.villages villgs1, "
              + SEPARATOR + "Countries2 c2, " + SEPARATOR + "Countries3 c3, " + SEPARATOR
              + "Portfolios3 pfo3 " + "where Pos1.secId = 'YHOO' and " + "(c1.name = c2.name or "
              + "pfo3.status != 'inactive' and "
              + "pfo3.status = pfos.status and (villgs1.name = 'MAHARASHTRA_VILLAGE1' or pfos.ID != 0))",
          // Queries 29-58 mix the join conditions with literal true/false terms and
          // constant predicates in different AND/OR arrangements.
          /* 29 */
          "select distinct * from " + SEPARATOR + "Portfolios1 pf1, " + SEPARATOR
              + "Portfolios2 pf2, " + SEPARATOR + "Countries1 c1, " + SEPARATOR + "Countries2 c2 "
              + "where pf1.status = pf2.status or false or c1.name = c2.name",
          /* 30 */
          "select distinct * from " + SEPARATOR + "Portfolios1 pf1, pf1.positions.values pos1, "
              + SEPARATOR + "Portfolios2 pf2, " + SEPARATOR + "Countries1 c1, " + SEPARATOR
              + "Countries2 c2 "
              + "where pf1.status = pf2.status or c1.name = c2.name and pos1.secId = 'IBM' and true",
          /* 31 */
          "select distinct * from " + SEPARATOR + "Portfolios1 pf1, " + SEPARATOR
              + "Portfolios2 pf2, " + SEPARATOR + "Countries1 c1, " + SEPARATOR + "Countries2 c2 "
              + "where pf1.status = pf2.status or c1.name = c2.name and true",
          /* 32 */
          "select distinct * from " + SEPARATOR + "Portfolios1 pf1, pf1.positions.values pos1, "
              + SEPARATOR + "Portfolios2 pf2, " + SEPARATOR + "Countries1 c1, " + SEPARATOR
              + "Countries2 c2 "
              + "where pf1.status = pf2.status and c1.name = c2.name or pos1.secId = 'IBM'",
          /* 33 */
          "select distinct * from " + SEPARATOR + "Portfolios1 pf1, " + SEPARATOR
              + "Portfolios2 pf2, " + SEPARATOR + "Countries1 c1, " + SEPARATOR + "Countries2 c2 "
              + "where pf1.status = pf2.status or c1.name = c2.name and false",
          /* 34 */
          "select distinct * from " + SEPARATOR + "Portfolios1 pf1, " + SEPARATOR
              + "Portfolios2 pf2, " + SEPARATOR + "Countries1 c1, " + SEPARATOR + "Countries2 c2 "
              + "where pf1.status = pf2.status or false or c1.name = c2.name or pf2.ID = 1 or c1.name = 'INDIA'",
          /* 35 */
          "select distinct * from " + SEPARATOR + "Portfolios1 pf1, pf1.positions.values pos1, "
              + SEPARATOR + "Portfolios2 pf2, " + SEPARATOR + "Countries1 c1, " + SEPARATOR
              + "Countries2 c2 "
              + "where pf1.status = pf2.status or c1.name = c2.name and pos1.secId = 'IBM' and true or pf1.ID != 3",
          /* 36 */
          "select distinct * from " + SEPARATOR + "Portfolios1 pf1, " + SEPARATOR
              + "Portfolios2 pf2, " + SEPARATOR + "Countries1 c1, " + SEPARATOR + "Countries2 c2 "
              + "where pf1.status = pf2.status or c1.name = c2.name and true or pf1.ID = pf2.ID",
          /* 37 */
          "select distinct * from " + SEPARATOR + "Portfolios1 pf1, pf1.positions.values pos1, "
              + SEPARATOR + "Portfolios2 pf2, " + SEPARATOR + "Countries1 c1, " + SEPARATOR
              + "Countries2 c2 "
              + "where pf1.status = pf2.status and c1.name = c2.name or pos1.secId = 'IBM' or false and pf1.ID = pf2.ID",
          /* 38 */
          "select distinct * from " + SEPARATOR + "Portfolios1 pf1, " + SEPARATOR
              + "Portfolios2 pf2, " + SEPARATOR + "Countries1 c1, " + SEPARATOR + "Countries2 c2 "
              + "where pf1.status = pf2.status or c1.name = c2.name and false and pf1.ID = pf2.ID",
          /* 39 */
          "select distinct * from " + SEPARATOR + "Portfolios1 pf1, " + SEPARATOR
              + "Portfolios2 pf2, " + SEPARATOR + "Countries1 c1, " + SEPARATOR + "Countries2 c2 "
              + "where pf1.status = pf2.status or false or c1.name = c2.name or c1.name = 'INDIA' or pf1.ID = 2",
          /* 40 */
          "select distinct * from " + SEPARATOR + "Portfolios1 pf1, pf1.positions.values pos1, "
              + SEPARATOR + "Portfolios2 pf2, " + SEPARATOR + "Countries1 c1, " + SEPARATOR
              + "Countries2 c2 "
              + "where pf1.status = pf2.status or c1.name = c2.name and pos1.secId = 'IBM' and true",
          /* 41 */
          "select distinct * from " + SEPARATOR + "Portfolios1 pf1, " + SEPARATOR
              + "Portfolios2 pf2, " + SEPARATOR + "Countries1 c1, " + SEPARATOR + "Countries2 c2 "
              + "where pf1.status = pf2.status or c1.name = c2.name and true or c1.name = 'INDIA' or pf1.ID = 2",
          /* 42 */
          "select distinct * from " + SEPARATOR + "Portfolios1 pf1, pf1.positions.values pos1, "
              + SEPARATOR + "Portfolios2 pf2, " + SEPARATOR + "Countries1 c1, " + SEPARATOR
              + "Countries2 c2 "
              + "where pf1.status = pf2.status and c1.name = c2.name or pos1.secId = 'IBM' or c1.name = 'INDIA' or pf2.ID = 2",
          /* 43 */
          "select distinct * from " + SEPARATOR + "Portfolios1 pf1, " + SEPARATOR
              + "Portfolios2 pf2, " + SEPARATOR + "Countries1 c1, " + SEPARATOR + "Countries2 c2 "
              + "where pf1.status = pf2.status or c1.name = c2.name and false or c1.name = 'INDIA' or pf2.ID = 2",
          // FAILING
          /*44*/ "select distinct * from " + SEPARATOR + "Portfolios1 pf1, " + SEPARATOR
              + "Portfolios2 pf2, " + SEPARATOR + "Countries1 c1, " + SEPARATOR + "Countries2 c2 "
              + "where pf1.status = pf2.status or false or c1.name = c2.name or pf2.ID = 1 or c1.name = 'INDIA' or c1.name = 'INDIA' or pf1.ID = 2",
          /* 45 */
          "select distinct * from " + SEPARATOR + "Portfolios1 pf1, pf1.positions.values pos1, "
              + SEPARATOR + "Portfolios2 pf2, " + SEPARATOR + "Countries1 c1, " + SEPARATOR
              + "Countries2 c2 "
              + "where pf1.status = pf2.status or c1.name = c2.name and pos1.secId = 'IBM' and true or pf1.ID != 3 or c1.name = 'INDIA' or pf1.ID = 2",
          /* 46 */
          "select distinct * from " + SEPARATOR + "Portfolios1 pf1, " + SEPARATOR
              + "Portfolios2 pf2, " + SEPARATOR + "Countries1 c1, " + SEPARATOR + "Countries2 c2 "
              + "where pf1.status = pf2.status or c1.name = c2.name and true or pf1.ID = pf2.ID or c1.name = 'INDIA' or pf2.ID = 2",
          /* 47 */
          "select distinct * from " + SEPARATOR + "Portfolios1 pf1, pf1.positions.values pos1, "
              + SEPARATOR + "Portfolios2 pf2, " + SEPARATOR + "Countries1 c1, " + SEPARATOR
              + "Countries2 c2 "
              + "where pf1.status = pf2.status and c1.name = c2.name or pos1.secId = 'IBM' or false and pf1.ID = pf2.ID or c1.name = 'INDIA' or pf1.ID = 2",
          /* 48 */
          "select distinct * from " + SEPARATOR + "Portfolios1 pf1, " + SEPARATOR
              + "Portfolios2 pf2, " + SEPARATOR + "Countries1 c1, " + SEPARATOR + "Countries2 c2 "
              + "where pf1.status = pf2.status or c1.name = c2.name and false and pf1.ID = pf2.ID or c1.name = 'INDIA' or pf2.ID = 2",
          /* 49 */
          "select distinct * from " + SEPARATOR + "Portfolios1 pf1, " + SEPARATOR
              + "Portfolios2 pf2, " + SEPARATOR + "Countries1 c1, " + SEPARATOR + "Countries2 c2 "
              + "where pf1.status = pf2.status or false or c1.name = c2.name and c1.name = 'INDIA' and pf1.ID = 2",
          /* 50 */
          "select distinct * from " + SEPARATOR + "Portfolios1 pf1, pf1.positions.values pos1, "
              + SEPARATOR + "Portfolios2 pf2, " + SEPARATOR + "Countries1 c1, " + SEPARATOR
              + "Countries2 c2 "
              + "where pf1.status = pf2.status or c1.name = c2.name and pos1.secId = 'IBM' and true and c1.name = 'INDIA' and pf2.ID = 2",
          /* 51 */
          "select distinct * from " + SEPARATOR + "Portfolios1 pf1, " + SEPARATOR
              + "Portfolios2 pf2, " + SEPARATOR + "Countries1 c1, " + SEPARATOR + "Countries2 c2 "
              + "where pf1.status = pf2.status or c1.name = c2.name and true and c1.name = 'INDIA' and pf2.ID = 2",
          /* 52 */
          "select distinct * from " + SEPARATOR + "Portfolios1 pf1, pf1.positions.values pos1, "
              + SEPARATOR + "Portfolios2 pf2, " + SEPARATOR + "Countries1 c1, " + SEPARATOR
              + "Countries2 c2 "
              + "where pf1.status = pf2.status and c1.name = c2.name or pos1.secId = 'IBM' and c1.name = 'INDIA' and pf2.ID = 2",
          /* 53 */
          "select distinct * from " + SEPARATOR + "Portfolios1 pf1, " + SEPARATOR
              + "Portfolios2 pf2, " + SEPARATOR + "Countries1 c1, " + SEPARATOR + "Countries2 c2 "
              + "where pf1.status = pf2.status or c1.name = c2.name and false and c1.name = 'INDIA' and pf2.ID = 2",
          /* 54 */
          "select distinct * from " + SEPARATOR + "Portfolios1 pf1, " + SEPARATOR
              + "Portfolios2 pf2, " + SEPARATOR + "Countries1 c1, " + SEPARATOR + "Countries2 c2 "
              + "where pf1.status = pf2.status or false or c1.name = c2.name or pf2.ID = 1 or c1.name = 'INDIA' and c1.name = 'INDIA' and pf2.ID = 2",
          /* 55 */
          "select distinct * from " + SEPARATOR + "Portfolios1 pf1, pf1.positions.values pos1, "
              + SEPARATOR + "Portfolios2 pf2, " + SEPARATOR + "Countries1 c1, " + SEPARATOR
              + "Countries2 c2 "
              + "where pf1.status = pf2.status or c1.name = c2.name and pos1.secId = 'IBM' and true or pf1.ID != 3 and c1.name = 'INDIA' and pf2.ID = 2",
          /* 56 */
          "select distinct * from " + SEPARATOR + "Portfolios1 pf1, " + SEPARATOR
              + "Portfolios2 pf2, " + SEPARATOR + "Countries1 c1, " + SEPARATOR + "Countries2 c2 "
              + "where pf1.status = pf2.status or c1.name = c2.name and true or pf1.ID = pf2.ID and c1.name = 'INDIA' and pf2.ID = 2",
          /* 57 */
          "select distinct * from " + SEPARATOR + "Portfolios1 pf1, pf1.positions.values pos1, "
              + SEPARATOR + "Portfolios2 pf2, " + SEPARATOR + "Countries1 c1, " + SEPARATOR
              + "Countries2 c2 "
              + "where pf1.status = pf2.status and c1.name = c2.name or pos1.secId = 'IBM' or false and pf1.ID = pf2.ID and c1.name = 'INDIA' and pf2.ID = 2",
          /* 58 */
          "select distinct * from " + SEPARATOR + "Portfolios1 pf1, " + SEPARATOR
              + "Portfolios2 pf2, " + SEPARATOR + "Countries1 c1, " + SEPARATOR + "Countries2 c2 "
              + "where pf1.status = pf2.status or c1.name = c2.name and false and pf1.ID = pf2.ID and c1.name = 'INDIA' and pf2.ID = 2",};

      // rs[i][0] holds the un-indexed result for query i; rs[i][1] the indexed result.
      SelectResults[][] rs = new SelectResults[queries.length][2];

      // Phase 1: execute every query with no indexes present.
      for (int i = 0; i < queries.length; i++) {
        CacheUtils.log("Running query number :" + (i + 1) + " without Index");
        Query q = null;
        q = CacheUtils.getQueryService().newQuery(queries[i]);
        rs[i][0] = (SelectResults) q.execute();
      }
      CacheUtils.log("Now creating Indexes");
      createIndex();
      CacheUtils.log("All indexes created ");

      // Phase 2: re-execute with indexes and assert an index was consulted each time.
      for (int j = 0; j < queries.length; j++) {
        CacheUtils.log("Running query number :" + (j + 1) + " with Index");
        // NOTE(review): leftover debugging hook for query #5 — consider removing.
        if (j == 4) {
          System.out.print("Hi");
        }
        Query q2 = null;
        q2 = CacheUtils.getQueryService().newQuery(queries[j]);
        QueryObserverImpl observer = new QueryObserverImpl();
        QueryObserverHolder.setInstance(observer);
        try {
          rs[j][1] = (SelectResults) q2.execute();
          if (!observer.isIndexesUsed) {
            fail("------------ INDEX IS NOT USED FOR THE QUERY:: " + q2.getQueryString());
          }
        } catch (Exception e) {
          // NOTE(review): "EXCPETION" typo lives in a runtime message; left untouched here.
          System.out.println(
              "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!CAUGHT EXCPETION AT QUERY NO: " + (j + 1));
          e.printStackTrace();
          fail();
        }
      }

      // Phase 3: the un-indexed and indexed result sets must match for every query.
      StructSetOrResultsSet ssORrs = new StructSetOrResultsSet();
      ssORrs.CompareQueryResultsWithoutAndWithIndexes(rs, queries.length, queries);
    } finally {
      // Always restore the flag so other tests are not forced onto range indexes.
      IndexManager.TEST_RANGEINDEX_ONLY = false;
    }
  }

  /**
   * Creates functional indexes on every Portfolios/Countries region used by the queries:
   * secId/ID/status indexes (both with and without the positions iterator) on each
   * Portfolios region, and name indexes at the village/city/district/state/country levels
   * of the Countries regions.
   *
   * <p>NOTE(review): the index name {@code "countryNameB"} is used twice — once for
   * {@code Countries1} and once for {@code Countries2}. Presumably index names are scoped
   * per region so this does not conflict; confirm against
   * {@code QueryService.createIndex} name-uniqueness rules.
   *
   * @throws Exception if any index creation fails
   */
  public void createIndex() throws Exception {
    QueryService qs;
    qs = CacheUtils.getQueryService();
    qs.createIndex("Portfolio1secIdIdx", IndexType.FUNCTIONAL, "b.secId",
        SEPARATOR + "Portfolios1 pf, pf.positions.values b");
    qs.createIndex("Portfolio1IdIdx1", IndexType.FUNCTIONAL, "pf.ID",
        SEPARATOR + "Portfolios1 pf, pf.positions.values b");
    qs.createIndex("Portfolio1Idindex2", IndexType.FUNCTIONAL, "pf.ID",
        SEPARATOR + "Portfolios1 pf");
    qs.createIndex("Portfolio1statusIdx1", IndexType.FUNCTIONAL, "pf.status",
        SEPARATOR + "Portfolios1 pf, pf.positions.values b");
    qs.createIndex("Portfolio1statusIdx2", IndexType.FUNCTIONAL, "pf.status",
        SEPARATOR + "Portfolios1 pf");
    qs.createIndex("Portfolio2secIdIdx", IndexType.FUNCTIONAL, "b.secId",
        SEPARATOR + "Portfolios2 pf, pf.positions.values b");
    qs.createIndex("Portfolio2IdIdx1", IndexType.FUNCTIONAL, "pf.ID",
        SEPARATOR + "Portfolios2 pf, pf.positions.values b");
    qs.createIndex("Portfolio2Idindex2", IndexType.FUNCTIONAL, "pf.ID",
        SEPARATOR + "Portfolios2 pf");
    qs.createIndex("Portfolio2statusIdx1", IndexType.FUNCTIONAL, "pf.status",
        SEPARATOR + "Portfolios2 pf, pf.positions.values b");
    qs.createIndex("Portfolio2statusIdx2", IndexType.FUNCTIONAL, "pf.status",
        SEPARATOR + "Portfolios2 pf");
    qs.createIndex("Portfolio3secIdIdx", IndexType.FUNCTIONAL, "b.secId",
        SEPARATOR + "Portfolios3 pf, pf.positions.values b");
    qs.createIndex("Portfolio3IdIdx1", IndexType.FUNCTIONAL, "pf.ID",
        SEPARATOR + "Portfolios3 pf, pf.positions.values b");
    qs.createIndex("Portfolio3Idindex2", IndexType.FUNCTIONAL, "pf.ID",
        SEPARATOR + "Portfolios3 pf");
    qs.createIndex("Portfolio3statusIdx1", IndexType.FUNCTIONAL, "pf.status",
        SEPARATOR + "Portfolios3 pf, pf.positions.values b");
    qs.createIndex("Portfolio3statusIdx2", IndexType.FUNCTIONAL, "pf.status",
        SEPARATOR + "Portfolios3 pf");
    /* Indices on region1 */
    qs.createIndex("villageName1", IndexType.FUNCTIONAL, "v.name",
        SEPARATOR + "Countries1 c, c.states s, s.districts d, d.cities ct, d.villages v");
    qs.createIndex("cityName1", IndexType.FUNCTIONAL, "ct.name",
        SEPARATOR + "Countries1 c, c.states s, s.districts d, d.cities ct, d.villages v");
    qs.createIndex("countryNameA", IndexType.FUNCTIONAL, "c.name",
        SEPARATOR + "Countries1 c, c.states s, s.districts d, d.cities ct, d.villages v");
    qs.createIndex("countryNameB", IndexType.FUNCTIONAL, "c.name", SEPARATOR + "Countries1 c");
    /* Indices on region2 */
    qs.createIndex("stateName2", IndexType.FUNCTIONAL, "s.name",
        SEPARATOR + "Countries2 c, c.states s, s.districts d, d.cities ct, d.villages v");
    qs.createIndex("cityName2", IndexType.FUNCTIONAL, "ct.name",
        SEPARATOR + "Countries2 c, c.states s, s.districts d, d.cities ct, d.villages v");
    qs.createIndex("countryNameB", IndexType.FUNCTIONAL, "c.name", SEPARATOR + "Countries2 c");
    /* Indices on region3 */
    qs.createIndex("districtName3", IndexType.FUNCTIONAL, "d.name",
        SEPARATOR + "Countries3 c, c.states s, s.districts d, d.cities ct, d.villages v");
    qs.createIndex("villageName3", IndexType.FUNCTIONAL, "v.name",
        SEPARATOR + "Countries3 c, c.states s, s.districts d, d.cities ct, d.villages v");
    qs.createIndex("cityName3", IndexType.FUNCTIONAL, "ct.name",
        SEPARATOR + "Countries3 c, c.states s, s.districts d, d.cities ct, d.villages v");
  }// end of createIndex

  /** Closes the cache so each test starts from a clean slate. */
  @After
  public void tearDown() throws Exception {
    CacheUtils.closeCache();
  }

  /**
   * Query observer that records whether any index lookup occurred during execution and
   * which indexes were consulted.
   */
  class QueryObserverImpl extends QueryObserverAdapter {
    // Set once afterIndexLookup fires with a non-null result collection.
    boolean isIndexesUsed = false;
    // Names of every index consulted, in lookup order.
    ArrayList indexesUsed = new ArrayList();

    @Override
    public void beforeIndexLookup(Index index, int oper, Object key) {
      indexesUsed.add(index.getName());
    }

    @Override
    public void afterIndexLookup(Collection results) {
      if (results != null) {
        isIndexesUsed = true;
      }
    }
  }
}
apache/flink
36,922
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/nodes/exec/common/CommonExecLookupJoin.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.table.planner.plan.nodes.exec.common; import org.apache.flink.api.common.functions.FlatMapFunction; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.api.dag.Transformation; import org.apache.flink.api.java.typeutils.RowTypeInfo; import org.apache.flink.configuration.ReadableConfig; import org.apache.flink.streaming.api.datastream.AsyncDataStream; import org.apache.flink.streaming.api.functions.ProcessFunction; import org.apache.flink.streaming.api.functions.async.AsyncFunction; import org.apache.flink.streaming.api.operators.ProcessOperator; import org.apache.flink.streaming.api.operators.SimpleOperatorFactory; import org.apache.flink.streaming.api.operators.StreamOperatorFactory; import org.apache.flink.streaming.api.operators.async.AsyncWaitOperatorFactory; import org.apache.flink.table.api.TableException; import org.apache.flink.table.catalog.DataTypeFactory; import org.apache.flink.table.connector.ChangelogMode; import org.apache.flink.table.connector.source.LookupTableSource; import org.apache.flink.table.data.RowData; import org.apache.flink.table.data.conversion.DataStructureConverter; import 
org.apache.flink.table.data.conversion.DataStructureConverters; import org.apache.flink.table.functions.AsyncTableFunction; import org.apache.flink.table.functions.TableFunction; import org.apache.flink.table.functions.UserDefinedFunction; import org.apache.flink.table.functions.UserDefinedFunctionHelper; import org.apache.flink.table.legacy.sources.LookupableTableSource; import org.apache.flink.table.legacy.sources.TableSource; import org.apache.flink.table.planner.calcite.FlinkTypeFactory; import org.apache.flink.table.planner.codegen.CodeGeneratorContext; import org.apache.flink.table.planner.codegen.FilterCodeGenerator; import org.apache.flink.table.planner.codegen.LookupJoinCodeGenerator; import org.apache.flink.table.planner.delegation.PlannerBase; import org.apache.flink.table.planner.plan.nodes.exec.ExecEdge; import org.apache.flink.table.planner.plan.nodes.exec.ExecNode; import org.apache.flink.table.planner.plan.nodes.exec.ExecNodeBase; import org.apache.flink.table.planner.plan.nodes.exec.ExecNodeConfig; import org.apache.flink.table.planner.plan.nodes.exec.ExecNodeContext; import org.apache.flink.table.planner.plan.nodes.exec.InputProperty; import org.apache.flink.table.planner.plan.nodes.exec.spec.TemporalTableSourceSpec; import org.apache.flink.table.planner.plan.nodes.exec.utils.ExecNodeUtil; import org.apache.flink.table.planner.plan.schema.LegacyTableSourceTable; import org.apache.flink.table.planner.plan.schema.TableSourceTable; import org.apache.flink.table.planner.plan.utils.FunctionCallUtil; import org.apache.flink.table.planner.plan.utils.LookupJoinUtil; import org.apache.flink.table.planner.utils.JavaScalaConversionUtil; import org.apache.flink.table.planner.utils.ShortcutUtils; import org.apache.flink.table.runtime.collector.ListenableCollector; import org.apache.flink.table.runtime.collector.TableFunctionResultFuture; import org.apache.flink.table.runtime.generated.GeneratedCollector; import 
org.apache.flink.table.runtime.generated.GeneratedFilterCondition; import org.apache.flink.table.runtime.generated.GeneratedFunction; import org.apache.flink.table.runtime.generated.GeneratedResultFuture; import org.apache.flink.table.runtime.keyselector.RowDataKeySelector; import org.apache.flink.table.runtime.operators.TableKeyedAsyncWaitOperatorFactory; import org.apache.flink.table.runtime.operators.join.FlinkJoinType; import org.apache.flink.table.runtime.operators.join.lookup.AsyncLookupJoinRunner; import org.apache.flink.table.runtime.operators.join.lookup.AsyncLookupJoinWithCalcRunner; import org.apache.flink.table.runtime.operators.join.lookup.LookupJoinRunner; import org.apache.flink.table.runtime.operators.join.lookup.LookupJoinWithCalcRunner; import org.apache.flink.table.runtime.operators.join.lookup.ResultRetryStrategy; import org.apache.flink.table.runtime.types.PlannerTypeUtils; import org.apache.flink.table.runtime.types.TypeInfoDataTypeConverter; import org.apache.flink.table.runtime.typeutils.InternalSerializers; import org.apache.flink.table.runtime.typeutils.InternalTypeInfo; import org.apache.flink.table.types.logical.LogicalType; import org.apache.flink.table.types.logical.RowType; import org.apache.flink.types.RowKind; import org.apache.flink.util.Preconditions; import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.annotation.JsonInclude; import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.annotation.JsonProperty; import org.apache.calcite.plan.RelOptTable; import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.rex.RexNode; import org.apache.calcite.rex.RexUtil; import org.apache.calcite.tools.RelBuilder; import org.apache.commons.lang3.StringUtils; import javax.annotation.Nullable; import java.util.Arrays; import java.util.Collections; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.stream.Collectors; import static 
org.apache.flink.table.planner.calcite.FlinkTypeFactory.toLogicalType; import static org.apache.flink.table.planner.utils.ShortcutUtils.unwrapTypeFactory; import static org.apache.flink.util.Preconditions.checkArgument; import static org.apache.flink.util.Preconditions.checkNotNull; /** * Base {@link ExecNode} for temporal table join which shares most methods. * * <p>For a lookup join query: * * <pre> * SELECT T.id, T.content, D.age * FROM T JOIN userTable FOR SYSTEM_TIME AS OF T.proctime AS D * ON T.content = concat(D.name, '!') AND D.age = 11 AND T.id = D.id * WHERE D.name LIKE 'Jack%' * </pre> * * <p>The LookupJoin physical node encapsulates the following RelNode tree: * * <pre> * Join (l.name = r.name) * / \ * RelNode Calc (concat(name, "!") as name, name LIKE 'Jack%') * | * DimTable (lookup-keys: age=11, id=l.id) * (age, id, name) * </pre> * * <ul> * <li>lookupKeys: [$0=11, $1=l.id] ($0 and $1 is the indexes of age and id in dim table) * <li>calcOnTemporalTable: calc on temporal table rows before join * <li>joinCondition: join condition on temporal table rows after calc * </ul> * * <p>The workflow of lookup join: * * <p>1) lookup records dimension table using the lookup-keys <br> * 2) project & filter on the lookup-ed records <br> * 3) join left input record and lookup-ed records <br> * 4) only outputs the rows which match to the condition <br> */ public abstract class CommonExecLookupJoin extends ExecNodeBase<RowData> { public static final String LOOKUP_JOIN_TRANSFORMATION = "lookup-join"; public static final String LOOKUP_JOIN_MATERIALIZE_TRANSFORMATION = "lookup-join-materialize"; public static final String LOOKUP_JOIN_KEY_ORDERED_TRANSFORMATION = "lookup-join-key-ordered"; public static final String FIELD_NAME_JOIN_TYPE = "joinType"; public static final String FIELD_NAME_PRE_FILTER_CONDITION = "preFilterCondition"; public static final String FIELD_NAME_REMAINING_JOIN_CONDITION = "joinCondition"; public static final String FIELD_NAME_TEMPORAL_TABLE = 
"temporalTable"; public static final String FIELD_NAME_LOOKUP_KEYS = "lookupKeys"; public static final String FIELD_NAME_PROJECTION_ON_TEMPORAL_TABLE = "projectionOnTemporalTable"; public static final String FIELD_NAME_FILTER_ON_TEMPORAL_TABLE = "filterOnTemporalTable"; public static final String FIELD_NAME_INPUT_CHANGELOG_MODE = "inputChangelogMode"; public static final String FIELD_NAME_ASYNC_OPTIONS = "asyncOptions"; public static final String FIELD_NAME_RETRY_OPTIONS = "retryOptions"; public static final String FIELD_NAME_PREFER_CUSTOM_SHUFFLE = "preferCustomShuffle"; public static final String CUSTOM_SHUFFLE_TRANSFORMATION = "custom-shuffle"; @JsonProperty(FIELD_NAME_JOIN_TYPE) private final FlinkJoinType joinType; /** * lookup keys: the key is index in dim table. the value is source of lookup key either constant * or field from right table. */ @JsonProperty(FIELD_NAME_LOOKUP_KEYS) private final Map<Integer, FunctionCallUtil.FunctionParam> lookupKeys; @JsonProperty(FIELD_NAME_TEMPORAL_TABLE) private final TemporalTableSourceSpec temporalTableSourceSpec; @JsonProperty(FIELD_NAME_PROJECTION_ON_TEMPORAL_TABLE) private final @Nullable List<RexNode> projectionOnTemporalTable; @JsonProperty(FIELD_NAME_FILTER_ON_TEMPORAL_TABLE) private final @Nullable RexNode filterOnTemporalTable; /** pre-filter condition on left input except lookup keys. */ @JsonProperty(FIELD_NAME_PRE_FILTER_CONDITION) @JsonInclude(JsonInclude.Include.NON_NULL) private final @Nullable RexNode preFilterCondition; /** remaining join condition except pre-filter & equi-conditions except lookup keys. 
*/ @JsonProperty(FIELD_NAME_REMAINING_JOIN_CONDITION) private final @Nullable RexNode remainingJoinCondition; @JsonProperty(FIELD_NAME_INPUT_CHANGELOG_MODE) private final ChangelogMode inputChangelogMode; @JsonProperty(FIELD_NAME_ASYNC_OPTIONS) @JsonInclude(JsonInclude.Include.NON_NULL) private final @Nullable FunctionCallUtil.AsyncOptions asyncLookupOptions; @JsonProperty(FIELD_NAME_RETRY_OPTIONS) @JsonInclude(JsonInclude.Include.NON_NULL) private final @Nullable LookupJoinUtil.RetryLookupOptions retryOptions; @JsonProperty(FIELD_NAME_PREFER_CUSTOM_SHUFFLE) private final boolean preferCustomShuffle; protected CommonExecLookupJoin( int id, ExecNodeContext context, ReadableConfig persistedConfig, FlinkJoinType joinType, @Nullable RexNode preFilterCondition, @Nullable RexNode remainingJoinCondition, // TODO: refactor this into TableSourceTable, once legacy TableSource is removed TemporalTableSourceSpec temporalTableSourceSpec, Map<Integer, FunctionCallUtil.FunctionParam> lookupKeys, @Nullable List<RexNode> projectionOnTemporalTable, @Nullable RexNode filterOnTemporalTable, @Nullable FunctionCallUtil.AsyncOptions asyncLookupOptions, @Nullable LookupJoinUtil.RetryLookupOptions retryOptions, ChangelogMode inputChangelogMode, List<InputProperty> inputProperties, RowType outputType, String description, boolean preferCustomShuffle) { super(id, context, persistedConfig, inputProperties, outputType, description); checkArgument(inputProperties.size() == 1); this.joinType = checkNotNull(joinType); this.preFilterCondition = preFilterCondition; this.remainingJoinCondition = remainingJoinCondition; this.lookupKeys = Collections.unmodifiableMap(checkNotNull(lookupKeys)); this.temporalTableSourceSpec = checkNotNull(temporalTableSourceSpec); this.projectionOnTemporalTable = projectionOnTemporalTable; this.filterOnTemporalTable = filterOnTemporalTable; this.inputChangelogMode = inputChangelogMode; this.asyncLookupOptions = asyncLookupOptions; this.retryOptions = retryOptions; 
this.preferCustomShuffle = preferCustomShuffle; } public TemporalTableSourceSpec getTemporalTableSourceSpec() { return temporalTableSourceSpec; } protected Transformation<RowData> createJoinTransformation( PlannerBase planner, ExecNodeConfig config, boolean upsertMaterialize, boolean lookupKeyContainsPrimaryKey) { RelOptTable temporalTable = temporalTableSourceSpec.getTemporalTable( planner.getFlinkContext(), unwrapTypeFactory(planner)); // validate whether the node is valid and supported. validate(temporalTable); final ExecEdge inputEdge = getInputEdges().get(0); RowType inputRowType = (RowType) inputEdge.getOutputType(); RowType tableSourceRowType = FlinkTypeFactory.toLogicalRowType(temporalTable.getRowType()); RowType resultRowType = (RowType) getOutputType(); validateLookupKeyType(lookupKeys, inputRowType, tableSourceRowType); boolean isAsyncEnabled = null != asyncLookupOptions; ResultRetryStrategy retryStrategy = retryOptions != null ? retryOptions.toRetryStrategy() : null; boolean tryApplyCustomShuffle = preferCustomShuffle && !upsertMaterialize; UserDefinedFunction lookupFunction = LookupJoinUtil.getLookupFunction( temporalTable, lookupKeys.keySet(), planner.getFlinkContext().getClassLoader(), isAsyncEnabled, retryStrategy, tryApplyCustomShuffle); Transformation<RowData> inputTransformation = (Transformation<RowData>) inputEdge.translateToPlan(planner); if (tryApplyCustomShuffle) { inputTransformation = LookupJoinUtil.tryApplyCustomShufflePartitioner( planner, temporalTable, inputRowType, lookupKeys, inputTransformation, inputChangelogMode, createTransformationMeta(CUSTOM_SHUFFLE_TRANSFORMATION, config)); } UserDefinedFunctionHelper.prepareInstance(config, lookupFunction); boolean isLeftOuterJoin = joinType == FlinkJoinType.LEFT; if (isAsyncEnabled) { assert lookupFunction instanceof AsyncTableFunction; } if (upsertMaterialize) { // upsertMaterialize only works on sync lookup mode, async lookup is unsupported. 
assert !isAsyncEnabled && !inputChangelogMode.containsOnly(RowKind.INSERT); return createSyncLookupJoinWithState( inputTransformation, temporalTable, config, planner.getFlinkContext().getClassLoader(), lookupKeys, (TableFunction<Object>) lookupFunction, planner.createRelBuilder(), inputRowType, tableSourceRowType, resultRowType, isLeftOuterJoin, planner.getExecEnv().getConfig().isObjectReuseEnabled(), lookupKeyContainsPrimaryKey); } else { StreamOperatorFactory<RowData> operatorFactory; if (isAsyncEnabled) { if (asyncLookupOptions.keyOrdered) { return createKeyOrderedAsyncLookupJoin( inputTransformation, temporalTable, config, planner.getFlinkContext().getClassLoader(), lookupKeys, (AsyncTableFunction<Object>) lookupFunction, planner.createRelBuilder(), inputRowType, tableSourceRowType, resultRowType, isLeftOuterJoin, asyncLookupOptions); } operatorFactory = createAsyncLookupJoin( temporalTable, config, planner.getFlinkContext().getClassLoader(), lookupKeys, (AsyncTableFunction<Object>) lookupFunction, planner.createRelBuilder(), inputRowType, tableSourceRowType, resultRowType, isLeftOuterJoin, asyncLookupOptions, null); } else { operatorFactory = createSyncLookupJoin( temporalTable, config, planner.getFlinkContext().getClassLoader(), lookupKeys, (TableFunction<Object>) lookupFunction, planner.createRelBuilder(), inputRowType, tableSourceRowType, resultRowType, isLeftOuterJoin, planner.getExecEnv().getConfig().isObjectReuseEnabled()); } return ExecNodeUtil.createOneInputTransformation( inputTransformation, createTransformationMeta(LOOKUP_JOIN_TRANSFORMATION, config), operatorFactory, InternalTypeInfo.of(resultRowType), inputTransformation.getParallelism(), false); } } protected abstract Transformation<RowData> createKeyOrderedAsyncLookupJoin( Transformation<RowData> inputTransformation, RelOptTable temporalTable, ExecNodeConfig config, ClassLoader classLoader, Map<Integer, FunctionCallUtil.FunctionParam> allLookupKeys, AsyncTableFunction<Object> 
asyncLookupFunction, RelBuilder relBuilder, RowType inputRowType, RowType tableSourceRowType, RowType resultRowType, boolean isLeftOuterJoin, LookupJoinUtil.AsyncOptions asyncLookupOptions); protected abstract Transformation<RowData> createSyncLookupJoinWithState( Transformation<RowData> inputTransformation, RelOptTable temporalTable, ExecNodeConfig config, ClassLoader classLoader, Map<Integer, FunctionCallUtil.FunctionParam> allLookupKeys, TableFunction<?> syncLookupFunction, RelBuilder relBuilder, RowType inputRowType, RowType tableSourceRowType, RowType resultRowType, boolean isLeftOuterJoin, boolean isObjectReuseEnabled, boolean lookupKeyContainsPrimaryKey); protected void validateLookupKeyType( final Map<Integer, FunctionCallUtil.FunctionParam> lookupKeys, final RowType inputRowType, final RowType tableSourceRowType) { final List<String> imCompatibleConditions = new LinkedList<>(); lookupKeys.entrySet().stream() .filter(entry -> entry.getValue() instanceof FunctionCallUtil.FieldRef) .forEach( entry -> { int rightKey = entry.getKey(); int leftKey = ((FunctionCallUtil.FieldRef) entry.getValue()).index; LogicalType leftType = inputRowType.getTypeAt(leftKey); LogicalType rightType = tableSourceRowType.getTypeAt(rightKey); boolean isCompatible = PlannerTypeUtils.isInteroperable(leftType, rightType); if (!isCompatible) { String leftName = inputRowType.getFieldNames().get(leftKey); String rightName = tableSourceRowType.getFieldNames().get(rightKey); imCompatibleConditions.add( String.format( "%s[%s]=%s[%s]", leftName, leftType, rightName, rightType)); } }); if (!imCompatibleConditions.isEmpty()) { throw new TableException( "Temporal table join requires equivalent condition " + "of the same type, but the condition is " + StringUtils.join(imCompatibleConditions, ",")); } } @SuppressWarnings("unchecked") protected StreamOperatorFactory<RowData> createAsyncLookupJoin( RelOptTable temporalTable, ExecNodeConfig config, ClassLoader classLoader, Map<Integer, 
FunctionCallUtil.FunctionParam> allLookupKeys, AsyncTableFunction<Object> asyncLookupFunction, RelBuilder relBuilder, RowType inputRowType, RowType tableSourceRowType, RowType resultRowType, boolean isLeftOuterJoin, LookupJoinUtil.AsyncOptions asyncLookupOptions, @Nullable RowDataKeySelector keySelector) { DataTypeFactory dataTypeFactory = ShortcutUtils.unwrapContext(relBuilder).getCatalogManager().getDataTypeFactory(); List<FunctionCallUtil.FunctionParam> convertedKeys = Arrays.stream(LookupJoinUtil.getOrderedLookupKeys(allLookupKeys.keySet())) .mapToObj(allLookupKeys::get) .collect(Collectors.toList()); LookupJoinCodeGenerator.GeneratedTableFunctionWithDataType<AsyncFunction<RowData, Object>> generatedFuncWithType = LookupJoinCodeGenerator.generateAsyncLookupFunction( config, classLoader, dataTypeFactory, inputRowType, tableSourceRowType, resultRowType, convertedKeys, asyncLookupFunction, StringUtils.join(temporalTable.getQualifiedName(), ".")); RelDataType projectionOutputRelDataType = getProjectionOutputRelDataType(relBuilder); RowType rightRowType = getRightOutputRowType(projectionOutputRelDataType, tableSourceRowType); // a projection or filter after table source scan GeneratedResultFuture<TableFunctionResultFuture<RowData>> generatedResultFuture = LookupJoinCodeGenerator.generateTableAsyncCollector( config, classLoader, "TableFunctionResultFuture", inputRowType, rightRowType, JavaScalaConversionUtil.toScala( Optional.ofNullable(remainingJoinCondition))); GeneratedFilterCondition generatedPreFilterCondition = FilterCodeGenerator.generateFilterCondition( config, classLoader, preFilterCondition, inputRowType); DataStructureConverter<?, ?> fetcherConverter = DataStructureConverters.getConverter(generatedFuncWithType.dataType()); AsyncFunction<RowData, RowData> asyncFunc; if (projectionOnTemporalTable != null) { // a projection or filter after table source scan GeneratedFunction<FlatMapFunction<RowData, RowData>> generatedCalc = 
LookupJoinCodeGenerator.generateCalcMapFunction( config, classLoader, JavaScalaConversionUtil.toScala(projectionOnTemporalTable), filterOnTemporalTable, projectionOutputRelDataType, tableSourceRowType); asyncFunc = new AsyncLookupJoinWithCalcRunner( generatedFuncWithType.tableFunc(), (DataStructureConverter<RowData, Object>) fetcherConverter, generatedCalc, generatedResultFuture, generatedPreFilterCondition, InternalSerializers.create(rightRowType), isLeftOuterJoin, asyncLookupOptions.asyncBufferCapacity); } else { // right type is the same as table source row type, because no calc after temporal table asyncFunc = new AsyncLookupJoinRunner( generatedFuncWithType.tableFunc(), (DataStructureConverter<RowData, Object>) fetcherConverter, generatedResultFuture, generatedPreFilterCondition, InternalSerializers.create(rightRowType), isLeftOuterJoin, asyncLookupOptions.asyncBufferCapacity); } if (asyncLookupOptions.keyOrdered) { Preconditions.checkState( AsyncDataStream.OutputMode.ORDERED.equals(asyncLookupOptions.asyncOutputMode)); return new TableKeyedAsyncWaitOperatorFactory<>( asyncFunc, keySelector, asyncLookupOptions.asyncTimeout, asyncLookupOptions.asyncBufferCapacity); } // Why not directly enable retry on 'AsyncWaitOperator'? because of two reasons: // 1. AsyncLookupJoinRunner has a 'stateful' resultFutureBuffer bind to each input record // (it's non-reenter-able) 2. can not lookup new value if cache empty values enabled when // chained with the new AsyncCachingLookupFunction. So similar to sync lookup join with // retry, use a 'RetryableAsyncLookupFunctionDelegator' to support retry. 
return new AsyncWaitOperatorFactory<>( asyncFunc, asyncLookupOptions.asyncTimeout, asyncLookupOptions.asyncBufferCapacity, asyncLookupOptions.asyncOutputMode); } private StreamOperatorFactory<RowData> createSyncLookupJoin( RelOptTable temporalTable, ExecNodeConfig config, ClassLoader classLoader, Map<Integer, FunctionCallUtil.FunctionParam> allLookupKeys, TableFunction<?> syncLookupFunction, RelBuilder relBuilder, RowType inputRowType, RowType tableSourceRowType, RowType resultRowType, boolean isLeftOuterJoin, boolean isObjectReuseEnabled) { return SimpleOperatorFactory.of( new ProcessOperator<>( createSyncLookupJoinFunction( temporalTable, config, classLoader, allLookupKeys, syncLookupFunction, relBuilder, inputRowType, tableSourceRowType, resultRowType, isLeftOuterJoin, isObjectReuseEnabled))); } protected RelDataType getProjectionOutputRelDataType(RelBuilder relBuilder) { return projectionOnTemporalTable != null ? RexUtil.createStructType(unwrapTypeFactory(relBuilder), projectionOnTemporalTable) : null; } protected RowType getRightOutputRowType( RelDataType projectionOutputRelDataType, RowType tableSourceRowType) { return projectionOutputRelDataType != null ? 
(RowType) toLogicalType(projectionOutputRelDataType) : tableSourceRowType; } protected ProcessFunction<RowData, RowData> createSyncLookupJoinFunction( RelOptTable temporalTable, ExecNodeConfig config, ClassLoader classLoader, Map<Integer, FunctionCallUtil.FunctionParam> allLookupKeys, TableFunction<?> syncLookupFunction, RelBuilder relBuilder, RowType inputRowType, RowType tableSourceRowType, RowType resultRowType, boolean isLeftOuterJoin, boolean isObjectReuseEnabled) { DataTypeFactory dataTypeFactory = ShortcutUtils.unwrapContext(relBuilder).getCatalogManager().getDataTypeFactory(); List<FunctionCallUtil.FunctionParam> convertedKeys = Arrays.stream(LookupJoinUtil.getOrderedLookupKeys(allLookupKeys.keySet())) .mapToObj(allLookupKeys::get) .collect(Collectors.toList()); GeneratedFunction<FlatMapFunction<RowData, RowData>> generatedFetcher = LookupJoinCodeGenerator.generateSyncLookupFunction( config, classLoader, dataTypeFactory, inputRowType, tableSourceRowType, resultRowType, convertedKeys, syncLookupFunction, StringUtils.join(temporalTable.getQualifiedName(), "."), isObjectReuseEnabled); RelDataType projectionOutputRelDataType = getProjectionOutputRelDataType(relBuilder); RowType rightRowType = getRightOutputRowType(projectionOutputRelDataType, tableSourceRowType); GeneratedCollector<ListenableCollector<RowData>> generatedCollector = LookupJoinCodeGenerator.generateCollector( new CodeGeneratorContext(config, classLoader), inputRowType, rightRowType, resultRowType, JavaScalaConversionUtil.toScala( Optional.ofNullable(remainingJoinCondition)), JavaScalaConversionUtil.toScala(Optional.empty()), true); GeneratedFilterCondition generatedPreFilterCondition = FilterCodeGenerator.generateFilterCondition( config, classLoader, preFilterCondition, inputRowType); ProcessFunction<RowData, RowData> processFunc; if (projectionOnTemporalTable != null) { // a projection or filter after table source scan GeneratedFunction<FlatMapFunction<RowData, RowData>> generatedCalc = 
LookupJoinCodeGenerator.generateCalcMapFunction( config, classLoader, JavaScalaConversionUtil.toScala(projectionOnTemporalTable), filterOnTemporalTable, projectionOutputRelDataType, tableSourceRowType); processFunc = new LookupJoinWithCalcRunner( generatedFetcher, generatedCalc, generatedCollector, generatedPreFilterCondition, isLeftOuterJoin, rightRowType.getFieldCount()); } else { // right type is the same as table source row type, because no calc after temporal table processFunc = new LookupJoinRunner( generatedFetcher, generatedCollector, generatedPreFilterCondition, isLeftOuterJoin, rightRowType.getFieldCount()); } return processFunc; } // ---------------------------------------------------------------------------------------- // Validation // ---------------------------------------------------------------------------------------- private void validate(RelOptTable temporalTable) { // validate table source and function implementation first validateTableSource(temporalTable); // check join on all fields of PRIMARY KEY or (UNIQUE) INDEX if (lookupKeys.isEmpty()) { throw new TableException( String.format( "Temporal table join requires an equality condition on fields of %s.", getTableSourceDescription(temporalTable))); } // check type if (joinType != FlinkJoinType.LEFT && joinType != FlinkJoinType.INNER) { throw new TableException( String.format( "Temporal table join currently only support INNER JOIN and LEFT JOIN, but was %s JOIN.", joinType.toString())); } // success } private String getTableSourceDescription(RelOptTable temporalTable) { if (temporalTable instanceof TableSourceTable) { return String.format( "table [%s]", ((TableSourceTable) temporalTable) .contextResolvedTable() .getIdentifier() .asSummaryString()); } else if (temporalTable instanceof LegacyTableSourceTable) { return String.format( "table [%s]", ((LegacyTableSourceTable<?>) temporalTable) .tableIdentifier() .asSummaryString()); } // should never reach here. 
return ""; } private void validateTableSource(RelOptTable temporalTable) { if (temporalTable instanceof TableSourceTable) { if (!(((TableSourceTable) temporalTable).tableSource() instanceof LookupTableSource)) { throw new TableException( String.format( "%s must implement LookupTableSource interface if it is used in temporal table join.", getTableSourceDescription(temporalTable))); } } else if (temporalTable instanceof LegacyTableSourceTable) { TableSource<?> tableSource = ((LegacyTableSourceTable<?>) temporalTable).tableSource(); if (!(tableSource instanceof LookupableTableSource)) { throw new TableException( String.format( "%s must implement LookupableTableSource interface if it is used in temporal table join.", getTableSourceDescription(temporalTable))); } TypeInformation<?> tableSourceProducedType = TypeInfoDataTypeConverter.fromDataTypeToTypeInfo( tableSource.getProducedDataType()); if (!(tableSourceProducedType instanceof InternalTypeInfo && tableSourceProducedType .getTypeClass() .isAssignableFrom(RowData.class)) && !(tableSourceProducedType instanceof RowTypeInfo)) { throw new TableException( String.format( "Temporal table join only support Row or RowData type as return type of temporal table. But was %s.", tableSourceProducedType)); } } else { throw new TableException( String.format( "table [%s] is neither TableSourceTable not LegacyTableSourceTable.", StringUtils.join(temporalTable.getQualifiedName(), "."))); } } }
googleapis/google-cloud-java
36,559
java-vision/proto-google-cloud-vision-v1p3beta1/src/main/java/com/google/cloud/vision/v1p3beta1/ListProductSetsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/vision/v1p3beta1/product_search_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.vision.v1p3beta1; /** * * * <pre> * Response message for the `ListProductSets` method. * </pre> * * Protobuf type {@code google.cloud.vision.v1p3beta1.ListProductSetsResponse} */ public final class ListProductSetsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.vision.v1p3beta1.ListProductSetsResponse) ListProductSetsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListProductSetsResponse.newBuilder() to construct. 
private ListProductSetsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListProductSetsResponse() { productSets_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListProductSetsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.vision.v1p3beta1.ProductSearchServiceProto .internal_static_google_cloud_vision_v1p3beta1_ListProductSetsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.vision.v1p3beta1.ProductSearchServiceProto .internal_static_google_cloud_vision_v1p3beta1_ListProductSetsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.vision.v1p3beta1.ListProductSetsResponse.class, com.google.cloud.vision.v1p3beta1.ListProductSetsResponse.Builder.class); } public static final int PRODUCT_SETS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.vision.v1p3beta1.ProductSet> productSets_; /** * * * <pre> * List of ProductSets. * </pre> * * <code>repeated .google.cloud.vision.v1p3beta1.ProductSet product_sets = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.vision.v1p3beta1.ProductSet> getProductSetsList() { return productSets_; } /** * * * <pre> * List of ProductSets. * </pre> * * <code>repeated .google.cloud.vision.v1p3beta1.ProductSet product_sets = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.vision.v1p3beta1.ProductSetOrBuilder> getProductSetsOrBuilderList() { return productSets_; } /** * * * <pre> * List of ProductSets. 
* </pre> * * <code>repeated .google.cloud.vision.v1p3beta1.ProductSet product_sets = 1;</code> */ @java.lang.Override public int getProductSetsCount() { return productSets_.size(); } /** * * * <pre> * List of ProductSets. * </pre> * * <code>repeated .google.cloud.vision.v1p3beta1.ProductSet product_sets = 1;</code> */ @java.lang.Override public com.google.cloud.vision.v1p3beta1.ProductSet getProductSets(int index) { return productSets_.get(index); } /** * * * <pre> * List of ProductSets. * </pre> * * <code>repeated .google.cloud.vision.v1p3beta1.ProductSet product_sets = 1;</code> */ @java.lang.Override public com.google.cloud.vision.v1p3beta1.ProductSetOrBuilder getProductSetsOrBuilder(int index) { return productSets_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no more * results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no more * results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < productSets_.size(); i++) { output.writeMessage(1, productSets_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < productSets_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, productSets_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.vision.v1p3beta1.ListProductSetsResponse)) { return super.equals(obj); } com.google.cloud.vision.v1p3beta1.ListProductSetsResponse other = (com.google.cloud.vision.v1p3beta1.ListProductSetsResponse) obj; if (!getProductSetsList().equals(other.getProductSetsList())) return false; if 
(!getNextPageToken().equals(other.getNextPageToken())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getProductSetsCount() > 0) { hash = (37 * hash) + PRODUCT_SETS_FIELD_NUMBER; hash = (53 * hash) + getProductSetsList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.vision.v1p3beta1.ListProductSetsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.vision.v1p3beta1.ListProductSetsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.vision.v1p3beta1.ListProductSetsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.vision.v1p3beta1.ListProductSetsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.vision.v1p3beta1.ListProductSetsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.vision.v1p3beta1.ListProductSetsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.vision.v1p3beta1.ListProductSetsResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.vision.v1p3beta1.ListProductSetsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.vision.v1p3beta1.ListProductSetsResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.vision.v1p3beta1.ListProductSetsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.vision.v1p3beta1.ListProductSetsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.vision.v1p3beta1.ListProductSetsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( 
com.google.cloud.vision.v1p3beta1.ListProductSetsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response message for the `ListProductSets` method. * </pre> * * Protobuf type {@code google.cloud.vision.v1p3beta1.ListProductSetsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.vision.v1p3beta1.ListProductSetsResponse) com.google.cloud.vision.v1p3beta1.ListProductSetsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.vision.v1p3beta1.ProductSearchServiceProto .internal_static_google_cloud_vision_v1p3beta1_ListProductSetsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.vision.v1p3beta1.ProductSearchServiceProto .internal_static_google_cloud_vision_v1p3beta1_ListProductSetsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.vision.v1p3beta1.ListProductSetsResponse.class, com.google.cloud.vision.v1p3beta1.ListProductSetsResponse.Builder.class); } // Construct using com.google.cloud.vision.v1p3beta1.ListProductSetsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (productSetsBuilder_ == null) { productSets_ = java.util.Collections.emptyList(); } else { productSets_ = null; productSetsBuilder_.clear(); } 
bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.vision.v1p3beta1.ProductSearchServiceProto .internal_static_google_cloud_vision_v1p3beta1_ListProductSetsResponse_descriptor; } @java.lang.Override public com.google.cloud.vision.v1p3beta1.ListProductSetsResponse getDefaultInstanceForType() { return com.google.cloud.vision.v1p3beta1.ListProductSetsResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.vision.v1p3beta1.ListProductSetsResponse build() { com.google.cloud.vision.v1p3beta1.ListProductSetsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.vision.v1p3beta1.ListProductSetsResponse buildPartial() { com.google.cloud.vision.v1p3beta1.ListProductSetsResponse result = new com.google.cloud.vision.v1p3beta1.ListProductSetsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.vision.v1p3beta1.ListProductSetsResponse result) { if (productSetsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { productSets_ = java.util.Collections.unmodifiableList(productSets_); bitField0_ = (bitField0_ & ~0x00000001); } result.productSets_ = productSets_; } else { result.productSets_ = productSetsBuilder_.build(); } } private void buildPartial0(com.google.cloud.vision.v1p3beta1.ListProductSetsResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); 
} @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.vision.v1p3beta1.ListProductSetsResponse) { return mergeFrom((com.google.cloud.vision.v1p3beta1.ListProductSetsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.vision.v1p3beta1.ListProductSetsResponse other) { if (other == com.google.cloud.vision.v1p3beta1.ListProductSetsResponse.getDefaultInstance()) return this; if (productSetsBuilder_ == null) { if (!other.productSets_.isEmpty()) { if (productSets_.isEmpty()) { productSets_ = other.productSets_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureProductSetsIsMutable(); productSets_.addAll(other.productSets_); } onChanged(); } } else { if (!other.productSets_.isEmpty()) { if (productSetsBuilder_.isEmpty()) { productSetsBuilder_.dispose(); productSetsBuilder_ = null; productSets_ = other.productSets_; bitField0_ = (bitField0_ & ~0x00000001); productSetsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getProductSetsFieldBuilder() : null; } else { productSetsBuilder_.addAllMessages(other.productSets_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.vision.v1p3beta1.ProductSet m = input.readMessage( com.google.cloud.vision.v1p3beta1.ProductSet.parser(), extensionRegistry); if (productSetsBuilder_ == null) { ensureProductSetsIsMutable(); productSets_.add(m); } else { productSetsBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.vision.v1p3beta1.ProductSet> productSets_ = java.util.Collections.emptyList(); private void ensureProductSetsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { productSets_ = new java.util.ArrayList<com.google.cloud.vision.v1p3beta1.ProductSet>(productSets_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.vision.v1p3beta1.ProductSet, com.google.cloud.vision.v1p3beta1.ProductSet.Builder, 
com.google.cloud.vision.v1p3beta1.ProductSetOrBuilder> productSetsBuilder_; /** * * * <pre> * List of ProductSets. * </pre> * * <code>repeated .google.cloud.vision.v1p3beta1.ProductSet product_sets = 1;</code> */ public java.util.List<com.google.cloud.vision.v1p3beta1.ProductSet> getProductSetsList() { if (productSetsBuilder_ == null) { return java.util.Collections.unmodifiableList(productSets_); } else { return productSetsBuilder_.getMessageList(); } } /** * * * <pre> * List of ProductSets. * </pre> * * <code>repeated .google.cloud.vision.v1p3beta1.ProductSet product_sets = 1;</code> */ public int getProductSetsCount() { if (productSetsBuilder_ == null) { return productSets_.size(); } else { return productSetsBuilder_.getCount(); } } /** * * * <pre> * List of ProductSets. * </pre> * * <code>repeated .google.cloud.vision.v1p3beta1.ProductSet product_sets = 1;</code> */ public com.google.cloud.vision.v1p3beta1.ProductSet getProductSets(int index) { if (productSetsBuilder_ == null) { return productSets_.get(index); } else { return productSetsBuilder_.getMessage(index); } } /** * * * <pre> * List of ProductSets. * </pre> * * <code>repeated .google.cloud.vision.v1p3beta1.ProductSet product_sets = 1;</code> */ public Builder setProductSets(int index, com.google.cloud.vision.v1p3beta1.ProductSet value) { if (productSetsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureProductSetsIsMutable(); productSets_.set(index, value); onChanged(); } else { productSetsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * List of ProductSets. 
* </pre> * * <code>repeated .google.cloud.vision.v1p3beta1.ProductSet product_sets = 1;</code> */ public Builder setProductSets( int index, com.google.cloud.vision.v1p3beta1.ProductSet.Builder builderForValue) { if (productSetsBuilder_ == null) { ensureProductSetsIsMutable(); productSets_.set(index, builderForValue.build()); onChanged(); } else { productSetsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * List of ProductSets. * </pre> * * <code>repeated .google.cloud.vision.v1p3beta1.ProductSet product_sets = 1;</code> */ public Builder addProductSets(com.google.cloud.vision.v1p3beta1.ProductSet value) { if (productSetsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureProductSetsIsMutable(); productSets_.add(value); onChanged(); } else { productSetsBuilder_.addMessage(value); } return this; } /** * * * <pre> * List of ProductSets. * </pre> * * <code>repeated .google.cloud.vision.v1p3beta1.ProductSet product_sets = 1;</code> */ public Builder addProductSets(int index, com.google.cloud.vision.v1p3beta1.ProductSet value) { if (productSetsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureProductSetsIsMutable(); productSets_.add(index, value); onChanged(); } else { productSetsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * List of ProductSets. * </pre> * * <code>repeated .google.cloud.vision.v1p3beta1.ProductSet product_sets = 1;</code> */ public Builder addProductSets( com.google.cloud.vision.v1p3beta1.ProductSet.Builder builderForValue) { if (productSetsBuilder_ == null) { ensureProductSetsIsMutable(); productSets_.add(builderForValue.build()); onChanged(); } else { productSetsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * List of ProductSets. 
* </pre> * * <code>repeated .google.cloud.vision.v1p3beta1.ProductSet product_sets = 1;</code> */ public Builder addProductSets( int index, com.google.cloud.vision.v1p3beta1.ProductSet.Builder builderForValue) { if (productSetsBuilder_ == null) { ensureProductSetsIsMutable(); productSets_.add(index, builderForValue.build()); onChanged(); } else { productSetsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * List of ProductSets. * </pre> * * <code>repeated .google.cloud.vision.v1p3beta1.ProductSet product_sets = 1;</code> */ public Builder addAllProductSets( java.lang.Iterable<? extends com.google.cloud.vision.v1p3beta1.ProductSet> values) { if (productSetsBuilder_ == null) { ensureProductSetsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, productSets_); onChanged(); } else { productSetsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * List of ProductSets. * </pre> * * <code>repeated .google.cloud.vision.v1p3beta1.ProductSet product_sets = 1;</code> */ public Builder clearProductSets() { if (productSetsBuilder_ == null) { productSets_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { productSetsBuilder_.clear(); } return this; } /** * * * <pre> * List of ProductSets. * </pre> * * <code>repeated .google.cloud.vision.v1p3beta1.ProductSet product_sets = 1;</code> */ public Builder removeProductSets(int index) { if (productSetsBuilder_ == null) { ensureProductSetsIsMutable(); productSets_.remove(index); onChanged(); } else { productSetsBuilder_.remove(index); } return this; } /** * * * <pre> * List of ProductSets. * </pre> * * <code>repeated .google.cloud.vision.v1p3beta1.ProductSet product_sets = 1;</code> */ public com.google.cloud.vision.v1p3beta1.ProductSet.Builder getProductSetsBuilder(int index) { return getProductSetsFieldBuilder().getBuilder(index); } /** * * * <pre> * List of ProductSets. 
* </pre> * * <code>repeated .google.cloud.vision.v1p3beta1.ProductSet product_sets = 1;</code> */ public com.google.cloud.vision.v1p3beta1.ProductSetOrBuilder getProductSetsOrBuilder( int index) { if (productSetsBuilder_ == null) { return productSets_.get(index); } else { return productSetsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * List of ProductSets. * </pre> * * <code>repeated .google.cloud.vision.v1p3beta1.ProductSet product_sets = 1;</code> */ public java.util.List<? extends com.google.cloud.vision.v1p3beta1.ProductSetOrBuilder> getProductSetsOrBuilderList() { if (productSetsBuilder_ != null) { return productSetsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(productSets_); } } /** * * * <pre> * List of ProductSets. * </pre> * * <code>repeated .google.cloud.vision.v1p3beta1.ProductSet product_sets = 1;</code> */ public com.google.cloud.vision.v1p3beta1.ProductSet.Builder addProductSetsBuilder() { return getProductSetsFieldBuilder() .addBuilder(com.google.cloud.vision.v1p3beta1.ProductSet.getDefaultInstance()); } /** * * * <pre> * List of ProductSets. * </pre> * * <code>repeated .google.cloud.vision.v1p3beta1.ProductSet product_sets = 1;</code> */ public com.google.cloud.vision.v1p3beta1.ProductSet.Builder addProductSetsBuilder(int index) { return getProductSetsFieldBuilder() .addBuilder(index, com.google.cloud.vision.v1p3beta1.ProductSet.getDefaultInstance()); } /** * * * <pre> * List of ProductSets. 
* </pre> * * <code>repeated .google.cloud.vision.v1p3beta1.ProductSet product_sets = 1;</code> */ public java.util.List<com.google.cloud.vision.v1p3beta1.ProductSet.Builder> getProductSetsBuilderList() { return getProductSetsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.vision.v1p3beta1.ProductSet, com.google.cloud.vision.v1p3beta1.ProductSet.Builder, com.google.cloud.vision.v1p3beta1.ProductSetOrBuilder> getProductSetsFieldBuilder() { if (productSetsBuilder_ == null) { productSetsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.vision.v1p3beta1.ProductSet, com.google.cloud.vision.v1p3beta1.ProductSet.Builder, com.google.cloud.vision.v1p3beta1.ProductSetOrBuilder>( productSets_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); productSets_ = null; } return productSetsBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no more * results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no more * results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no more * results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no more * results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no more * results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.vision.v1p3beta1.ListProductSetsResponse) } // @@protoc_insertion_point(class_scope:google.cloud.vision.v1p3beta1.ListProductSetsResponse) private static final com.google.cloud.vision.v1p3beta1.ListProductSetsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.vision.v1p3beta1.ListProductSetsResponse(); } public static com.google.cloud.vision.v1p3beta1.ListProductSetsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListProductSetsResponse> PARSER = new com.google.protobuf.AbstractParser<ListProductSetsResponse>() { @java.lang.Override public ListProductSetsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return 
builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListProductSetsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListProductSetsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.vision.v1p3beta1.ListProductSetsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
// NOTE(review): The following three lines are not Java source. They appear to be
// repository metadata (repo id, file size, file path) left over from a corrupted
// concatenation of two generated files; the v1p4beta1 ListProductSetsResponse
// class that follows belongs in its own file at the path below.
//   googleapis/google-cloud-java
//   36,559
//   java-vision/proto-google-cloud-vision-v1p4beta1/src/main/java/com/google/cloud/vision/v1p4beta1/ListProductSetsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/vision/v1p4beta1/product_search_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.vision.v1p4beta1; /** * * * <pre> * Response message for the `ListProductSets` method. * </pre> * * Protobuf type {@code google.cloud.vision.v1p4beta1.ListProductSetsResponse} */ public final class ListProductSetsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.vision.v1p4beta1.ListProductSetsResponse) ListProductSetsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListProductSetsResponse.newBuilder() to construct. 
private ListProductSetsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListProductSetsResponse() { productSets_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListProductSetsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.vision.v1p4beta1.ProductSearchServiceProto .internal_static_google_cloud_vision_v1p4beta1_ListProductSetsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.vision.v1p4beta1.ProductSearchServiceProto .internal_static_google_cloud_vision_v1p4beta1_ListProductSetsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.vision.v1p4beta1.ListProductSetsResponse.class, com.google.cloud.vision.v1p4beta1.ListProductSetsResponse.Builder.class); } public static final int PRODUCT_SETS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.vision.v1p4beta1.ProductSet> productSets_; /** * * * <pre> * List of ProductSets. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.ProductSet product_sets = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.vision.v1p4beta1.ProductSet> getProductSetsList() { return productSets_; } /** * * * <pre> * List of ProductSets. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.ProductSet product_sets = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.vision.v1p4beta1.ProductSetOrBuilder> getProductSetsOrBuilderList() { return productSets_; } /** * * * <pre> * List of ProductSets. 
* </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.ProductSet product_sets = 1;</code> */ @java.lang.Override public int getProductSetsCount() { return productSets_.size(); } /** * * * <pre> * List of ProductSets. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.ProductSet product_sets = 1;</code> */ @java.lang.Override public com.google.cloud.vision.v1p4beta1.ProductSet getProductSets(int index) { return productSets_.get(index); } /** * * * <pre> * List of ProductSets. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.ProductSet product_sets = 1;</code> */ @java.lang.Override public com.google.cloud.vision.v1p4beta1.ProductSetOrBuilder getProductSetsOrBuilder(int index) { return productSets_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no more * results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no more * results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < productSets_.size(); i++) { output.writeMessage(1, productSets_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < productSets_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, productSets_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.vision.v1p4beta1.ListProductSetsResponse)) { return super.equals(obj); } com.google.cloud.vision.v1p4beta1.ListProductSetsResponse other = (com.google.cloud.vision.v1p4beta1.ListProductSetsResponse) obj; if (!getProductSetsList().equals(other.getProductSetsList())) return false; if 
(!getNextPageToken().equals(other.getNextPageToken())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getProductSetsCount() > 0) { hash = (37 * hash) + PRODUCT_SETS_FIELD_NUMBER; hash = (53 * hash) + getProductSetsList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.vision.v1p4beta1.ListProductSetsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.vision.v1p4beta1.ListProductSetsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.vision.v1p4beta1.ListProductSetsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.vision.v1p4beta1.ListProductSetsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.vision.v1p4beta1.ListProductSetsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.vision.v1p4beta1.ListProductSetsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.vision.v1p4beta1.ListProductSetsResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.vision.v1p4beta1.ListProductSetsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.vision.v1p4beta1.ListProductSetsResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.vision.v1p4beta1.ListProductSetsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.vision.v1p4beta1.ListProductSetsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.vision.v1p4beta1.ListProductSetsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( 
com.google.cloud.vision.v1p4beta1.ListProductSetsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response message for the `ListProductSets` method. * </pre> * * Protobuf type {@code google.cloud.vision.v1p4beta1.ListProductSetsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.vision.v1p4beta1.ListProductSetsResponse) com.google.cloud.vision.v1p4beta1.ListProductSetsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.vision.v1p4beta1.ProductSearchServiceProto .internal_static_google_cloud_vision_v1p4beta1_ListProductSetsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.vision.v1p4beta1.ProductSearchServiceProto .internal_static_google_cloud_vision_v1p4beta1_ListProductSetsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.vision.v1p4beta1.ListProductSetsResponse.class, com.google.cloud.vision.v1p4beta1.ListProductSetsResponse.Builder.class); } // Construct using com.google.cloud.vision.v1p4beta1.ListProductSetsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (productSetsBuilder_ == null) { productSets_ = java.util.Collections.emptyList(); } else { productSets_ = null; productSetsBuilder_.clear(); } 
bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.vision.v1p4beta1.ProductSearchServiceProto .internal_static_google_cloud_vision_v1p4beta1_ListProductSetsResponse_descriptor; } @java.lang.Override public com.google.cloud.vision.v1p4beta1.ListProductSetsResponse getDefaultInstanceForType() { return com.google.cloud.vision.v1p4beta1.ListProductSetsResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.vision.v1p4beta1.ListProductSetsResponse build() { com.google.cloud.vision.v1p4beta1.ListProductSetsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.vision.v1p4beta1.ListProductSetsResponse buildPartial() { com.google.cloud.vision.v1p4beta1.ListProductSetsResponse result = new com.google.cloud.vision.v1p4beta1.ListProductSetsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.vision.v1p4beta1.ListProductSetsResponse result) { if (productSetsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { productSets_ = java.util.Collections.unmodifiableList(productSets_); bitField0_ = (bitField0_ & ~0x00000001); } result.productSets_ = productSets_; } else { result.productSets_ = productSetsBuilder_.build(); } } private void buildPartial0(com.google.cloud.vision.v1p4beta1.ListProductSetsResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); 
} @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.vision.v1p4beta1.ListProductSetsResponse) { return mergeFrom((com.google.cloud.vision.v1p4beta1.ListProductSetsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.vision.v1p4beta1.ListProductSetsResponse other) { if (other == com.google.cloud.vision.v1p4beta1.ListProductSetsResponse.getDefaultInstance()) return this; if (productSetsBuilder_ == null) { if (!other.productSets_.isEmpty()) { if (productSets_.isEmpty()) { productSets_ = other.productSets_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureProductSetsIsMutable(); productSets_.addAll(other.productSets_); } onChanged(); } } else { if (!other.productSets_.isEmpty()) { if (productSetsBuilder_.isEmpty()) { productSetsBuilder_.dispose(); productSetsBuilder_ = null; productSets_ = other.productSets_; bitField0_ = (bitField0_ & ~0x00000001); productSetsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getProductSetsFieldBuilder() : null; } else { productSetsBuilder_.addAllMessages(other.productSets_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.vision.v1p4beta1.ProductSet m = input.readMessage( com.google.cloud.vision.v1p4beta1.ProductSet.parser(), extensionRegistry); if (productSetsBuilder_ == null) { ensureProductSetsIsMutable(); productSets_.add(m); } else { productSetsBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.vision.v1p4beta1.ProductSet> productSets_ = java.util.Collections.emptyList(); private void ensureProductSetsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { productSets_ = new java.util.ArrayList<com.google.cloud.vision.v1p4beta1.ProductSet>(productSets_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.vision.v1p4beta1.ProductSet, com.google.cloud.vision.v1p4beta1.ProductSet.Builder, 
com.google.cloud.vision.v1p4beta1.ProductSetOrBuilder> productSetsBuilder_; /** * * * <pre> * List of ProductSets. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.ProductSet product_sets = 1;</code> */ public java.util.List<com.google.cloud.vision.v1p4beta1.ProductSet> getProductSetsList() { if (productSetsBuilder_ == null) { return java.util.Collections.unmodifiableList(productSets_); } else { return productSetsBuilder_.getMessageList(); } } /** * * * <pre> * List of ProductSets. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.ProductSet product_sets = 1;</code> */ public int getProductSetsCount() { if (productSetsBuilder_ == null) { return productSets_.size(); } else { return productSetsBuilder_.getCount(); } } /** * * * <pre> * List of ProductSets. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.ProductSet product_sets = 1;</code> */ public com.google.cloud.vision.v1p4beta1.ProductSet getProductSets(int index) { if (productSetsBuilder_ == null) { return productSets_.get(index); } else { return productSetsBuilder_.getMessage(index); } } /** * * * <pre> * List of ProductSets. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.ProductSet product_sets = 1;</code> */ public Builder setProductSets(int index, com.google.cloud.vision.v1p4beta1.ProductSet value) { if (productSetsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureProductSetsIsMutable(); productSets_.set(index, value); onChanged(); } else { productSetsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * List of ProductSets. 
* </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.ProductSet product_sets = 1;</code> */ public Builder setProductSets( int index, com.google.cloud.vision.v1p4beta1.ProductSet.Builder builderForValue) { if (productSetsBuilder_ == null) { ensureProductSetsIsMutable(); productSets_.set(index, builderForValue.build()); onChanged(); } else { productSetsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * List of ProductSets. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.ProductSet product_sets = 1;</code> */ public Builder addProductSets(com.google.cloud.vision.v1p4beta1.ProductSet value) { if (productSetsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureProductSetsIsMutable(); productSets_.add(value); onChanged(); } else { productSetsBuilder_.addMessage(value); } return this; } /** * * * <pre> * List of ProductSets. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.ProductSet product_sets = 1;</code> */ public Builder addProductSets(int index, com.google.cloud.vision.v1p4beta1.ProductSet value) { if (productSetsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureProductSetsIsMutable(); productSets_.add(index, value); onChanged(); } else { productSetsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * List of ProductSets. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.ProductSet product_sets = 1;</code> */ public Builder addProductSets( com.google.cloud.vision.v1p4beta1.ProductSet.Builder builderForValue) { if (productSetsBuilder_ == null) { ensureProductSetsIsMutable(); productSets_.add(builderForValue.build()); onChanged(); } else { productSetsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * List of ProductSets. 
* </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.ProductSet product_sets = 1;</code> */ public Builder addProductSets( int index, com.google.cloud.vision.v1p4beta1.ProductSet.Builder builderForValue) { if (productSetsBuilder_ == null) { ensureProductSetsIsMutable(); productSets_.add(index, builderForValue.build()); onChanged(); } else { productSetsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * List of ProductSets. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.ProductSet product_sets = 1;</code> */ public Builder addAllProductSets( java.lang.Iterable<? extends com.google.cloud.vision.v1p4beta1.ProductSet> values) { if (productSetsBuilder_ == null) { ensureProductSetsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, productSets_); onChanged(); } else { productSetsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * List of ProductSets. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.ProductSet product_sets = 1;</code> */ public Builder clearProductSets() { if (productSetsBuilder_ == null) { productSets_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { productSetsBuilder_.clear(); } return this; } /** * * * <pre> * List of ProductSets. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.ProductSet product_sets = 1;</code> */ public Builder removeProductSets(int index) { if (productSetsBuilder_ == null) { ensureProductSetsIsMutable(); productSets_.remove(index); onChanged(); } else { productSetsBuilder_.remove(index); } return this; } /** * * * <pre> * List of ProductSets. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.ProductSet product_sets = 1;</code> */ public com.google.cloud.vision.v1p4beta1.ProductSet.Builder getProductSetsBuilder(int index) { return getProductSetsFieldBuilder().getBuilder(index); } /** * * * <pre> * List of ProductSets. 
* </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.ProductSet product_sets = 1;</code> */ public com.google.cloud.vision.v1p4beta1.ProductSetOrBuilder getProductSetsOrBuilder( int index) { if (productSetsBuilder_ == null) { return productSets_.get(index); } else { return productSetsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * List of ProductSets. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.ProductSet product_sets = 1;</code> */ public java.util.List<? extends com.google.cloud.vision.v1p4beta1.ProductSetOrBuilder> getProductSetsOrBuilderList() { if (productSetsBuilder_ != null) { return productSetsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(productSets_); } } /** * * * <pre> * List of ProductSets. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.ProductSet product_sets = 1;</code> */ public com.google.cloud.vision.v1p4beta1.ProductSet.Builder addProductSetsBuilder() { return getProductSetsFieldBuilder() .addBuilder(com.google.cloud.vision.v1p4beta1.ProductSet.getDefaultInstance()); } /** * * * <pre> * List of ProductSets. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.ProductSet product_sets = 1;</code> */ public com.google.cloud.vision.v1p4beta1.ProductSet.Builder addProductSetsBuilder(int index) { return getProductSetsFieldBuilder() .addBuilder(index, com.google.cloud.vision.v1p4beta1.ProductSet.getDefaultInstance()); } /** * * * <pre> * List of ProductSets. 
* </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.ProductSet product_sets = 1;</code> */ public java.util.List<com.google.cloud.vision.v1p4beta1.ProductSet.Builder> getProductSetsBuilderList() { return getProductSetsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.vision.v1p4beta1.ProductSet, com.google.cloud.vision.v1p4beta1.ProductSet.Builder, com.google.cloud.vision.v1p4beta1.ProductSetOrBuilder> getProductSetsFieldBuilder() { if (productSetsBuilder_ == null) { productSetsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.vision.v1p4beta1.ProductSet, com.google.cloud.vision.v1p4beta1.ProductSet.Builder, com.google.cloud.vision.v1p4beta1.ProductSetOrBuilder>( productSets_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); productSets_ = null; } return productSetsBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no more * results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no more * results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no more * results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no more * results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no more * results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.vision.v1p4beta1.ListProductSetsResponse) } // @@protoc_insertion_point(class_scope:google.cloud.vision.v1p4beta1.ListProductSetsResponse) private static final com.google.cloud.vision.v1p4beta1.ListProductSetsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.vision.v1p4beta1.ListProductSetsResponse(); } public static com.google.cloud.vision.v1p4beta1.ListProductSetsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListProductSetsResponse> PARSER = new com.google.protobuf.AbstractParser<ListProductSetsResponse>() { @java.lang.Override public ListProductSetsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return 
builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListProductSetsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListProductSetsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.vision.v1p4beta1.ListProductSetsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
36,570
java-visionai/proto-google-cloud-visionai-v1/src/main/java/com/google/cloud/visionai/v1/ListCollectionsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/visionai/v1/warehouse.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.visionai.v1; /** * * * <pre> * Response message for ListCollections. * </pre> * * Protobuf type {@code google.cloud.visionai.v1.ListCollectionsResponse} */ public final class ListCollectionsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.visionai.v1.ListCollectionsResponse) ListCollectionsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListCollectionsResponse.newBuilder() to construct. 
private ListCollectionsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListCollectionsResponse() { collections_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListCollectionsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.visionai.v1.WarehouseProto .internal_static_google_cloud_visionai_v1_ListCollectionsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.visionai.v1.WarehouseProto .internal_static_google_cloud_visionai_v1_ListCollectionsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.visionai.v1.ListCollectionsResponse.class, com.google.cloud.visionai.v1.ListCollectionsResponse.Builder.class); } public static final int COLLECTIONS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.visionai.v1.Collection> collections_; /** * * * <pre> * The collections from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.Collection collections = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.visionai.v1.Collection> getCollectionsList() { return collections_; } /** * * * <pre> * The collections from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.Collection collections = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.visionai.v1.CollectionOrBuilder> getCollectionsOrBuilderList() { return collections_; } /** * * * <pre> * The collections from the specified corpus. 
* </pre> * * <code>repeated .google.cloud.visionai.v1.Collection collections = 1;</code> */ @java.lang.Override public int getCollectionsCount() { return collections_.size(); } /** * * * <pre> * The collections from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.Collection collections = 1;</code> */ @java.lang.Override public com.google.cloud.visionai.v1.Collection getCollections(int index) { return collections_.get(index); } /** * * * <pre> * The collections from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.Collection collections = 1;</code> */ @java.lang.Override public com.google.cloud.visionai.v1.CollectionOrBuilder getCollectionsOrBuilder(int index) { return collections_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < collections_.size(); i++) { output.writeMessage(1, collections_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < collections_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, collections_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.visionai.v1.ListCollectionsResponse)) { return super.equals(obj); } com.google.cloud.visionai.v1.ListCollectionsResponse other = (com.google.cloud.visionai.v1.ListCollectionsResponse) obj; if (!getCollectionsList().equals(other.getCollectionsList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) 
return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getCollectionsCount() > 0) { hash = (37 * hash) + COLLECTIONS_FIELD_NUMBER; hash = (53 * hash) + getCollectionsList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.visionai.v1.ListCollectionsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.visionai.v1.ListCollectionsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.visionai.v1.ListCollectionsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.visionai.v1.ListCollectionsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.visionai.v1.ListCollectionsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.visionai.v1.ListCollectionsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, 
extensionRegistry); } public static com.google.cloud.visionai.v1.ListCollectionsResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.visionai.v1.ListCollectionsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.visionai.v1.ListCollectionsResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.visionai.v1.ListCollectionsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.visionai.v1.ListCollectionsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.visionai.v1.ListCollectionsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.visionai.v1.ListCollectionsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder 
toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response message for ListCollections. * </pre> * * Protobuf type {@code google.cloud.visionai.v1.ListCollectionsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.visionai.v1.ListCollectionsResponse) com.google.cloud.visionai.v1.ListCollectionsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.visionai.v1.WarehouseProto .internal_static_google_cloud_visionai_v1_ListCollectionsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.visionai.v1.WarehouseProto .internal_static_google_cloud_visionai_v1_ListCollectionsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.visionai.v1.ListCollectionsResponse.class, com.google.cloud.visionai.v1.ListCollectionsResponse.Builder.class); } // Construct using com.google.cloud.visionai.v1.ListCollectionsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (collectionsBuilder_ == null) { collections_ = java.util.Collections.emptyList(); } else { collections_ = null; collectionsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.visionai.v1.WarehouseProto 
.internal_static_google_cloud_visionai_v1_ListCollectionsResponse_descriptor; } @java.lang.Override public com.google.cloud.visionai.v1.ListCollectionsResponse getDefaultInstanceForType() { return com.google.cloud.visionai.v1.ListCollectionsResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.visionai.v1.ListCollectionsResponse build() { com.google.cloud.visionai.v1.ListCollectionsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.visionai.v1.ListCollectionsResponse buildPartial() { com.google.cloud.visionai.v1.ListCollectionsResponse result = new com.google.cloud.visionai.v1.ListCollectionsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.visionai.v1.ListCollectionsResponse result) { if (collectionsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { collections_ = java.util.Collections.unmodifiableList(collections_); bitField0_ = (bitField0_ & ~0x00000001); } result.collections_ = collections_; } else { result.collections_ = collectionsBuilder_.build(); } } private void buildPartial0(com.google.cloud.visionai.v1.ListCollectionsResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } 
@java.lang.Override
public Builder setRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
  return super.setRepeatedField(field, index, value);
}

@java.lang.Override
public Builder addRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.addRepeatedField(field, value);
}

// Reflective merge entry point: dispatches to the type-safe overload when
// `other` is the same message type, otherwise falls back to the field-by-field
// reflective merge in the generated superclass.
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
  if (other instanceof com.google.cloud.visionai.v1.ListCollectionsResponse) {
    return mergeFrom((com.google.cloud.visionai.v1.ListCollectionsResponse) other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}

// Merges every set field of `other` into this builder. For the repeated
// `collections` field, elements from `other` are appended after any elements
// already present (standard protobuf repeated-field merge semantics).
public Builder mergeFrom(com.google.cloud.visionai.v1.ListCollectionsResponse other) {
  if (other == com.google.cloud.visionai.v1.ListCollectionsResponse.getDefaultInstance())
    return this;
  if (collectionsBuilder_ == null) {
    // Plain-list representation (no nested builders handed out yet).
    if (!other.collections_.isEmpty()) {
      if (collections_.isEmpty()) {
        // Nothing local: share other's immutable list and clear the
        // "locally mutable" bit so it is copied before any future mutation.
        collections_ = other.collections_;
        bitField0_ = (bitField0_ & ~0x00000001);
      } else {
        ensureCollectionsIsMutable();
        collections_.addAll(other.collections_);
      }
      onChanged();
    }
  } else {
    // Field-builder representation.
    if (!other.collections_.isEmpty()) {
      if (collectionsBuilder_.isEmpty()) {
        // The builder holds nothing yet: drop it, adopt other's list
        // wholesale, and recreate the field builder lazily (eagerly only
        // when alwaysUseFieldBuilders is set — NOTE: the ternary continues
        // in the following chunk).
        collectionsBuilder_.dispose();
        collectionsBuilder_ = null;
        collections_ = other.collections_;
        bitField0_ = (bitField0_ & ~0x00000001);
        collectionsBuilder_ =
            com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getCollectionsFieldBuilder() : null; } else { collectionsBuilder_.addAllMessages(other.collections_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.visionai.v1.Collection m = input.readMessage( com.google.cloud.visionai.v1.Collection.parser(), extensionRegistry); if (collectionsBuilder_ == null) { ensureCollectionsIsMutable(); collections_.add(m); } else { collectionsBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.visionai.v1.Collection> collections_ = java.util.Collections.emptyList(); private void ensureCollectionsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { collections_ = new java.util.ArrayList<com.google.cloud.visionai.v1.Collection>(collections_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.visionai.v1.Collection, com.google.cloud.visionai.v1.Collection.Builder, 
com.google.cloud.visionai.v1.CollectionOrBuilder> collectionsBuilder_; /** * * * <pre> * The collections from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.Collection collections = 1;</code> */ public java.util.List<com.google.cloud.visionai.v1.Collection> getCollectionsList() { if (collectionsBuilder_ == null) { return java.util.Collections.unmodifiableList(collections_); } else { return collectionsBuilder_.getMessageList(); } } /** * * * <pre> * The collections from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.Collection collections = 1;</code> */ public int getCollectionsCount() { if (collectionsBuilder_ == null) { return collections_.size(); } else { return collectionsBuilder_.getCount(); } } /** * * * <pre> * The collections from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.Collection collections = 1;</code> */ public com.google.cloud.visionai.v1.Collection getCollections(int index) { if (collectionsBuilder_ == null) { return collections_.get(index); } else { return collectionsBuilder_.getMessage(index); } } /** * * * <pre> * The collections from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.Collection collections = 1;</code> */ public Builder setCollections(int index, com.google.cloud.visionai.v1.Collection value) { if (collectionsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureCollectionsIsMutable(); collections_.set(index, value); onChanged(); } else { collectionsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The collections from the specified corpus. 
* </pre> * * <code>repeated .google.cloud.visionai.v1.Collection collections = 1;</code> */ public Builder setCollections( int index, com.google.cloud.visionai.v1.Collection.Builder builderForValue) { if (collectionsBuilder_ == null) { ensureCollectionsIsMutable(); collections_.set(index, builderForValue.build()); onChanged(); } else { collectionsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The collections from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.Collection collections = 1;</code> */ public Builder addCollections(com.google.cloud.visionai.v1.Collection value) { if (collectionsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureCollectionsIsMutable(); collections_.add(value); onChanged(); } else { collectionsBuilder_.addMessage(value); } return this; } /** * * * <pre> * The collections from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.Collection collections = 1;</code> */ public Builder addCollections(int index, com.google.cloud.visionai.v1.Collection value) { if (collectionsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureCollectionsIsMutable(); collections_.add(index, value); onChanged(); } else { collectionsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The collections from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.Collection collections = 1;</code> */ public Builder addCollections(com.google.cloud.visionai.v1.Collection.Builder builderForValue) { if (collectionsBuilder_ == null) { ensureCollectionsIsMutable(); collections_.add(builderForValue.build()); onChanged(); } else { collectionsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The collections from the specified corpus. 
* </pre> * * <code>repeated .google.cloud.visionai.v1.Collection collections = 1;</code> */ public Builder addCollections( int index, com.google.cloud.visionai.v1.Collection.Builder builderForValue) { if (collectionsBuilder_ == null) { ensureCollectionsIsMutable(); collections_.add(index, builderForValue.build()); onChanged(); } else { collectionsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The collections from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.Collection collections = 1;</code> */ public Builder addAllCollections( java.lang.Iterable<? extends com.google.cloud.visionai.v1.Collection> values) { if (collectionsBuilder_ == null) { ensureCollectionsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, collections_); onChanged(); } else { collectionsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The collections from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.Collection collections = 1;</code> */ public Builder clearCollections() { if (collectionsBuilder_ == null) { collections_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { collectionsBuilder_.clear(); } return this; } /** * * * <pre> * The collections from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.Collection collections = 1;</code> */ public Builder removeCollections(int index) { if (collectionsBuilder_ == null) { ensureCollectionsIsMutable(); collections_.remove(index); onChanged(); } else { collectionsBuilder_.remove(index); } return this; } /** * * * <pre> * The collections from the specified corpus. 
* </pre> * * <code>repeated .google.cloud.visionai.v1.Collection collections = 1;</code> */ public com.google.cloud.visionai.v1.Collection.Builder getCollectionsBuilder(int index) { return getCollectionsFieldBuilder().getBuilder(index); } /** * * * <pre> * The collections from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.Collection collections = 1;</code> */ public com.google.cloud.visionai.v1.CollectionOrBuilder getCollectionsOrBuilder(int index) { if (collectionsBuilder_ == null) { return collections_.get(index); } else { return collectionsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The collections from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.Collection collections = 1;</code> */ public java.util.List<? extends com.google.cloud.visionai.v1.CollectionOrBuilder> getCollectionsOrBuilderList() { if (collectionsBuilder_ != null) { return collectionsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(collections_); } } /** * * * <pre> * The collections from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.Collection collections = 1;</code> */ public com.google.cloud.visionai.v1.Collection.Builder addCollectionsBuilder() { return getCollectionsFieldBuilder() .addBuilder(com.google.cloud.visionai.v1.Collection.getDefaultInstance()); } /** * * * <pre> * The collections from the specified corpus. * </pre> * * <code>repeated .google.cloud.visionai.v1.Collection collections = 1;</code> */ public com.google.cloud.visionai.v1.Collection.Builder addCollectionsBuilder(int index) { return getCollectionsFieldBuilder() .addBuilder(index, com.google.cloud.visionai.v1.Collection.getDefaultInstance()); } /** * * * <pre> * The collections from the specified corpus. 
* </pre> * * <code>repeated .google.cloud.visionai.v1.Collection collections = 1;</code> */ public java.util.List<com.google.cloud.visionai.v1.Collection.Builder> getCollectionsBuilderList() { return getCollectionsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.visionai.v1.Collection, com.google.cloud.visionai.v1.Collection.Builder, com.google.cloud.visionai.v1.CollectionOrBuilder> getCollectionsFieldBuilder() { if (collectionsBuilder_ == null) { collectionsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.visionai.v1.Collection, com.google.cloud.visionai.v1.Collection.Builder, com.google.cloud.visionai.v1.CollectionOrBuilder>( collections_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); collections_ = null; } return collectionsBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.visionai.v1.ListCollectionsResponse) } // @@protoc_insertion_point(class_scope:google.cloud.visionai.v1.ListCollectionsResponse) private static final com.google.cloud.visionai.v1.ListCollectionsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.visionai.v1.ListCollectionsResponse(); } public static com.google.cloud.visionai.v1.ListCollectionsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListCollectionsResponse> PARSER = new com.google.protobuf.AbstractParser<ListCollectionsResponse>() { @java.lang.Override public ListCollectionsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static 
com.google.protobuf.Parser<ListCollectionsResponse> parser() {
  // Static accessor for the singleton message parser.
  return PARSER;
}

// Instance-level parser accessor required by the MessageLite contract;
// returns the same singleton as parser().
@java.lang.Override
public com.google.protobuf.Parser<ListCollectionsResponse> getParserForType() {
  return PARSER;
}

// Returns the immutable default (all-fields-unset) instance of this message.
@java.lang.Override
public com.google.cloud.visionai.v1.ListCollectionsResponse getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
googleapis/google-api-java-client-services
36,763
clients/google-api-services-iamcredentials/v1/1.30.1/com/google/api/services/iamcredentials/v1/IAMCredentials.java
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * This code was generated by https://github.com/googleapis/google-api-java-client-services/ * Modify at your own risk. */ package com.google.api.services.iamcredentials.v1; /** * Service definition for IAMCredentials (v1). * * <p> * Creates short-lived credentials for impersonating IAM service accounts. To enable this API, you must enable the IAM API (iam.googleapis.com). * </p> * * <p> * For more information about this service, see the * <a href="https://cloud.google.com/iam/docs/creating-short-lived-service-account-credentials" target="_blank">API Documentation</a> * </p> * * <p> * This service uses {@link IAMCredentialsRequestInitializer} to initialize global parameters via its * {@link Builder}. * </p> * * @since 1.3 * @author Google, Inc. */ @SuppressWarnings("javadoc") public class IAMCredentials extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient { // Note: Leave this static initializer at the top of the file. static { com.google.api.client.util.Preconditions.checkState( com.google.api.client.googleapis.GoogleUtils.MAJOR_VERSION == 1 && com.google.api.client.googleapis.GoogleUtils.MINOR_VERSION >= 15, "You are currently running with version %s of google-api-client. 
" + "You need at least version 1.15 of google-api-client to run version " + "1.30.10 of the IAM Service Account Credentials API library.", com.google.api.client.googleapis.GoogleUtils.VERSION); } /** * The default encoded root URL of the service. This is determined when the library is generated * and normally should not be changed. * * @since 1.7 */ public static final String DEFAULT_ROOT_URL = "https://iamcredentials.googleapis.com/"; /** * The default encoded service path of the service. This is determined when the library is * generated and normally should not be changed. * * @since 1.7 */ public static final String DEFAULT_SERVICE_PATH = ""; /** * The default encoded batch path of the service. This is determined when the library is * generated and normally should not be changed. * * @since 1.23 */ public static final String DEFAULT_BATCH_PATH = "batch"; /** * The default encoded base URL of the service. This is determined when the library is generated * and normally should not be changed. */ public static final String DEFAULT_BASE_URL = DEFAULT_ROOT_URL + DEFAULT_SERVICE_PATH; /** * Constructor. * * <p> * Use {@link Builder} if you need to specify any of the optional parameters. 
* </p> * * @param transport HTTP transport, which should normally be: * <ul> * <li>Google App Engine: * {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li> * <li>Android: {@code newCompatibleTransport} from * {@code com.google.api.client.extensions.android.http.AndroidHttp}</li> * <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()} * </li> * </ul> * @param jsonFactory JSON factory, which may be: * <ul> * <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li> * <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li> * <li>Android Honeycomb or higher: * {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li> * </ul> * @param httpRequestInitializer HTTP request initializer or {@code null} for none * @since 1.7 */ public IAMCredentials(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory, com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) { this(new Builder(transport, jsonFactory, httpRequestInitializer)); } /** * @param builder builder */ IAMCredentials(Builder builder) { super(builder); } @Override protected void initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest<?> httpClientRequest) throws java.io.IOException { super.initialize(httpClientRequest); } /** * An accessor for creating requests from the Projects collection. * * <p>The typical use is:</p> * <pre> * {@code IAMCredentials iamcredentials = new IAMCredentials(...);} * {@code IAMCredentials.Projects.List request = iamcredentials.projects().list(parameters ...)} * </pre> * * @return the resource collection */ public Projects projects() { return new Projects(); } /** * The "projects" collection of methods. */ public class Projects { /** * An accessor for creating requests from the ServiceAccounts collection. 
* * <p>The typical use is:</p> * <pre> * {@code IAMCredentials iamcredentials = new IAMCredentials(...);} * {@code IAMCredentials.ServiceAccounts.List request = iamcredentials.serviceAccounts().list(parameters ...)} * </pre> * * @return the resource collection */ public ServiceAccounts serviceAccounts() { return new ServiceAccounts(); } /** * The "serviceAccounts" collection of methods. */ public class ServiceAccounts { /** * Generates an OAuth 2.0 access token for a service account. * * Create a request for the method "serviceAccounts.generateAccessToken". * * This request holds the parameters needed by the iamcredentials server. After setting any * optional parameters, call the {@link GenerateAccessToken#execute()} method to invoke the remote * operation. * * @param name Required. The resource name of the service account for which the credentials are requested, in the * following format: `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` * wildcard character is required; replacing it with a project ID is invalid. * @param content the {@link com.google.api.services.iamcredentials.v1.model.GenerateAccessTokenRequest} * @return the request */ public GenerateAccessToken generateAccessToken(java.lang.String name, com.google.api.services.iamcredentials.v1.model.GenerateAccessTokenRequest content) throws java.io.IOException { GenerateAccessToken result = new GenerateAccessToken(name, content); initialize(result); return result; } public class GenerateAccessToken extends IAMCredentialsRequest<com.google.api.services.iamcredentials.v1.model.GenerateAccessTokenResponse> { private static final String REST_PATH = "v1/{+name}:generateAccessToken"; private final java.util.regex.Pattern NAME_PATTERN = java.util.regex.Pattern.compile("^projects/[^/]+/serviceAccounts/[^/]+$"); /** * Generates an OAuth 2.0 access token for a service account. * * Create a request for the method "serviceAccounts.generateAccessToken". 
 *
 * This request holds the parameters needed by the iamcredentials server. After setting any
 * optional parameters, call the {@link GenerateAccessToken#execute()} method to invoke the remote
 * operation. <p> {@link
 * GenerateAccessToken#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)}
 * must be called to initialize this instance immediately after invoking the constructor. </p>
 *
 * @param name Required. The resource name of the service account for which the credentials are
 *        requested, in the following format:
 *        `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` wildcard character is
 *        required; replacing it with a project ID is invalid.
 * @param content the {@link com.google.api.services.iamcredentials.v1.model.GenerateAccessTokenRequest}
 * @since 1.13
 */
protected GenerateAccessToken(java.lang.String name, com.google.api.services.iamcredentials.v1.model.GenerateAccessTokenRequest content) {
  super(IAMCredentials.this, "POST", REST_PATH, content, com.google.api.services.iamcredentials.v1.model.GenerateAccessTokenResponse.class);
  // `name` is a required path parameter; fail fast before the request is built.
  this.name = com.google.api.client.util.Preconditions.checkNotNull(name, "Required parameter name must be specified.");
  if (!getSuppressPatternChecks()) {
    com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(),
        "Parameter name must conform to the pattern " +
        "^projects/[^/]+/serviceAccounts/[^/]+$");
  }
}

// Covariant overrides of the standard query parameters so that call chains
// keep the concrete GenerateAccessToken type.
@Override
public GenerateAccessToken set$Xgafv(java.lang.String $Xgafv) {
  return (GenerateAccessToken) super.set$Xgafv($Xgafv);
}

@Override
public GenerateAccessToken setAccessToken(java.lang.String accessToken) {
  return (GenerateAccessToken) super.setAccessToken(accessToken);
}

@Override
public GenerateAccessToken setAlt(java.lang.String alt) {
  return (GenerateAccessToken) super.setAlt(alt);
}

@Override
public GenerateAccessToken setCallback(java.lang.String callback) {
  return (GenerateAccessToken) super.setCallback(callback);
}

@Override
public
GenerateAccessToken setFields(java.lang.String fields) { return (GenerateAccessToken) super.setFields(fields); } @Override public GenerateAccessToken setKey(java.lang.String key) { return (GenerateAccessToken) super.setKey(key); } @Override public GenerateAccessToken setOauthToken(java.lang.String oauthToken) { return (GenerateAccessToken) super.setOauthToken(oauthToken); } @Override public GenerateAccessToken setPrettyPrint(java.lang.Boolean prettyPrint) { return (GenerateAccessToken) super.setPrettyPrint(prettyPrint); } @Override public GenerateAccessToken setQuotaUser(java.lang.String quotaUser) { return (GenerateAccessToken) super.setQuotaUser(quotaUser); } @Override public GenerateAccessToken setUploadType(java.lang.String uploadType) { return (GenerateAccessToken) super.setUploadType(uploadType); } @Override public GenerateAccessToken setUploadProtocol(java.lang.String uploadProtocol) { return (GenerateAccessToken) super.setUploadProtocol(uploadProtocol); } /** * Required. The resource name of the service account for which the credentials are * requested, in the following format: * `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` wildcard character is * required; replacing it with a project ID is invalid. */ @com.google.api.client.util.Key private java.lang.String name; /** Required. The resource name of the service account for which the credentials are requested, in the following format: `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` wildcard character is required; replacing it with a project ID is invalid. */ public java.lang.String getName() { return name; } /** * Required. The resource name of the service account for which the credentials are * requested, in the following format: * `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` wildcard character is * required; replacing it with a project ID is invalid. 
*/ public GenerateAccessToken setName(java.lang.String name) { if (!getSuppressPatternChecks()) { com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(), "Parameter name must conform to the pattern " + "^projects/[^/]+/serviceAccounts/[^/]+$"); } this.name = name; return this; } @Override public GenerateAccessToken set(String parameterName, Object value) { return (GenerateAccessToken) super.set(parameterName, value); } } /** * Generates an OpenID Connect ID token for a service account. * * Create a request for the method "serviceAccounts.generateIdToken". * * This request holds the parameters needed by the iamcredentials server. After setting any * optional parameters, call the {@link GenerateIdToken#execute()} method to invoke the remote * operation. * * @param name Required. The resource name of the service account for which the credentials are requested, in the * following format: `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` * wildcard character is required; replacing it with a project ID is invalid. * @param content the {@link com.google.api.services.iamcredentials.v1.model.GenerateIdTokenRequest} * @return the request */ public GenerateIdToken generateIdToken(java.lang.String name, com.google.api.services.iamcredentials.v1.model.GenerateIdTokenRequest content) throws java.io.IOException { GenerateIdToken result = new GenerateIdToken(name, content); initialize(result); return result; } public class GenerateIdToken extends IAMCredentialsRequest<com.google.api.services.iamcredentials.v1.model.GenerateIdTokenResponse> { private static final String REST_PATH = "v1/{+name}:generateIdToken"; private final java.util.regex.Pattern NAME_PATTERN = java.util.regex.Pattern.compile("^projects/[^/]+/serviceAccounts/[^/]+$"); /** * Generates an OpenID Connect ID token for a service account. * * Create a request for the method "serviceAccounts.generateIdToken". 
 *
 * This request holds the parameters needed by the iamcredentials server. After setting any
 * optional parameters, call the {@link GenerateIdToken#execute()} method to invoke the remote
 * operation. <p> {@link
 * GenerateIdToken#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)}
 * must be called to initialize this instance immediately after invoking the constructor. </p>
 *
 * @param name Required. The resource name of the service account for which the credentials are
 *        requested, in the following format:
 *        `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` wildcard character is
 *        required; replacing it with a project ID is invalid.
 * @param content the {@link com.google.api.services.iamcredentials.v1.model.GenerateIdTokenRequest}
 * @since 1.13
 */
protected GenerateIdToken(java.lang.String name, com.google.api.services.iamcredentials.v1.model.GenerateIdTokenRequest content) {
  super(IAMCredentials.this, "POST", REST_PATH, content, com.google.api.services.iamcredentials.v1.model.GenerateIdTokenResponse.class);
  // `name` is a required path parameter; fail fast before the request is built.
  this.name = com.google.api.client.util.Preconditions.checkNotNull(name, "Required parameter name must be specified.");
  if (!getSuppressPatternChecks()) {
    com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(),
        "Parameter name must conform to the pattern " +
        "^projects/[^/]+/serviceAccounts/[^/]+$");
  }
}

// Covariant overrides of the standard query parameters so that call chains
// keep the concrete GenerateIdToken type.
@Override
public GenerateIdToken set$Xgafv(java.lang.String $Xgafv) {
  return (GenerateIdToken) super.set$Xgafv($Xgafv);
}

@Override
public GenerateIdToken setAccessToken(java.lang.String accessToken) {
  return (GenerateIdToken) super.setAccessToken(accessToken);
}

@Override
public GenerateIdToken setAlt(java.lang.String alt) {
  return (GenerateIdToken) super.setAlt(alt);
}

@Override
public GenerateIdToken setCallback(java.lang.String callback) {
  return (GenerateIdToken) super.setCallback(callback);
}

@Override
public GenerateIdToken setFields(java.lang.String fields) {
  return
(GenerateIdToken) super.setFields(fields); } @Override public GenerateIdToken setKey(java.lang.String key) { return (GenerateIdToken) super.setKey(key); } @Override public GenerateIdToken setOauthToken(java.lang.String oauthToken) { return (GenerateIdToken) super.setOauthToken(oauthToken); } @Override public GenerateIdToken setPrettyPrint(java.lang.Boolean prettyPrint) { return (GenerateIdToken) super.setPrettyPrint(prettyPrint); } @Override public GenerateIdToken setQuotaUser(java.lang.String quotaUser) { return (GenerateIdToken) super.setQuotaUser(quotaUser); } @Override public GenerateIdToken setUploadType(java.lang.String uploadType) { return (GenerateIdToken) super.setUploadType(uploadType); } @Override public GenerateIdToken setUploadProtocol(java.lang.String uploadProtocol) { return (GenerateIdToken) super.setUploadProtocol(uploadProtocol); } /** * Required. The resource name of the service account for which the credentials are * requested, in the following format: * `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` wildcard character is * required; replacing it with a project ID is invalid. */ @com.google.api.client.util.Key private java.lang.String name; /** Required. The resource name of the service account for which the credentials are requested, in the following format: `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` wildcard character is required; replacing it with a project ID is invalid. */ public java.lang.String getName() { return name; } /** * Required. The resource name of the service account for which the credentials are * requested, in the following format: * `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` wildcard character is * required; replacing it with a project ID is invalid. 
*/ public GenerateIdToken setName(java.lang.String name) { if (!getSuppressPatternChecks()) { com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(), "Parameter name must conform to the pattern " + "^projects/[^/]+/serviceAccounts/[^/]+$"); } this.name = name; return this; } @Override public GenerateIdToken set(String parameterName, Object value) { return (GenerateIdToken) super.set(parameterName, value); } } /** * Signs a blob using a service account's system-managed private key. * * Create a request for the method "serviceAccounts.signBlob". * * This request holds the parameters needed by the iamcredentials server. After setting any * optional parameters, call the {@link SignBlob#execute()} method to invoke the remote operation. * * @param name Required. The resource name of the service account for which the credentials are requested, in the * following format: `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` * wildcard character is required; replacing it with a project ID is invalid. * @param content the {@link com.google.api.services.iamcredentials.v1.model.SignBlobRequest} * @return the request */ public SignBlob signBlob(java.lang.String name, com.google.api.services.iamcredentials.v1.model.SignBlobRequest content) throws java.io.IOException { SignBlob result = new SignBlob(name, content); initialize(result); return result; } public class SignBlob extends IAMCredentialsRequest<com.google.api.services.iamcredentials.v1.model.SignBlobResponse> { private static final String REST_PATH = "v1/{+name}:signBlob"; private final java.util.regex.Pattern NAME_PATTERN = java.util.regex.Pattern.compile("^projects/[^/]+/serviceAccounts/[^/]+$"); /** * Signs a blob using a service account's system-managed private key. * * Create a request for the method "serviceAccounts.signBlob". * * This request holds the parameters needed by the the iamcredentials server. 
After setting any * optional parameters, call the {@link SignBlob#execute()} method to invoke the remote operation. * <p> {@link * SignBlob#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} * must be called to initialize this instance immediately after invoking the constructor. </p> * * @param name Required. The resource name of the service account for which the credentials are requested, in the * following format: `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` * wildcard character is required; replacing it with a project ID is invalid. * @param content the {@link com.google.api.services.iamcredentials.v1.model.SignBlobRequest} * @since 1.13 */ protected SignBlob(java.lang.String name, com.google.api.services.iamcredentials.v1.model.SignBlobRequest content) { super(IAMCredentials.this, "POST", REST_PATH, content, com.google.api.services.iamcredentials.v1.model.SignBlobResponse.class); this.name = com.google.api.client.util.Preconditions.checkNotNull(name, "Required parameter name must be specified."); if (!getSuppressPatternChecks()) { com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(), "Parameter name must conform to the pattern " + "^projects/[^/]+/serviceAccounts/[^/]+$"); } } @Override public SignBlob set$Xgafv(java.lang.String $Xgafv) { return (SignBlob) super.set$Xgafv($Xgafv); } @Override public SignBlob setAccessToken(java.lang.String accessToken) { return (SignBlob) super.setAccessToken(accessToken); } @Override public SignBlob setAlt(java.lang.String alt) { return (SignBlob) super.setAlt(alt); } @Override public SignBlob setCallback(java.lang.String callback) { return (SignBlob) super.setCallback(callback); } @Override public SignBlob setFields(java.lang.String fields) { return (SignBlob) super.setFields(fields); } @Override public SignBlob setKey(java.lang.String key) { return (SignBlob) super.setKey(key); } @Override public SignBlob 
setOauthToken(java.lang.String oauthToken) { return (SignBlob) super.setOauthToken(oauthToken); } @Override public SignBlob setPrettyPrint(java.lang.Boolean prettyPrint) { return (SignBlob) super.setPrettyPrint(prettyPrint); } @Override public SignBlob setQuotaUser(java.lang.String quotaUser) { return (SignBlob) super.setQuotaUser(quotaUser); } @Override public SignBlob setUploadType(java.lang.String uploadType) { return (SignBlob) super.setUploadType(uploadType); } @Override public SignBlob setUploadProtocol(java.lang.String uploadProtocol) { return (SignBlob) super.setUploadProtocol(uploadProtocol); } /** * Required. The resource name of the service account for which the credentials are * requested, in the following format: * `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` wildcard character is * required; replacing it with a project ID is invalid. */ @com.google.api.client.util.Key private java.lang.String name; /** Required. The resource name of the service account for which the credentials are requested, in the following format: `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` wildcard character is required; replacing it with a project ID is invalid. */ public java.lang.String getName() { return name; } /** * Required. The resource name of the service account for which the credentials are * requested, in the following format: * `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` wildcard character is * required; replacing it with a project ID is invalid. 
*/ public SignBlob setName(java.lang.String name) { if (!getSuppressPatternChecks()) { com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(), "Parameter name must conform to the pattern " + "^projects/[^/]+/serviceAccounts/[^/]+$"); } this.name = name; return this; } @Override public SignBlob set(String parameterName, Object value) { return (SignBlob) super.set(parameterName, value); } } /** * Signs a JWT using a service account's system-managed private key. * * Create a request for the method "serviceAccounts.signJwt". * * This request holds the parameters needed by the iamcredentials server. After setting any * optional parameters, call the {@link SignJwt#execute()} method to invoke the remote operation. * * @param name Required. The resource name of the service account for which the credentials are requested, in the * following format: `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` * wildcard character is required; replacing it with a project ID is invalid. * @param content the {@link com.google.api.services.iamcredentials.v1.model.SignJwtRequest} * @return the request */ public SignJwt signJwt(java.lang.String name, com.google.api.services.iamcredentials.v1.model.SignJwtRequest content) throws java.io.IOException { SignJwt result = new SignJwt(name, content); initialize(result); return result; } public class SignJwt extends IAMCredentialsRequest<com.google.api.services.iamcredentials.v1.model.SignJwtResponse> { private static final String REST_PATH = "v1/{+name}:signJwt"; private final java.util.regex.Pattern NAME_PATTERN = java.util.regex.Pattern.compile("^projects/[^/]+/serviceAccounts/[^/]+$"); /** * Signs a JWT using a service account's system-managed private key. * * Create a request for the method "serviceAccounts.signJwt". * * This request holds the parameters needed by the the iamcredentials server. 
After setting any * optional parameters, call the {@link SignJwt#execute()} method to invoke the remote operation. * <p> {@link * SignJwt#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} must * be called to initialize this instance immediately after invoking the constructor. </p> * * @param name Required. The resource name of the service account for which the credentials are requested, in the * following format: `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` * wildcard character is required; replacing it with a project ID is invalid. * @param content the {@link com.google.api.services.iamcredentials.v1.model.SignJwtRequest} * @since 1.13 */ protected SignJwt(java.lang.String name, com.google.api.services.iamcredentials.v1.model.SignJwtRequest content) { super(IAMCredentials.this, "POST", REST_PATH, content, com.google.api.services.iamcredentials.v1.model.SignJwtResponse.class); this.name = com.google.api.client.util.Preconditions.checkNotNull(name, "Required parameter name must be specified."); if (!getSuppressPatternChecks()) { com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(), "Parameter name must conform to the pattern " + "^projects/[^/]+/serviceAccounts/[^/]+$"); } } @Override public SignJwt set$Xgafv(java.lang.String $Xgafv) { return (SignJwt) super.set$Xgafv($Xgafv); } @Override public SignJwt setAccessToken(java.lang.String accessToken) { return (SignJwt) super.setAccessToken(accessToken); } @Override public SignJwt setAlt(java.lang.String alt) { return (SignJwt) super.setAlt(alt); } @Override public SignJwt setCallback(java.lang.String callback) { return (SignJwt) super.setCallback(callback); } @Override public SignJwt setFields(java.lang.String fields) { return (SignJwt) super.setFields(fields); } @Override public SignJwt setKey(java.lang.String key) { return (SignJwt) super.setKey(key); } @Override public SignJwt setOauthToken(java.lang.String oauthToken) { 
return (SignJwt) super.setOauthToken(oauthToken); } @Override public SignJwt setPrettyPrint(java.lang.Boolean prettyPrint) { return (SignJwt) super.setPrettyPrint(prettyPrint); } @Override public SignJwt setQuotaUser(java.lang.String quotaUser) { return (SignJwt) super.setQuotaUser(quotaUser); } @Override public SignJwt setUploadType(java.lang.String uploadType) { return (SignJwt) super.setUploadType(uploadType); } @Override public SignJwt setUploadProtocol(java.lang.String uploadProtocol) { return (SignJwt) super.setUploadProtocol(uploadProtocol); } /** * Required. The resource name of the service account for which the credentials are * requested, in the following format: * `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` wildcard character is * required; replacing it with a project ID is invalid. */ @com.google.api.client.util.Key private java.lang.String name; /** Required. The resource name of the service account for which the credentials are requested, in the following format: `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` wildcard character is required; replacing it with a project ID is invalid. */ public java.lang.String getName() { return name; } /** * Required. The resource name of the service account for which the credentials are * requested, in the following format: * `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` wildcard character is * required; replacing it with a project ID is invalid. */ public SignJwt setName(java.lang.String name) { if (!getSuppressPatternChecks()) { com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(), "Parameter name must conform to the pattern " + "^projects/[^/]+/serviceAccounts/[^/]+$"); } this.name = name; return this; } @Override public SignJwt set(String parameterName, Object value) { return (SignJwt) super.set(parameterName, value); } } } } /** * Builder for {@link IAMCredentials}. * * <p> * Implementation is not thread-safe. 
* </p> * * @since 1.3.0 */ public static final class Builder extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient.Builder { /** * Returns an instance of a new builder. * * @param transport HTTP transport, which should normally be: * <ul> * <li>Google App Engine: * {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li> * <li>Android: {@code newCompatibleTransport} from * {@code com.google.api.client.extensions.android.http.AndroidHttp}</li> * <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()} * </li> * </ul> * @param jsonFactory JSON factory, which may be: * <ul> * <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li> * <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li> * <li>Android Honeycomb or higher: * {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li> * </ul> * @param httpRequestInitializer HTTP request initializer or {@code null} for none * @since 1.7 */ public Builder(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory, com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) { super( transport, jsonFactory, DEFAULT_ROOT_URL, DEFAULT_SERVICE_PATH, httpRequestInitializer, false); setBatchPath(DEFAULT_BATCH_PATH); } /** Builds a new instance of {@link IAMCredentials}. 
*/ @Override public IAMCredentials build() { return new IAMCredentials(this); } @Override public Builder setRootUrl(String rootUrl) { return (Builder) super.setRootUrl(rootUrl); } @Override public Builder setServicePath(String servicePath) { return (Builder) super.setServicePath(servicePath); } @Override public Builder setBatchPath(String batchPath) { return (Builder) super.setBatchPath(batchPath); } @Override public Builder setHttpRequestInitializer(com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) { return (Builder) super.setHttpRequestInitializer(httpRequestInitializer); } @Override public Builder setApplicationName(String applicationName) { return (Builder) super.setApplicationName(applicationName); } @Override public Builder setSuppressPatternChecks(boolean suppressPatternChecks) { return (Builder) super.setSuppressPatternChecks(suppressPatternChecks); } @Override public Builder setSuppressRequiredParameterChecks(boolean suppressRequiredParameterChecks) { return (Builder) super.setSuppressRequiredParameterChecks(suppressRequiredParameterChecks); } @Override public Builder setSuppressAllChecks(boolean suppressAllChecks) { return (Builder) super.setSuppressAllChecks(suppressAllChecks); } /** * Set the {@link IAMCredentialsRequestInitializer}. * * @since 1.12 */ public Builder setIAMCredentialsRequestInitializer( IAMCredentialsRequestInitializer iamcredentialsRequestInitializer) { return (Builder) super.setGoogleClientRequestInitializer(iamcredentialsRequestInitializer); } @Override public Builder setGoogleClientRequestInitializer( com.google.api.client.googleapis.services.GoogleClientRequestInitializer googleClientRequestInitializer) { return (Builder) super.setGoogleClientRequestInitializer(googleClientRequestInitializer); } } }
googleapis/google-cloud-java
36,553
java-area120-tables/proto-google-area120-tables-v1alpha1/src/main/java/com/google/area120/tables/v1alpha1/ListWorkspacesResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/area120/tables/v1alpha1/tables.proto // Protobuf Java Version: 3.25.8 package com.google.area120.tables.v1alpha1; /** * * * <pre> * Response message for TablesService.ListWorkspaces. * </pre> * * Protobuf type {@code google.area120.tables.v1alpha1.ListWorkspacesResponse} */ public final class ListWorkspacesResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.area120.tables.v1alpha1.ListWorkspacesResponse) ListWorkspacesResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListWorkspacesResponse.newBuilder() to construct. 
private ListWorkspacesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListWorkspacesResponse() { workspaces_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListWorkspacesResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.area120.tables.v1alpha1.TablesProto .internal_static_google_area120_tables_v1alpha1_ListWorkspacesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.area120.tables.v1alpha1.TablesProto .internal_static_google_area120_tables_v1alpha1_ListWorkspacesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.area120.tables.v1alpha1.ListWorkspacesResponse.class, com.google.area120.tables.v1alpha1.ListWorkspacesResponse.Builder.class); } public static final int WORKSPACES_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.area120.tables.v1alpha1.Workspace> workspaces_; /** * * * <pre> * The list of workspaces. * </pre> * * <code>repeated .google.area120.tables.v1alpha1.Workspace workspaces = 1;</code> */ @java.lang.Override public java.util.List<com.google.area120.tables.v1alpha1.Workspace> getWorkspacesList() { return workspaces_; } /** * * * <pre> * The list of workspaces. * </pre> * * <code>repeated .google.area120.tables.v1alpha1.Workspace workspaces = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.area120.tables.v1alpha1.WorkspaceOrBuilder> getWorkspacesOrBuilderList() { return workspaces_; } /** * * * <pre> * The list of workspaces. 
* </pre> * * <code>repeated .google.area120.tables.v1alpha1.Workspace workspaces = 1;</code> */ @java.lang.Override public int getWorkspacesCount() { return workspaces_.size(); } /** * * * <pre> * The list of workspaces. * </pre> * * <code>repeated .google.area120.tables.v1alpha1.Workspace workspaces = 1;</code> */ @java.lang.Override public com.google.area120.tables.v1alpha1.Workspace getWorkspaces(int index) { return workspaces_.get(index); } /** * * * <pre> * The list of workspaces. * </pre> * * <code>repeated .google.area120.tables.v1alpha1.Workspace workspaces = 1;</code> */ @java.lang.Override public com.google.area120.tables.v1alpha1.WorkspaceOrBuilder getWorkspacesOrBuilder(int index) { return workspaces_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is empty, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is empty, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < workspaces_.size(); i++) { output.writeMessage(1, workspaces_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < workspaces_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, workspaces_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.area120.tables.v1alpha1.ListWorkspacesResponse)) { return super.equals(obj); } com.google.area120.tables.v1alpha1.ListWorkspacesResponse other = (com.google.area120.tables.v1alpha1.ListWorkspacesResponse) obj; if (!getWorkspacesList().equals(other.getWorkspacesList())) return false; if 
(!getNextPageToken().equals(other.getNextPageToken())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getWorkspacesCount() > 0) { hash = (37 * hash) + WORKSPACES_FIELD_NUMBER; hash = (53 * hash) + getWorkspacesList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.area120.tables.v1alpha1.ListWorkspacesResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.area120.tables.v1alpha1.ListWorkspacesResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.area120.tables.v1alpha1.ListWorkspacesResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.area120.tables.v1alpha1.ListWorkspacesResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.area120.tables.v1alpha1.ListWorkspacesResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.area120.tables.v1alpha1.ListWorkspacesResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.area120.tables.v1alpha1.ListWorkspacesResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.area120.tables.v1alpha1.ListWorkspacesResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.area120.tables.v1alpha1.ListWorkspacesResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.area120.tables.v1alpha1.ListWorkspacesResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.area120.tables.v1alpha1.ListWorkspacesResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.area120.tables.v1alpha1.ListWorkspacesResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( 
com.google.area120.tables.v1alpha1.ListWorkspacesResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response message for TablesService.ListWorkspaces. * </pre> * * Protobuf type {@code google.area120.tables.v1alpha1.ListWorkspacesResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.area120.tables.v1alpha1.ListWorkspacesResponse) com.google.area120.tables.v1alpha1.ListWorkspacesResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.area120.tables.v1alpha1.TablesProto .internal_static_google_area120_tables_v1alpha1_ListWorkspacesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.area120.tables.v1alpha1.TablesProto .internal_static_google_area120_tables_v1alpha1_ListWorkspacesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.area120.tables.v1alpha1.ListWorkspacesResponse.class, com.google.area120.tables.v1alpha1.ListWorkspacesResponse.Builder.class); } // Construct using com.google.area120.tables.v1alpha1.ListWorkspacesResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (workspacesBuilder_ == null) { workspaces_ = java.util.Collections.emptyList(); } else { workspaces_ = null; workspacesBuilder_.clear(); } bitField0_ = (bitField0_ & 
~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.area120.tables.v1alpha1.TablesProto .internal_static_google_area120_tables_v1alpha1_ListWorkspacesResponse_descriptor; } @java.lang.Override public com.google.area120.tables.v1alpha1.ListWorkspacesResponse getDefaultInstanceForType() { return com.google.area120.tables.v1alpha1.ListWorkspacesResponse.getDefaultInstance(); } @java.lang.Override public com.google.area120.tables.v1alpha1.ListWorkspacesResponse build() { com.google.area120.tables.v1alpha1.ListWorkspacesResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.area120.tables.v1alpha1.ListWorkspacesResponse buildPartial() { com.google.area120.tables.v1alpha1.ListWorkspacesResponse result = new com.google.area120.tables.v1alpha1.ListWorkspacesResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.area120.tables.v1alpha1.ListWorkspacesResponse result) { if (workspacesBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { workspaces_ = java.util.Collections.unmodifiableList(workspaces_); bitField0_ = (bitField0_ & ~0x00000001); } result.workspaces_ = workspaces_; } else { result.workspaces_ = workspacesBuilder_.build(); } } private void buildPartial0(com.google.area120.tables.v1alpha1.ListWorkspacesResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder 
clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.area120.tables.v1alpha1.ListWorkspacesResponse) { return mergeFrom((com.google.area120.tables.v1alpha1.ListWorkspacesResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.area120.tables.v1alpha1.ListWorkspacesResponse other) { if (other == com.google.area120.tables.v1alpha1.ListWorkspacesResponse.getDefaultInstance()) return this; if (workspacesBuilder_ == null) { if (!other.workspaces_.isEmpty()) { if (workspaces_.isEmpty()) { workspaces_ = other.workspaces_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureWorkspacesIsMutable(); workspaces_.addAll(other.workspaces_); } onChanged(); } } else { if (!other.workspaces_.isEmpty()) { if (workspacesBuilder_.isEmpty()) { workspacesBuilder_.dispose(); workspacesBuilder_ = null; workspaces_ = other.workspaces_; bitField0_ = (bitField0_ & ~0x00000001); workspacesBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getWorkspacesFieldBuilder() : null; } else { workspacesBuilder_.addAllMessages(other.workspaces_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.area120.tables.v1alpha1.Workspace m = input.readMessage( com.google.area120.tables.v1alpha1.Workspace.parser(), extensionRegistry); if (workspacesBuilder_ == null) { ensureWorkspacesIsMutable(); workspaces_.add(m); } else { workspacesBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.area120.tables.v1alpha1.Workspace> workspaces_ = java.util.Collections.emptyList(); private void ensureWorkspacesIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { workspaces_ = new java.util.ArrayList<com.google.area120.tables.v1alpha1.Workspace>(workspaces_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.area120.tables.v1alpha1.Workspace, com.google.area120.tables.v1alpha1.Workspace.Builder, 
com.google.area120.tables.v1alpha1.WorkspaceOrBuilder> workspacesBuilder_; /** * * * <pre> * The list of workspaces. * </pre> * * <code>repeated .google.area120.tables.v1alpha1.Workspace workspaces = 1;</code> */ public java.util.List<com.google.area120.tables.v1alpha1.Workspace> getWorkspacesList() { if (workspacesBuilder_ == null) { return java.util.Collections.unmodifiableList(workspaces_); } else { return workspacesBuilder_.getMessageList(); } } /** * * * <pre> * The list of workspaces. * </pre> * * <code>repeated .google.area120.tables.v1alpha1.Workspace workspaces = 1;</code> */ public int getWorkspacesCount() { if (workspacesBuilder_ == null) { return workspaces_.size(); } else { return workspacesBuilder_.getCount(); } } /** * * * <pre> * The list of workspaces. * </pre> * * <code>repeated .google.area120.tables.v1alpha1.Workspace workspaces = 1;</code> */ public com.google.area120.tables.v1alpha1.Workspace getWorkspaces(int index) { if (workspacesBuilder_ == null) { return workspaces_.get(index); } else { return workspacesBuilder_.getMessage(index); } } /** * * * <pre> * The list of workspaces. * </pre> * * <code>repeated .google.area120.tables.v1alpha1.Workspace workspaces = 1;</code> */ public Builder setWorkspaces(int index, com.google.area120.tables.v1alpha1.Workspace value) { if (workspacesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureWorkspacesIsMutable(); workspaces_.set(index, value); onChanged(); } else { workspacesBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The list of workspaces. 
* </pre> * * <code>repeated .google.area120.tables.v1alpha1.Workspace workspaces = 1;</code> */ public Builder setWorkspaces( int index, com.google.area120.tables.v1alpha1.Workspace.Builder builderForValue) { if (workspacesBuilder_ == null) { ensureWorkspacesIsMutable(); workspaces_.set(index, builderForValue.build()); onChanged(); } else { workspacesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of workspaces. * </pre> * * <code>repeated .google.area120.tables.v1alpha1.Workspace workspaces = 1;</code> */ public Builder addWorkspaces(com.google.area120.tables.v1alpha1.Workspace value) { if (workspacesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureWorkspacesIsMutable(); workspaces_.add(value); onChanged(); } else { workspacesBuilder_.addMessage(value); } return this; } /** * * * <pre> * The list of workspaces. * </pre> * * <code>repeated .google.area120.tables.v1alpha1.Workspace workspaces = 1;</code> */ public Builder addWorkspaces(int index, com.google.area120.tables.v1alpha1.Workspace value) { if (workspacesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureWorkspacesIsMutable(); workspaces_.add(index, value); onChanged(); } else { workspacesBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The list of workspaces. * </pre> * * <code>repeated .google.area120.tables.v1alpha1.Workspace workspaces = 1;</code> */ public Builder addWorkspaces( com.google.area120.tables.v1alpha1.Workspace.Builder builderForValue) { if (workspacesBuilder_ == null) { ensureWorkspacesIsMutable(); workspaces_.add(builderForValue.build()); onChanged(); } else { workspacesBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The list of workspaces. 
* </pre> * * <code>repeated .google.area120.tables.v1alpha1.Workspace workspaces = 1;</code> */ public Builder addWorkspaces( int index, com.google.area120.tables.v1alpha1.Workspace.Builder builderForValue) { if (workspacesBuilder_ == null) { ensureWorkspacesIsMutable(); workspaces_.add(index, builderForValue.build()); onChanged(); } else { workspacesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of workspaces. * </pre> * * <code>repeated .google.area120.tables.v1alpha1.Workspace workspaces = 1;</code> */ public Builder addAllWorkspaces( java.lang.Iterable<? extends com.google.area120.tables.v1alpha1.Workspace> values) { if (workspacesBuilder_ == null) { ensureWorkspacesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, workspaces_); onChanged(); } else { workspacesBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The list of workspaces. * </pre> * * <code>repeated .google.area120.tables.v1alpha1.Workspace workspaces = 1;</code> */ public Builder clearWorkspaces() { if (workspacesBuilder_ == null) { workspaces_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { workspacesBuilder_.clear(); } return this; } /** * * * <pre> * The list of workspaces. * </pre> * * <code>repeated .google.area120.tables.v1alpha1.Workspace workspaces = 1;</code> */ public Builder removeWorkspaces(int index) { if (workspacesBuilder_ == null) { ensureWorkspacesIsMutable(); workspaces_.remove(index); onChanged(); } else { workspacesBuilder_.remove(index); } return this; } /** * * * <pre> * The list of workspaces. * </pre> * * <code>repeated .google.area120.tables.v1alpha1.Workspace workspaces = 1;</code> */ public com.google.area120.tables.v1alpha1.Workspace.Builder getWorkspacesBuilder(int index) { return getWorkspacesFieldBuilder().getBuilder(index); } /** * * * <pre> * The list of workspaces. 
* </pre> * * <code>repeated .google.area120.tables.v1alpha1.Workspace workspaces = 1;</code> */ public com.google.area120.tables.v1alpha1.WorkspaceOrBuilder getWorkspacesOrBuilder(int index) { if (workspacesBuilder_ == null) { return workspaces_.get(index); } else { return workspacesBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The list of workspaces. * </pre> * * <code>repeated .google.area120.tables.v1alpha1.Workspace workspaces = 1;</code> */ public java.util.List<? extends com.google.area120.tables.v1alpha1.WorkspaceOrBuilder> getWorkspacesOrBuilderList() { if (workspacesBuilder_ != null) { return workspacesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(workspaces_); } } /** * * * <pre> * The list of workspaces. * </pre> * * <code>repeated .google.area120.tables.v1alpha1.Workspace workspaces = 1;</code> */ public com.google.area120.tables.v1alpha1.Workspace.Builder addWorkspacesBuilder() { return getWorkspacesFieldBuilder() .addBuilder(com.google.area120.tables.v1alpha1.Workspace.getDefaultInstance()); } /** * * * <pre> * The list of workspaces. * </pre> * * <code>repeated .google.area120.tables.v1alpha1.Workspace workspaces = 1;</code> */ public com.google.area120.tables.v1alpha1.Workspace.Builder addWorkspacesBuilder(int index) { return getWorkspacesFieldBuilder() .addBuilder(index, com.google.area120.tables.v1alpha1.Workspace.getDefaultInstance()); } /** * * * <pre> * The list of workspaces. 
* </pre> * * <code>repeated .google.area120.tables.v1alpha1.Workspace workspaces = 1;</code> */ public java.util.List<com.google.area120.tables.v1alpha1.Workspace.Builder> getWorkspacesBuilderList() { return getWorkspacesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.area120.tables.v1alpha1.Workspace, com.google.area120.tables.v1alpha1.Workspace.Builder, com.google.area120.tables.v1alpha1.WorkspaceOrBuilder> getWorkspacesFieldBuilder() { if (workspacesBuilder_ == null) { workspacesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.area120.tables.v1alpha1.Workspace, com.google.area120.tables.v1alpha1.Workspace.Builder, com.google.area120.tables.v1alpha1.WorkspaceOrBuilder>( workspaces_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); workspaces_ = null; } return workspacesBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is empty, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is empty, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is empty, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is empty, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is empty, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.area120.tables.v1alpha1.ListWorkspacesResponse) } // @@protoc_insertion_point(class_scope:google.area120.tables.v1alpha1.ListWorkspacesResponse) private static final com.google.area120.tables.v1alpha1.ListWorkspacesResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.area120.tables.v1alpha1.ListWorkspacesResponse(); } public static com.google.area120.tables.v1alpha1.ListWorkspacesResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListWorkspacesResponse> PARSER = new com.google.protobuf.AbstractParser<ListWorkspacesResponse>() { @java.lang.Override public ListWorkspacesResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return 
builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListWorkspacesResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListWorkspacesResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.area120.tables.v1alpha1.ListWorkspacesResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
36,962
java-aiplatform/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/TensorboardServiceSettings.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.aiplatform.v1; import static com.google.cloud.aiplatform.v1.TensorboardServiceClient.ExportTensorboardTimeSeriesDataPagedResponse; import static com.google.cloud.aiplatform.v1.TensorboardServiceClient.ListLocationsPagedResponse; import static com.google.cloud.aiplatform.v1.TensorboardServiceClient.ListTensorboardExperimentsPagedResponse; import static com.google.cloud.aiplatform.v1.TensorboardServiceClient.ListTensorboardRunsPagedResponse; import static com.google.cloud.aiplatform.v1.TensorboardServiceClient.ListTensorboardTimeSeriesPagedResponse; import static com.google.cloud.aiplatform.v1.TensorboardServiceClient.ListTensorboardsPagedResponse; import com.google.api.core.ApiFunction; import com.google.api.gax.core.GoogleCredentialsProvider; import com.google.api.gax.core.InstantiatingExecutorProvider; import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider; import com.google.api.gax.rpc.ApiClientHeaderProvider; import com.google.api.gax.rpc.ClientContext; import com.google.api.gax.rpc.ClientSettings; import com.google.api.gax.rpc.OperationCallSettings; import com.google.api.gax.rpc.PagedCallSettings; import com.google.api.gax.rpc.ServerStreamingCallSettings; import com.google.api.gax.rpc.TransportChannelProvider; import com.google.api.gax.rpc.UnaryCallSettings; import com.google.cloud.aiplatform.v1.stub.TensorboardServiceStubSettings; import 
com.google.cloud.location.GetLocationRequest; import com.google.cloud.location.ListLocationsRequest; import com.google.cloud.location.ListLocationsResponse; import com.google.cloud.location.Location; import com.google.iam.v1.GetIamPolicyRequest; import com.google.iam.v1.Policy; import com.google.iam.v1.SetIamPolicyRequest; import com.google.iam.v1.TestIamPermissionsRequest; import com.google.iam.v1.TestIamPermissionsResponse; import com.google.longrunning.Operation; import com.google.protobuf.Empty; import java.io.IOException; import java.util.List; import javax.annotation.Generated; // AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link TensorboardServiceClient}. * * <p>The default instance has everything set to sensible defaults: * * <ul> * <li>The default service address (aiplatform.googleapis.com) and default port (443) are used. * <li>Credentials are acquired automatically through Application Default Credentials. * <li>Retries are configured for idempotent methods but not for non-idempotent methods. * </ul> * * <p>The builder of this class is recursive, so contained classes are themselves builders. When * build() is called, the tree of builders is called to create the complete settings object. * * <p>For example, to set the * [RetrySettings](https://cloud.google.com/java/docs/reference/gax/latest/com.google.api.gax.retrying.RetrySettings) * of getTensorboard: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * TensorboardServiceSettings.Builder tensorboardServiceSettingsBuilder = * TensorboardServiceSettings.newBuilder(); * tensorboardServiceSettingsBuilder * .getTensorboardSettings() * .setRetrySettings( * tensorboardServiceSettingsBuilder * .getTensorboardSettings() * .getRetrySettings() * .toBuilder() * .setInitialRetryDelayDuration(Duration.ofSeconds(1)) * .setInitialRpcTimeoutDuration(Duration.ofSeconds(5)) * .setMaxAttempts(5) * .setMaxRetryDelayDuration(Duration.ofSeconds(30)) * .setMaxRpcTimeoutDuration(Duration.ofSeconds(60)) * .setRetryDelayMultiplier(1.3) * .setRpcTimeoutMultiplier(1.5) * .setTotalTimeoutDuration(Duration.ofSeconds(300)) * .build()); * TensorboardServiceSettings tensorboardServiceSettings = * tensorboardServiceSettingsBuilder.build(); * }</pre> * * Please refer to the [Client Side Retry * Guide](https://github.com/googleapis/google-cloud-java/blob/main/docs/client_retries.md) for * additional support in setting retries. * * <p>To configure the RetrySettings of a Long Running Operation method, create an * OperationTimedPollAlgorithm object and update the RPC's polling algorithm. For example, to * configure the RetrySettings for createTensorboard: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * TensorboardServiceSettings.Builder tensorboardServiceSettingsBuilder = * TensorboardServiceSettings.newBuilder(); * TimedRetryAlgorithm timedRetryAlgorithm = * OperationalTimedPollAlgorithm.create( * RetrySettings.newBuilder() * .setInitialRetryDelayDuration(Duration.ofMillis(500)) * .setRetryDelayMultiplier(1.5) * .setMaxRetryDelayDuration(Duration.ofMillis(5000)) * .setTotalTimeoutDuration(Duration.ofHours(24)) * .build()); * tensorboardServiceSettingsBuilder * .createClusterOperationSettings() * .setPollingAlgorithm(timedRetryAlgorithm) * .build(); * }</pre> */ @Generated("by gapic-generator-java") public class TensorboardServiceSettings extends ClientSettings<TensorboardServiceSettings> { /** Returns the object with the settings used for calls to createTensorboard. */ public UnaryCallSettings<CreateTensorboardRequest, Operation> createTensorboardSettings() { return ((TensorboardServiceStubSettings) getStubSettings()).createTensorboardSettings(); } /** Returns the object with the settings used for calls to createTensorboard. */ public OperationCallSettings< CreateTensorboardRequest, Tensorboard, CreateTensorboardOperationMetadata> createTensorboardOperationSettings() { return ((TensorboardServiceStubSettings) getStubSettings()) .createTensorboardOperationSettings(); } /** Returns the object with the settings used for calls to getTensorboard. */ public UnaryCallSettings<GetTensorboardRequest, Tensorboard> getTensorboardSettings() { return ((TensorboardServiceStubSettings) getStubSettings()).getTensorboardSettings(); } /** Returns the object with the settings used for calls to updateTensorboard. 
*/ public UnaryCallSettings<UpdateTensorboardRequest, Operation> updateTensorboardSettings() { return ((TensorboardServiceStubSettings) getStubSettings()).updateTensorboardSettings(); } /** Returns the object with the settings used for calls to updateTensorboard. */ public OperationCallSettings< UpdateTensorboardRequest, Tensorboard, UpdateTensorboardOperationMetadata> updateTensorboardOperationSettings() { return ((TensorboardServiceStubSettings) getStubSettings()) .updateTensorboardOperationSettings(); } /** Returns the object with the settings used for calls to listTensorboards. */ public PagedCallSettings< ListTensorboardsRequest, ListTensorboardsResponse, ListTensorboardsPagedResponse> listTensorboardsSettings() { return ((TensorboardServiceStubSettings) getStubSettings()).listTensorboardsSettings(); } /** Returns the object with the settings used for calls to deleteTensorboard. */ public UnaryCallSettings<DeleteTensorboardRequest, Operation> deleteTensorboardSettings() { return ((TensorboardServiceStubSettings) getStubSettings()).deleteTensorboardSettings(); } /** Returns the object with the settings used for calls to deleteTensorboard. */ public OperationCallSettings<DeleteTensorboardRequest, Empty, DeleteOperationMetadata> deleteTensorboardOperationSettings() { return ((TensorboardServiceStubSettings) getStubSettings()) .deleteTensorboardOperationSettings(); } /** Returns the object with the settings used for calls to readTensorboardUsage. */ public UnaryCallSettings<ReadTensorboardUsageRequest, ReadTensorboardUsageResponse> readTensorboardUsageSettings() { return ((TensorboardServiceStubSettings) getStubSettings()).readTensorboardUsageSettings(); } /** Returns the object with the settings used for calls to readTensorboardSize. 
*/ public UnaryCallSettings<ReadTensorboardSizeRequest, ReadTensorboardSizeResponse> readTensorboardSizeSettings() { return ((TensorboardServiceStubSettings) getStubSettings()).readTensorboardSizeSettings(); } /** Returns the object with the settings used for calls to createTensorboardExperiment. */ public UnaryCallSettings<CreateTensorboardExperimentRequest, TensorboardExperiment> createTensorboardExperimentSettings() { return ((TensorboardServiceStubSettings) getStubSettings()) .createTensorboardExperimentSettings(); } /** Returns the object with the settings used for calls to getTensorboardExperiment. */ public UnaryCallSettings<GetTensorboardExperimentRequest, TensorboardExperiment> getTensorboardExperimentSettings() { return ((TensorboardServiceStubSettings) getStubSettings()).getTensorboardExperimentSettings(); } /** Returns the object with the settings used for calls to updateTensorboardExperiment. */ public UnaryCallSettings<UpdateTensorboardExperimentRequest, TensorboardExperiment> updateTensorboardExperimentSettings() { return ((TensorboardServiceStubSettings) getStubSettings()) .updateTensorboardExperimentSettings(); } /** Returns the object with the settings used for calls to listTensorboardExperiments. */ public PagedCallSettings< ListTensorboardExperimentsRequest, ListTensorboardExperimentsResponse, ListTensorboardExperimentsPagedResponse> listTensorboardExperimentsSettings() { return ((TensorboardServiceStubSettings) getStubSettings()) .listTensorboardExperimentsSettings(); } /** Returns the object with the settings used for calls to deleteTensorboardExperiment. */ public UnaryCallSettings<DeleteTensorboardExperimentRequest, Operation> deleteTensorboardExperimentSettings() { return ((TensorboardServiceStubSettings) getStubSettings()) .deleteTensorboardExperimentSettings(); } /** Returns the object with the settings used for calls to deleteTensorboardExperiment. 
*/ public OperationCallSettings<DeleteTensorboardExperimentRequest, Empty, DeleteOperationMetadata> deleteTensorboardExperimentOperationSettings() { return ((TensorboardServiceStubSettings) getStubSettings()) .deleteTensorboardExperimentOperationSettings(); } /** Returns the object with the settings used for calls to createTensorboardRun. */ public UnaryCallSettings<CreateTensorboardRunRequest, TensorboardRun> createTensorboardRunSettings() { return ((TensorboardServiceStubSettings) getStubSettings()).createTensorboardRunSettings(); } /** Returns the object with the settings used for calls to batchCreateTensorboardRuns. */ public UnaryCallSettings<BatchCreateTensorboardRunsRequest, BatchCreateTensorboardRunsResponse> batchCreateTensorboardRunsSettings() { return ((TensorboardServiceStubSettings) getStubSettings()) .batchCreateTensorboardRunsSettings(); } /** Returns the object with the settings used for calls to getTensorboardRun. */ public UnaryCallSettings<GetTensorboardRunRequest, TensorboardRun> getTensorboardRunSettings() { return ((TensorboardServiceStubSettings) getStubSettings()).getTensorboardRunSettings(); } /** Returns the object with the settings used for calls to updateTensorboardRun. */ public UnaryCallSettings<UpdateTensorboardRunRequest, TensorboardRun> updateTensorboardRunSettings() { return ((TensorboardServiceStubSettings) getStubSettings()).updateTensorboardRunSettings(); } /** Returns the object with the settings used for calls to listTensorboardRuns. */ public PagedCallSettings< ListTensorboardRunsRequest, ListTensorboardRunsResponse, ListTensorboardRunsPagedResponse> listTensorboardRunsSettings() { return ((TensorboardServiceStubSettings) getStubSettings()).listTensorboardRunsSettings(); } /** Returns the object with the settings used for calls to deleteTensorboardRun. 
*/ public UnaryCallSettings<DeleteTensorboardRunRequest, Operation> deleteTensorboardRunSettings() { return ((TensorboardServiceStubSettings) getStubSettings()).deleteTensorboardRunSettings(); } /** Returns the object with the settings used for calls to deleteTensorboardRun. */ public OperationCallSettings<DeleteTensorboardRunRequest, Empty, DeleteOperationMetadata> deleteTensorboardRunOperationSettings() { return ((TensorboardServiceStubSettings) getStubSettings()) .deleteTensorboardRunOperationSettings(); } /** Returns the object with the settings used for calls to batchCreateTensorboardTimeSeries. */ public UnaryCallSettings< BatchCreateTensorboardTimeSeriesRequest, BatchCreateTensorboardTimeSeriesResponse> batchCreateTensorboardTimeSeriesSettings() { return ((TensorboardServiceStubSettings) getStubSettings()) .batchCreateTensorboardTimeSeriesSettings(); } /** Returns the object with the settings used for calls to createTensorboardTimeSeries. */ public UnaryCallSettings<CreateTensorboardTimeSeriesRequest, TensorboardTimeSeries> createTensorboardTimeSeriesSettings() { return ((TensorboardServiceStubSettings) getStubSettings()) .createTensorboardTimeSeriesSettings(); } /** Returns the object with the settings used for calls to getTensorboardTimeSeries. */ public UnaryCallSettings<GetTensorboardTimeSeriesRequest, TensorboardTimeSeries> getTensorboardTimeSeriesSettings() { return ((TensorboardServiceStubSettings) getStubSettings()).getTensorboardTimeSeriesSettings(); } /** Returns the object with the settings used for calls to updateTensorboardTimeSeries. */ public UnaryCallSettings<UpdateTensorboardTimeSeriesRequest, TensorboardTimeSeries> updateTensorboardTimeSeriesSettings() { return ((TensorboardServiceStubSettings) getStubSettings()) .updateTensorboardTimeSeriesSettings(); } /** Returns the object with the settings used for calls to listTensorboardTimeSeries. 
*/ public PagedCallSettings< ListTensorboardTimeSeriesRequest, ListTensorboardTimeSeriesResponse, ListTensorboardTimeSeriesPagedResponse> listTensorboardTimeSeriesSettings() { return ((TensorboardServiceStubSettings) getStubSettings()).listTensorboardTimeSeriesSettings(); } /** Returns the object with the settings used for calls to deleteTensorboardTimeSeries. */ public UnaryCallSettings<DeleteTensorboardTimeSeriesRequest, Operation> deleteTensorboardTimeSeriesSettings() { return ((TensorboardServiceStubSettings) getStubSettings()) .deleteTensorboardTimeSeriesSettings(); } /** Returns the object with the settings used for calls to deleteTensorboardTimeSeries. */ public OperationCallSettings<DeleteTensorboardTimeSeriesRequest, Empty, DeleteOperationMetadata> deleteTensorboardTimeSeriesOperationSettings() { return ((TensorboardServiceStubSettings) getStubSettings()) .deleteTensorboardTimeSeriesOperationSettings(); } /** Returns the object with the settings used for calls to batchReadTensorboardTimeSeriesData. */ public UnaryCallSettings< BatchReadTensorboardTimeSeriesDataRequest, BatchReadTensorboardTimeSeriesDataResponse> batchReadTensorboardTimeSeriesDataSettings() { return ((TensorboardServiceStubSettings) getStubSettings()) .batchReadTensorboardTimeSeriesDataSettings(); } /** Returns the object with the settings used for calls to readTensorboardTimeSeriesData. */ public UnaryCallSettings< ReadTensorboardTimeSeriesDataRequest, ReadTensorboardTimeSeriesDataResponse> readTensorboardTimeSeriesDataSettings() { return ((TensorboardServiceStubSettings) getStubSettings()) .readTensorboardTimeSeriesDataSettings(); } /** Returns the object with the settings used for calls to readTensorboardBlobData. 
*/ public ServerStreamingCallSettings< ReadTensorboardBlobDataRequest, ReadTensorboardBlobDataResponse> readTensorboardBlobDataSettings() { return ((TensorboardServiceStubSettings) getStubSettings()).readTensorboardBlobDataSettings(); } /** Returns the object with the settings used for calls to writeTensorboardExperimentData. */ public UnaryCallSettings< WriteTensorboardExperimentDataRequest, WriteTensorboardExperimentDataResponse> writeTensorboardExperimentDataSettings() { return ((TensorboardServiceStubSettings) getStubSettings()) .writeTensorboardExperimentDataSettings(); } /** Returns the object with the settings used for calls to writeTensorboardRunData. */ public UnaryCallSettings<WriteTensorboardRunDataRequest, WriteTensorboardRunDataResponse> writeTensorboardRunDataSettings() { return ((TensorboardServiceStubSettings) getStubSettings()).writeTensorboardRunDataSettings(); } /** Returns the object with the settings used for calls to exportTensorboardTimeSeriesData. */ public PagedCallSettings< ExportTensorboardTimeSeriesDataRequest, ExportTensorboardTimeSeriesDataResponse, ExportTensorboardTimeSeriesDataPagedResponse> exportTensorboardTimeSeriesDataSettings() { return ((TensorboardServiceStubSettings) getStubSettings()) .exportTensorboardTimeSeriesDataSettings(); } /** Returns the object with the settings used for calls to listLocations. */ public PagedCallSettings<ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse> listLocationsSettings() { return ((TensorboardServiceStubSettings) getStubSettings()).listLocationsSettings(); } /** Returns the object with the settings used for calls to getLocation. */ public UnaryCallSettings<GetLocationRequest, Location> getLocationSettings() { return ((TensorboardServiceStubSettings) getStubSettings()).getLocationSettings(); } /** Returns the object with the settings used for calls to setIamPolicy. 
*/ public UnaryCallSettings<SetIamPolicyRequest, Policy> setIamPolicySettings() { return ((TensorboardServiceStubSettings) getStubSettings()).setIamPolicySettings(); } /** Returns the object with the settings used for calls to getIamPolicy. */ public UnaryCallSettings<GetIamPolicyRequest, Policy> getIamPolicySettings() { return ((TensorboardServiceStubSettings) getStubSettings()).getIamPolicySettings(); } /** Returns the object with the settings used for calls to testIamPermissions. */ public UnaryCallSettings<TestIamPermissionsRequest, TestIamPermissionsResponse> testIamPermissionsSettings() { return ((TensorboardServiceStubSettings) getStubSettings()).testIamPermissionsSettings(); } public static final TensorboardServiceSettings create(TensorboardServiceStubSettings stub) throws IOException { return new TensorboardServiceSettings.Builder(stub.toBuilder()).build(); } /** Returns a builder for the default ExecutorProvider for this service. */ public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() { return TensorboardServiceStubSettings.defaultExecutorProviderBuilder(); } /** Returns the default service endpoint. */ public static String getDefaultEndpoint() { return TensorboardServiceStubSettings.getDefaultEndpoint(); } /** Returns the default service scopes. */ public static List<String> getDefaultServiceScopes() { return TensorboardServiceStubSettings.getDefaultServiceScopes(); } /** Returns a builder for the default credentials for this service. */ public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() { return TensorboardServiceStubSettings.defaultCredentialsProviderBuilder(); } /** Returns a builder for the default ChannelProvider for this service. 
*/ public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() { return TensorboardServiceStubSettings.defaultGrpcTransportProviderBuilder(); } public static TransportChannelProvider defaultTransportChannelProvider() { return TensorboardServiceStubSettings.defaultTransportChannelProvider(); } public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() { return TensorboardServiceStubSettings.defaultApiClientHeaderProviderBuilder(); } /** Returns a new builder for this class. */ public static Builder newBuilder() { return Builder.createDefault(); } /** Returns a new builder for this class. */ public static Builder newBuilder(ClientContext clientContext) { return new Builder(clientContext); } /** Returns a builder containing all the values of this settings class. */ public Builder toBuilder() { return new Builder(this); } protected TensorboardServiceSettings(Builder settingsBuilder) throws IOException { super(settingsBuilder); } /** Builder for TensorboardServiceSettings. */ public static class Builder extends ClientSettings.Builder<TensorboardServiceSettings, Builder> { protected Builder() throws IOException { this(((ClientContext) null)); } protected Builder(ClientContext clientContext) { super(TensorboardServiceStubSettings.newBuilder(clientContext)); } protected Builder(TensorboardServiceSettings settings) { super(settings.getStubSettings().toBuilder()); } protected Builder(TensorboardServiceStubSettings.Builder stubSettings) { super(stubSettings); } private static Builder createDefault() { return new Builder(TensorboardServiceStubSettings.newBuilder()); } public TensorboardServiceStubSettings.Builder getStubSettingsBuilder() { return ((TensorboardServiceStubSettings.Builder) getStubSettings()); } /** * Applies the given settings updater function to all of the unary API methods in this service. * * <p>Note: This method does not support applying settings to streaming methods. 
*/ public Builder applyToAllUnaryMethods( ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) { super.applyToAllUnaryMethods( getStubSettingsBuilder().unaryMethodSettingsBuilders(), settingsUpdater); return this; } /** Returns the builder for the settings used for calls to createTensorboard. */ public UnaryCallSettings.Builder<CreateTensorboardRequest, Operation> createTensorboardSettings() { return getStubSettingsBuilder().createTensorboardSettings(); } /** Returns the builder for the settings used for calls to createTensorboard. */ public OperationCallSettings.Builder< CreateTensorboardRequest, Tensorboard, CreateTensorboardOperationMetadata> createTensorboardOperationSettings() { return getStubSettingsBuilder().createTensorboardOperationSettings(); } /** Returns the builder for the settings used for calls to getTensorboard. */ public UnaryCallSettings.Builder<GetTensorboardRequest, Tensorboard> getTensorboardSettings() { return getStubSettingsBuilder().getTensorboardSettings(); } /** Returns the builder for the settings used for calls to updateTensorboard. */ public UnaryCallSettings.Builder<UpdateTensorboardRequest, Operation> updateTensorboardSettings() { return getStubSettingsBuilder().updateTensorboardSettings(); } /** Returns the builder for the settings used for calls to updateTensorboard. */ public OperationCallSettings.Builder< UpdateTensorboardRequest, Tensorboard, UpdateTensorboardOperationMetadata> updateTensorboardOperationSettings() { return getStubSettingsBuilder().updateTensorboardOperationSettings(); } /** Returns the builder for the settings used for calls to listTensorboards. */ public PagedCallSettings.Builder< ListTensorboardsRequest, ListTensorboardsResponse, ListTensorboardsPagedResponse> listTensorboardsSettings() { return getStubSettingsBuilder().listTensorboardsSettings(); } /** Returns the builder for the settings used for calls to deleteTensorboard. 
*/ public UnaryCallSettings.Builder<DeleteTensorboardRequest, Operation> deleteTensorboardSettings() { return getStubSettingsBuilder().deleteTensorboardSettings(); } /** Returns the builder for the settings used for calls to deleteTensorboard. */ public OperationCallSettings.Builder<DeleteTensorboardRequest, Empty, DeleteOperationMetadata> deleteTensorboardOperationSettings() { return getStubSettingsBuilder().deleteTensorboardOperationSettings(); } /** Returns the builder for the settings used for calls to readTensorboardUsage. */ public UnaryCallSettings.Builder<ReadTensorboardUsageRequest, ReadTensorboardUsageResponse> readTensorboardUsageSettings() { return getStubSettingsBuilder().readTensorboardUsageSettings(); } /** Returns the builder for the settings used for calls to readTensorboardSize. */ public UnaryCallSettings.Builder<ReadTensorboardSizeRequest, ReadTensorboardSizeResponse> readTensorboardSizeSettings() { return getStubSettingsBuilder().readTensorboardSizeSettings(); } /** Returns the builder for the settings used for calls to createTensorboardExperiment. */ public UnaryCallSettings.Builder<CreateTensorboardExperimentRequest, TensorboardExperiment> createTensorboardExperimentSettings() { return getStubSettingsBuilder().createTensorboardExperimentSettings(); } /** Returns the builder for the settings used for calls to getTensorboardExperiment. */ public UnaryCallSettings.Builder<GetTensorboardExperimentRequest, TensorboardExperiment> getTensorboardExperimentSettings() { return getStubSettingsBuilder().getTensorboardExperimentSettings(); } /** Returns the builder for the settings used for calls to updateTensorboardExperiment. */ public UnaryCallSettings.Builder<UpdateTensorboardExperimentRequest, TensorboardExperiment> updateTensorboardExperimentSettings() { return getStubSettingsBuilder().updateTensorboardExperimentSettings(); } /** Returns the builder for the settings used for calls to listTensorboardExperiments. 
*/ public PagedCallSettings.Builder< ListTensorboardExperimentsRequest, ListTensorboardExperimentsResponse, ListTensorboardExperimentsPagedResponse> listTensorboardExperimentsSettings() { return getStubSettingsBuilder().listTensorboardExperimentsSettings(); } /** Returns the builder for the settings used for calls to deleteTensorboardExperiment. */ public UnaryCallSettings.Builder<DeleteTensorboardExperimentRequest, Operation> deleteTensorboardExperimentSettings() { return getStubSettingsBuilder().deleteTensorboardExperimentSettings(); } /** Returns the builder for the settings used for calls to deleteTensorboardExperiment. */ public OperationCallSettings.Builder< DeleteTensorboardExperimentRequest, Empty, DeleteOperationMetadata> deleteTensorboardExperimentOperationSettings() { return getStubSettingsBuilder().deleteTensorboardExperimentOperationSettings(); } /** Returns the builder for the settings used for calls to createTensorboardRun. */ public UnaryCallSettings.Builder<CreateTensorboardRunRequest, TensorboardRun> createTensorboardRunSettings() { return getStubSettingsBuilder().createTensorboardRunSettings(); } /** Returns the builder for the settings used for calls to batchCreateTensorboardRuns. */ public UnaryCallSettings.Builder< BatchCreateTensorboardRunsRequest, BatchCreateTensorboardRunsResponse> batchCreateTensorboardRunsSettings() { return getStubSettingsBuilder().batchCreateTensorboardRunsSettings(); } /** Returns the builder for the settings used for calls to getTensorboardRun. */ public UnaryCallSettings.Builder<GetTensorboardRunRequest, TensorboardRun> getTensorboardRunSettings() { return getStubSettingsBuilder().getTensorboardRunSettings(); } /** Returns the builder for the settings used for calls to updateTensorboardRun. 
*/ public UnaryCallSettings.Builder<UpdateTensorboardRunRequest, TensorboardRun> updateTensorboardRunSettings() { return getStubSettingsBuilder().updateTensorboardRunSettings(); } /** Returns the builder for the settings used for calls to listTensorboardRuns. */ public PagedCallSettings.Builder< ListTensorboardRunsRequest, ListTensorboardRunsResponse, ListTensorboardRunsPagedResponse> listTensorboardRunsSettings() { return getStubSettingsBuilder().listTensorboardRunsSettings(); } /** Returns the builder for the settings used for calls to deleteTensorboardRun. */ public UnaryCallSettings.Builder<DeleteTensorboardRunRequest, Operation> deleteTensorboardRunSettings() { return getStubSettingsBuilder().deleteTensorboardRunSettings(); } /** Returns the builder for the settings used for calls to deleteTensorboardRun. */ public OperationCallSettings.Builder< DeleteTensorboardRunRequest, Empty, DeleteOperationMetadata> deleteTensorboardRunOperationSettings() { return getStubSettingsBuilder().deleteTensorboardRunOperationSettings(); } /** Returns the builder for the settings used for calls to batchCreateTensorboardTimeSeries. */ public UnaryCallSettings.Builder< BatchCreateTensorboardTimeSeriesRequest, BatchCreateTensorboardTimeSeriesResponse> batchCreateTensorboardTimeSeriesSettings() { return getStubSettingsBuilder().batchCreateTensorboardTimeSeriesSettings(); } /** Returns the builder for the settings used for calls to createTensorboardTimeSeries. */ public UnaryCallSettings.Builder<CreateTensorboardTimeSeriesRequest, TensorboardTimeSeries> createTensorboardTimeSeriesSettings() { return getStubSettingsBuilder().createTensorboardTimeSeriesSettings(); } /** Returns the builder for the settings used for calls to getTensorboardTimeSeries. 
*/ public UnaryCallSettings.Builder<GetTensorboardTimeSeriesRequest, TensorboardTimeSeries> getTensorboardTimeSeriesSettings() { return getStubSettingsBuilder().getTensorboardTimeSeriesSettings(); } /** Returns the builder for the settings used for calls to updateTensorboardTimeSeries. */ public UnaryCallSettings.Builder<UpdateTensorboardTimeSeriesRequest, TensorboardTimeSeries> updateTensorboardTimeSeriesSettings() { return getStubSettingsBuilder().updateTensorboardTimeSeriesSettings(); } /** Returns the builder for the settings used for calls to listTensorboardTimeSeries. */ public PagedCallSettings.Builder< ListTensorboardTimeSeriesRequest, ListTensorboardTimeSeriesResponse, ListTensorboardTimeSeriesPagedResponse> listTensorboardTimeSeriesSettings() { return getStubSettingsBuilder().listTensorboardTimeSeriesSettings(); } /** Returns the builder for the settings used for calls to deleteTensorboardTimeSeries. */ public UnaryCallSettings.Builder<DeleteTensorboardTimeSeriesRequest, Operation> deleteTensorboardTimeSeriesSettings() { return getStubSettingsBuilder().deleteTensorboardTimeSeriesSettings(); } /** Returns the builder for the settings used for calls to deleteTensorboardTimeSeries. */ public OperationCallSettings.Builder< DeleteTensorboardTimeSeriesRequest, Empty, DeleteOperationMetadata> deleteTensorboardTimeSeriesOperationSettings() { return getStubSettingsBuilder().deleteTensorboardTimeSeriesOperationSettings(); } /** * Returns the builder for the settings used for calls to batchReadTensorboardTimeSeriesData. */ public UnaryCallSettings.Builder< BatchReadTensorboardTimeSeriesDataRequest, BatchReadTensorboardTimeSeriesDataResponse> batchReadTensorboardTimeSeriesDataSettings() { return getStubSettingsBuilder().batchReadTensorboardTimeSeriesDataSettings(); } /** Returns the builder for the settings used for calls to readTensorboardTimeSeriesData. 
*/ public UnaryCallSettings.Builder< ReadTensorboardTimeSeriesDataRequest, ReadTensorboardTimeSeriesDataResponse> readTensorboardTimeSeriesDataSettings() { return getStubSettingsBuilder().readTensorboardTimeSeriesDataSettings(); } /** Returns the builder for the settings used for calls to readTensorboardBlobData. */ public ServerStreamingCallSettings.Builder< ReadTensorboardBlobDataRequest, ReadTensorboardBlobDataResponse> readTensorboardBlobDataSettings() { return getStubSettingsBuilder().readTensorboardBlobDataSettings(); } /** Returns the builder for the settings used for calls to writeTensorboardExperimentData. */ public UnaryCallSettings.Builder< WriteTensorboardExperimentDataRequest, WriteTensorboardExperimentDataResponse> writeTensorboardExperimentDataSettings() { return getStubSettingsBuilder().writeTensorboardExperimentDataSettings(); } /** Returns the builder for the settings used for calls to writeTensorboardRunData. */ public UnaryCallSettings.Builder< WriteTensorboardRunDataRequest, WriteTensorboardRunDataResponse> writeTensorboardRunDataSettings() { return getStubSettingsBuilder().writeTensorboardRunDataSettings(); } /** Returns the builder for the settings used for calls to exportTensorboardTimeSeriesData. */ public PagedCallSettings.Builder< ExportTensorboardTimeSeriesDataRequest, ExportTensorboardTimeSeriesDataResponse, ExportTensorboardTimeSeriesDataPagedResponse> exportTensorboardTimeSeriesDataSettings() { return getStubSettingsBuilder().exportTensorboardTimeSeriesDataSettings(); } /** Returns the builder for the settings used for calls to listLocations. */ public PagedCallSettings.Builder< ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse> listLocationsSettings() { return getStubSettingsBuilder().listLocationsSettings(); } /** Returns the builder for the settings used for calls to getLocation. 
*/ public UnaryCallSettings.Builder<GetLocationRequest, Location> getLocationSettings() { return getStubSettingsBuilder().getLocationSettings(); } /** Returns the builder for the settings used for calls to setIamPolicy. */ public UnaryCallSettings.Builder<SetIamPolicyRequest, Policy> setIamPolicySettings() { return getStubSettingsBuilder().setIamPolicySettings(); } /** Returns the builder for the settings used for calls to getIamPolicy. */ public UnaryCallSettings.Builder<GetIamPolicyRequest, Policy> getIamPolicySettings() { return getStubSettingsBuilder().getIamPolicySettings(); } /** Returns the builder for the settings used for calls to testIamPermissions. */ public UnaryCallSettings.Builder<TestIamPermissionsRequest, TestIamPermissionsResponse> testIamPermissionsSettings() { return getStubSettingsBuilder().testIamPermissionsSettings(); } @Override public TensorboardServiceSettings build() throws IOException { return new TensorboardServiceSettings(this); } } }
google/sagetv
36,491
java/sage/Playlist.java
/* * Copyright 2015 The SageTV Authors. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package sage; import java.io.BufferedReader; import java.io.DataInput; import java.io.DataOutput; import java.io.File; import java.io.IOException; import java.net.URI; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.Vector; import java.util.regex.Matcher; import java.util.regex.Pattern; public class Playlist extends DBObject { public static final byte AIRING_SEGMENT = 1; public static final byte PLAYLIST_SEGMENT = 2; public static final byte ALBUM_SEGMENT = 3; // temp media files are only allowed in the nowplaying playlist public static final byte TEMPMEDIAFILE_SEGMENT = 4; Playlist(int inID) { super(inID); wiz = Wizard.getInstance(); name = ""; segmentTypes = Pooler.EMPTY_BYTE_ARRAY; segments = new Vector<Object>(); segmentTimes = new Vector<long[]>(); } Playlist(DataInput in, byte ver, Map<Integer, Integer> idMap) throws IOException { super(in, ver, idMap); wiz = Wizard.getInstance(); name = in.readUTF(); int numSegs = in.readInt(); segmentTypes = new byte[numSegs]; segments = new Vector<Object>(); segmentTimes = new Vector<long[]>(); for (int i = 0; i < numSegs; i++) { segmentTypes[i] = in.readByte(); if (segmentTypes[i] == ALBUM_SEGMENT) { int titleID = readID(in, idMap); int artistID = readID(in, idMap); int genreID = 0; int 
yearID = 0; if (ver > 0x2B) { genreID = readID(in, idMap); yearID = readID(in, idMap); } AlbumData ad = new AlbumData(wiz.getTitleForID(titleID), wiz.getPersonForID(artistID), wiz.getCategoryForID(genreID), wiz.getYearForID(yearID)); segments.add(ad); segmentTimes.add(new long[0]); } else { segments.add(new Integer(readID(in, idMap))); int numCuts = in.readInt(); long[] currTimes = new long[numCuts]; for (int j = 0; j < numCuts; j++) currTimes[j] = in.readLong(); segmentTimes.add(currTimes); } } if (ver > 0x45) { buildPlaylistProps(in.readUTF()); } } private void buildPlaylistProps(String str) { if (str != null && str.length() > 0) { if (playlistProps == null) playlistProps = new Properties(); else playlistProps.clear(); int currNameStart = 0; int currValueStart = -1; for (int i = 0; i < str.length(); i++) { char c = str.charAt(i); if (c == '\\') { // Escaped character, so skip the next one i++; continue; } else if (c == '=') { // We found the name=value delimeter, set the value start position currValueStart = i + 1; } else if (c == ';' && currValueStart != -1) { // We're at the end of the name value pair, get their values! String name = sage.media.format.ContainerFormat.unescapeString(str.substring(currNameStart, currValueStart - 1)); String value = sage.media.format.ContainerFormat.unescapeString(str.substring(currValueStart, i)); currNameStart = i + 1; currValueStart = -1; playlistProps.setProperty(name, value); } } } else if (playlistProps != null) playlistProps.clear(); } void write(DataOutput out, int flags) throws IOException { super.write(out, flags); boolean useLookupIdx = (flags & Wizard.WRITE_OPT_USE_ARRAY_INDICES) != 0; out.writeUTF(name); out.writeInt(segmentTypes.length); for (int i = 0; i < segmentTypes.length; i++) { out.writeByte(segmentTypes[i]); Object segElem = segments.get(i); if (segmentTypes[i] == ALBUM_SEGMENT) { AlbumData ad = (AlbumData) segElem; out.writeInt(ad.title != null ? (useLookupIdx ? 
ad.title.lookupIdx : ad.title.id) : 0); out.writeInt(ad.artist != null ? (useLookupIdx ? ad.artist.lookupIdx : ad.artist.id) : 0); out.writeInt(ad.genre != null ? (useLookupIdx ? ad.genre.lookupIdx : ad.genre.id) : 0); out.writeInt(ad.year != null ? (useLookupIdx ? ad.year.lookupIdx : ad.year.id) : 0); // there's no cut times for album segments } else { // playlist id or airing id out.writeInt(((Integer) segElem).intValue()); long[] currTimes = segmentTimes.get(i); out.writeInt(currTimes.length); for (int j = 0; j < currTimes.length; j++) out.writeLong(currTimes[j]); } } if (playlistProps == null) out.writeUTF(""); else { StringBuilder sb = new StringBuilder(); for (Map.Entry<Object, Object> ent : playlistProps.entrySet()) { sb.append(sage.media.format.MediaFormat.escapeString(ent.getKey().toString())); sb.append('='); sb.append(sage.media.format.MediaFormat.escapeString(ent.getValue().toString())); sb.append(';'); } out.writeUTF(sb.toString()); } } boolean validate() { boolean rv = true; for (int i = 0; i < segmentTypes.length; i++) { if (segmentTypes[i] == ALBUM_SEGMENT) { // We can't invalidate an album /*AlbumData a = (AlbumData) segments.get(i); if (a.getTitleStringer() == null || a.getArtistStringer() == null) { segmentTypes[i] = 0; rv = false; }*/ } else if (segmentTypes[i] == AIRING_SEGMENT) { if (wiz.getAiringForID(((Integer) segments.get(i)).intValue()) == null) { segmentTypes[i] = 0; rv = false; } } else if (segmentTypes[i] == PLAYLIST_SEGMENT) { if (wiz.getPlaylistForID(((Integer) segments.get(i)).intValue()) == null) { segmentTypes[i] = 0; rv = false; } } } if (!rv) { List<Byte> fixedTypes = new ArrayList<Byte>(); for (int i = 0; i < segmentTypes.length; i++) { if (segmentTypes[i] == 0) { segments.remove(fixedTypes.size()); segmentTimes.remove(fixedTypes.size()); } else fixedTypes.add(new Byte(segmentTypes[i])); } segmentTypes = new byte[fixedTypes.size()]; for (int i = 0; i < fixedTypes.size(); i++) segmentTypes[i] = fixedTypes.get(i); } return true; 
} synchronized void update(DBObject fromMe) { Playlist p = (Playlist) fromMe; name = p.name; segmentTypes = p.segmentTypes.clone(); segments = new Vector<Object>(p.segments); segmentTimes = new Vector<long[]>(p.segmentTimes); if (p.playlistProps != null) playlistProps = (Properties) p.playlistProps.clone(); else playlistProps = null; super.update(fromMe); } // Circular playlists should be allowed, its an easy way to do looped playback, just // be careful it doesn't kill us on recursion anywhere! public synchronized String toString() { return safeToString(new HashSet<Playlist>()); } private String safeToString(Set<Playlist> doneLists) { StringBuilder sb = new StringBuilder("Playlist["); sb.append(name); if (!doneLists.add(this)) { sb.append(']'); return sb.toString(); } sb.append(' '); for (int i = 0; i < segmentTypes.length; i++) { if (segmentTypes[i] == ALBUM_SEGMENT || segmentTypes[i] == TEMPMEDIAFILE_SEGMENT) sb.append(segments.get(i)); else if (segmentTypes[i] == AIRING_SEGMENT) sb.append(wiz.getAiringForID(((Integer) segments.get(i)).intValue())); else if (segmentTypes[i] == PLAYLIST_SEGMENT) { Playlist theList = wiz.getPlaylistForID(((Integer) segments.get(i)).intValue()); if (theList != null) sb.append(theList.safeToString(doneLists)); else sb.append("null"); } sb.append(", "); } sb.append(']'); return sb.toString(); } public String getName() { return name; } public synchronized Object[] getSegments() { List<Object> rv = new ArrayList<Object>(); for (int i = 0; i < segmentTypes.length; i++) { if (segmentTypes[i] == ALBUM_SEGMENT) { AlbumData allie = (AlbumData) segments.get(i); Album al = wiz.getCachedAlbum(allie.title, allie.artist); if (al == null) { // Create an album to fake this out rv.add(new Album(allie.title, allie.artist, allie.genre, allie.year)); } else rv.add(al); } else if (segmentTypes[i] == AIRING_SEGMENT) { Airing newAir = wiz.getAiringForID(((Integer) segments.get(i))); if (newAir != null) rv.add(newAir); } else if (segmentTypes[i] == 
PLAYLIST_SEGMENT) { Playlist listy = wiz.getPlaylistForID(((Integer) segments.get(i))); if (listy != null) rv.add(listy); } else if (segmentTypes[i] == TEMPMEDIAFILE_SEGMENT) rv.add(segments.get(i)); } return rv.toArray(); } public synchronized MediaFile[] getMediaFiles() { return getMediaFiles(new HashSet<Playlist>()); } synchronized MediaFile[] getMediaFiles(Set<Playlist> donePlaylists) { if (!donePlaylists.add(this)) return new MediaFile[0]; List<MediaFile> rv = new ArrayList<MediaFile>(); for (int i = 0; i < segmentTypes.length; i++) { if (segmentTypes[i] == ALBUM_SEGMENT) { AlbumData allie = (AlbumData) segments.get(i); Album a = wiz.getCachedAlbum(allie.title, allie.artist); if (a != null) { Airing[] airs = a.getAirings(); for (int j = 0; j < airs.length; j++) { MediaFile mf = wiz.getFileForAiring(airs[j]); if (mf != null) rv.add(mf); } } } else if (segmentTypes[i] == AIRING_SEGMENT) { MediaFile mf = wiz.getFileForAiring(wiz.getAiringForID(((Integer) segments.get(i)).intValue())); if (mf != null) rv.add(mf); } else if (segmentTypes[i] == PLAYLIST_SEGMENT) { Playlist listy = wiz.getPlaylistForID(((Integer) segments.get(i)).intValue()); if (listy != null) { rv.addAll(Arrays.asList(listy.getMediaFiles(donePlaylists))); } } else if (segmentTypes[i] == TEMPMEDIAFILE_SEGMENT) rv.add((MediaFile) segments.get(i)); } return rv.toArray(new MediaFile[0]); } public synchronized Object getSegment(int i) { if (segmentTypes.length == 0) return null; i = Math.max(0, Math.min(segmentTypes.length - 1, i)); if (segmentTypes[i] == ALBUM_SEGMENT) { AlbumData allie = (AlbumData) segments.get(i); Album a = wiz.getCachedAlbum(allie.title, allie.artist); if (a == null) { // Create an album to fake this out return new Album(allie.title, allie.artist, allie.genre, allie.year); } else return a; } else if (segmentTypes[i] == AIRING_SEGMENT) return wiz.getAiringForID(((Integer) segments.get(i)).intValue()); else if (segmentTypes[i] == PLAYLIST_SEGMENT) return 
wiz.getPlaylistForID(((Integer) segments.get(i)).intValue()); else if (segmentTypes[i] == TEMPMEDIAFILE_SEGMENT) return segments.get(i); return null; } public synchronized int getSegmentType(int i) { if (segmentTypes.length == 0) return 0; i = Math.max(0, Math.min(segmentTypes.length - 1, i)); return segmentTypes[i]; } public int getNumSegments() { return segmentTypes.length; } public synchronized void setName(String s) { name = s; if (id > 0) wiz.logUpdate(this, Wizard.PLAYLIST_CODE); } private void addToPlaylist(byte b, Object o, int idx) { idx = Math.max(0, Math.min(segmentTypes.length, idx)); byte[] newTypes = new byte[segmentTypes.length + 1]; if (idx == segmentTypes.length) System.arraycopy(segmentTypes, 0, newTypes, 0, segmentTypes.length); else if (idx == 0) System.arraycopy(segmentTypes, 0, newTypes, 1, segmentTypes.length); else { System.arraycopy(segmentTypes, 0, newTypes, 0, idx); System.arraycopy(segmentTypes, idx, newTypes, idx + 1, segmentTypes.length - idx); } newTypes[idx] = b; segmentTypes = newTypes; segments.insertElementAt(o, idx); segmentTimes.insertElementAt(new long[0], idx); if (id > 0) wiz.logUpdate(this, Wizard.PLAYLIST_CODE); } public synchronized void addToPlaylist(Airing addMe) { addToPlaylist(AIRING_SEGMENT, new Integer(addMe.id), segmentTypes.length); } public synchronized void addToPlaylist(Album addMe) { addToPlaylist(ALBUM_SEGMENT, new AlbumData(addMe.getTitleStringer(), addMe.getArtistObj(), addMe.getGenreStringer(), addMe.getYearStringer()), segmentTypes.length); } public synchronized void addToPlaylist(Playlist addMe) { addToPlaylist(PLAYLIST_SEGMENT, new Integer(addMe.id), segmentTypes.length); } public synchronized void addToPlaylist(MediaFile addMe) { // ONLY allow temporary media files for the now playing playlist if (addMe.generalType == MediaFile.MEDIAFILE_LOCAL_PLAYBACK && id == 0) addToPlaylist(TEMPMEDIAFILE_SEGMENT, addMe, segmentTypes.length); } public synchronized void insertIntoPlaylist(Airing addMe, int index) { 
addToPlaylist(AIRING_SEGMENT, new Integer(addMe.id), index); } public synchronized void insertIntoPlaylist(Album addMe, int index) { addToPlaylist(ALBUM_SEGMENT, new AlbumData(addMe.getTitleStringer(), addMe.getArtistObj(), addMe.getGenreStringer(), addMe.getYearStringer()), index); } public synchronized void insertIntoPlaylist(Playlist addMe, int index) { addToPlaylist(PLAYLIST_SEGMENT, new Integer(addMe.id), index); } public synchronized void insertIntoPlaylist(MediaFile addMe, int index) { // ONLY allow temporary media files for the now playing playlist if (addMe.generalType == MediaFile.MEDIAFILE_LOCAL_PLAYBACK && id == 0) addToPlaylist(TEMPMEDIAFILE_SEGMENT, addMe, index); } public synchronized void removeFromPlaylist(int idx) { if (segmentTypes.length == 0) return; idx = Math.max(0, Math.min(segmentTypes.length, idx)); byte[] newTypes = new byte[segmentTypes.length - 1]; if (idx == segmentTypes.length - 1) System.arraycopy(segmentTypes, 0, newTypes, 0, segmentTypes.length - 1); else if (idx == 0) System.arraycopy(segmentTypes, 1, newTypes, 0, segmentTypes.length - 1); else { System.arraycopy(segmentTypes, 0, newTypes, 0, idx); System.arraycopy(segmentTypes, idx + 1, newTypes, idx, segmentTypes.length - idx - 1); } segmentTypes = newTypes; segments.remove(idx); segmentTimes.remove(idx); if (id > 0) wiz.logUpdate(this, Wizard.PLAYLIST_CODE); } public synchronized void removeFromPlaylist(Object o) { if (o instanceof DBObject) o = new Integer(((DBObject) o).id); int x = segments.indexOf(o); if (x != -1) removeFromPlaylist(x); } public synchronized void clear() { if (segmentTypes.length == 0) return; segmentTypes = Pooler.EMPTY_BYTE_ARRAY; segments.clear(); segmentTimes.clear(); if (id > 0) wiz.logUpdate(this, Wizard.PLAYLIST_CODE); } public synchronized void movePlaylistSegment(int idx, boolean moveUp) { idx = Math.max(0, Math.min(segmentTypes.length, idx)); int newIdx = idx + (moveUp ? 
-1 : 1); newIdx = Math.max(0, Math.min(segmentTypes.length, newIdx)); if (newIdx == idx) return; byte swapType = segmentTypes[newIdx]; segmentTypes[newIdx] = segmentTypes[idx]; segmentTypes[idx] = swapType; segments.insertElementAt(segments.remove(idx), newIdx); segmentTimes.insertElementAt(segmentTimes.remove(idx), newIdx); if (id > 0) wiz.logUpdate(this, Wizard.PLAYLIST_CODE); } public String getProperty(String name) { if (playlistProps == null) return ""; String rv = playlistProps.getProperty(name); return (rv == null) ? "" : rv; } public synchronized void setProperty(String name, String value) { if (value == null && (playlistProps == null || !playlistProps.containsKey(name))) return; if (value != null && playlistProps != null && value.equals(playlistProps.getProperty(name))) return; if (value == null) { playlistProps.remove(name); } else { if (playlistProps == null) playlistProps = new Properties(); playlistProps.setProperty(name, value); } wiz.logUpdate(this, Wizard.PLAYLIST_CODE); } public Properties getProperties() { if (playlistProps == null) return new Properties(); return (Properties) playlistProps.clone(); } // Returns null if no requirement, a String describing it otherwise public String doesRequirePCAccess(UIManager uiMgr) { if (!VideoFrame.getEnablePC()) return null; String[] restrictions = VideoFrame.getPCRestrictions(); if (restrictions.length == 0) return null; Set<String> restrictionsSet = new HashSet<String>(); doesRequirePCAccessRecursable(restrictions, null, restrictionsSet); if (!restrictionsSet.isEmpty()) { StringBuilder sb = new StringBuilder(); for (String str : restrictionsSet) { str = Channel.convertPotentialStationIDToName(str); if (sb.length() == 0) sb.append(str); else { sb.append(", "); sb.append(str); } } return sb.toString(); } else return null; } private void doesRequirePCAccessRecursable(String[] pcRestrict, Set<Playlist> playlistSet, Set<String> restrictionsSet) { Playlist p; Airing a; for (int i = 0; i < segmentTypes.length; 
i++) { if (segmentTypes[i] == ALBUM_SEGMENT) { // these can't have pc restrictions' continue; } else if (segmentTypes[i] == AIRING_SEGMENT) { if ((a = wiz.getAiringForID(((Integer) segments.get(i)).intValue())) != null && !a.isMusic()) { String[] airDanger = a.getRatingRestrictables(); for (int j = 0; j < airDanger.length; j++) { if (Arrays.binarySearch(pcRestrict, airDanger[j]) >= 0) { restrictionsSet.add(airDanger[j]); } } } } else if (segmentTypes[i] == PLAYLIST_SEGMENT) { if ((p = wiz.getPlaylistForID(((Integer) segments.get(i)).intValue())) != null) { if (playlistSet != null && !playlistSet.add(p)) continue; p.doesRequirePCAccessRecursable(pcRestrict, playlistSet == null ? (playlistSet = new HashSet<Playlist>()) : playlistSet, restrictionsSet); } } else if (segmentTypes[i] == TEMPMEDIAFILE_SEGMENT) { // these can't have pc restrictions continue; } } } public boolean isMusicPlaylist() { return isMusicRecursable(null); } private boolean isMusicRecursable(Set<Playlist> set) { Playlist p; Airing a; for (int i = 0; i < segmentTypes.length; i++) { if (segmentTypes[i] == ALBUM_SEGMENT) { continue; } else if (segmentTypes[i] == AIRING_SEGMENT) { if ((a = wiz.getAiringForID(((Integer) segments.get(i)).intValue())) != null) { if (!a.isMusic()) return false; } } else if (segmentTypes[i] == PLAYLIST_SEGMENT) { if ((p = wiz.getPlaylistForID(((Integer) segments.get(i)).intValue())) != null) { if (set != null && !set.add(p)) continue; if (!p.isMusicRecursable(set == null ? new HashSet<Playlist>() : set)) return false; } } else if (segmentTypes[i] == TEMPMEDIAFILE_SEGMENT) { if (!((MediaFile) segments.get(i)).isMusic()) return false; } } return true; } // This will create a new playlist from the given playlist file unless a playlist already exists with that name. 
public static Playlist importPlaylist(File theFile, String prefixName) { // First generate the name for the playlist String listName = theFile.getName(); int lastDot = listName.lastIndexOf('.'); if (lastDot != -1) listName = listName.substring(0, lastDot); if (prefixName != null && prefixName.length() > 0) listName = prefixName + listName; boolean reimportPlaylists = Sage.getBoolean("fully_reimport_playlists_every_scan", false); Playlist existingPlaylist = null; Playlist[] allLists = Wizard.getInstance().getPlaylists(); for (int i = 0; i < allLists.length; i++) { if (allLists[i].name.equalsIgnoreCase(listName)) { if (reimportPlaylists) existingPlaylist = allLists[i]; else return allLists[i]; } } if (Sage.DBG) System.out.println("Creating playlist object for playlist file: " + theFile); List<String> missingSegments = new ArrayList<String>(); // Now add all of the elements of the playlist to the playlist. // If any of them are not there, do not import the playlist. Most likely those files // will be imported later in the scan, and then we can get the playlist on the next time around MediaFile[] mf = Wizard.getInstance().getFiles(); List<Airing> playlistItems = new ArrayList<Airing>(); if (theFile.getName().toLowerCase().endsWith(".m3u")) { // M3U playlist BufferedReader inStream = null; try { // The default is supposed to be Latin-1; but many people use UTF8 in there so we should try to detect that inStream = IOUtils.openReaderDetectCharset(theFile, Sage.I18N_CHARSET); String line = inStream.readLine(); while (line != null) { line = line.trim(); if (line.length() > 0 && !line.startsWith("#") && !line.startsWith("http://") && !line.startsWith("mms://") && !line.startsWith("rtsp://") && !line.startsWith("rtp://")) { // # indicates it's a comment File f = new File(line); if (!f.isAbsolute()) { // NOTE: Workaround for bug in Java where new File("C:\\foo\\", "\\foo.txt") resolves to C:\foo\foo.txt if (Sage.WINDOWS_OS && line.startsWith("\\") && line.length() > 1 && 
line.charAt(1) != '\\') { File parentRoot = theFile.getParentFile(); while (parentRoot.getParentFile() != null) parentRoot = parentRoot.getParentFile(); f = new File(parentRoot, line); } else { // Change it to be relative to the M3u file f = new File(theFile.getParentFile(), line); } f = f.getCanonicalFile(); } // Find the MediaFile for this playlist element boolean foundFile = false; MediaFile matchMF = Wizard.getInstance().getFileForFilePath(f); if (matchMF != null) { playlistItems.add(matchMF.getContentAiring()); foundFile = true; } if (!foundFile) { // In case there's extra stuff before the comment marker if (line.indexOf('#') == -1) { if (Sage.DBG) System.out.println("Missing element in playlist, ignoring that element - playlist: " + theFile + " element: " + line + " resolvedPath=" + f); missingSegments.add(f.getAbsolutePath()); //return null; } } else { if (Sage.DBG) System.out.println("Found file to add to playlist: " + line); } } line = inStream.readLine(); } } catch (IOException e) { System.out.println("Error parsing playlist file " + theFile + " of " + e.toString()); return null; } finally { if (inStream != null) { try { inStream.close(); } catch (Exception e){} } inStream = null; } } else if (theFile.getName().toLowerCase().endsWith(".asx") || theFile.getName().toLowerCase().endsWith(".wax") || theFile.getName().toLowerCase().endsWith(".wvx")) { // Windows Media playlist // Read the contents of the whole playlist file String fileStr = IOUtils.getFileAsString(theFile); // The files in these playlists have a structure we're looking for like this: /* * <Entry> * <Ref href="file:///C:/mp3s/getdown.asf" /> * </Entry> * * So we need to regex parse it to find all of the Entry elements, and then find the first * Ref element inside it and get the href attribute from it */ Pattern pat = Pattern.compile( "\\<\\s*ENTRY\\s*\\>.*?\\<\\s*REF.*?HREF\\s*\\=\\s*\\\"\\s*(.*?)\\s*\\\".*?\\/\\>.*?\\<\\/\\s*ENTRY\\s*\\>", Pattern.CASE_INSENSITIVE | Pattern.DOTALL); Matcher 
match = pat.matcher(fileStr); while (match.find()) { String currURL = match.group(1); if (Sage.DBG) System.out.println("Found URL to add to playlist: " + currURL); if (currURL.startsWith("http://") || currURL.startsWith("mms://") || currURL.startsWith("rtsp://") || currURL.startsWith("rtp://")) continue; try { File f; if (currURL.indexOf(":") != -1) { URI uri = new URI(currURL); f = new File(uri); } else { f = new File(currURL); try { if (!f.isAbsolute()) f = new File(theFile.getParentFile(), currURL); f = f.getCanonicalFile(); }catch (IOException e) { if (Sage.DBG) System.out.println("Error getting file path to:" + currURL); return null; } } // Find the MediaFile for this playlist element boolean foundFile = false; MediaFile matchMF = Wizard.getInstance().getFileForFilePath(f); if (matchMF != null) { playlistItems.add(matchMF.getContentAiring()); foundFile = true; } if (!foundFile) { if (Sage.DBG) System.out.println("Missing element in playlist, ignoring that element - playlist: " + theFile + " element: " + currURL + " resolvedPath=" + f); missingSegments.add(f.getAbsolutePath()); //return null; } else { if (Sage.DBG) System.out.println("Found file to add to playlist: " + currURL); } } catch (Exception e) { if (Sage.DBG) System.out.println("Ignoring playlist: " + theFile + " due to bad URL: " + currURL); return null; } } } else if (theFile.getName().toLowerCase().endsWith(".wpl")) { // Windows Media playlist // Read the contents of the whole playlist file String fileStr = IOUtils.getFileAsString(theFile); // The files in these playlists have a structure we're looking for like this: /* * <media src="../foobar/test.wma" /> * * So we need to regex parse it to find all of the src attributes on media elements */ Pattern pat = Pattern.compile( "<\\s*media.*?src\\s*\\=\\s*\\\"\\s*(.*?)\\s*\\\".*?\\/\\>", Pattern.CASE_INSENSITIVE | Pattern.DOTALL); Matcher match = pat.matcher(fileStr); while (match.find()) { String currPath = match.group(1); // Fix the HTML entity 
reference stuff currPath = currPath.replaceAll("\\&apos\\;", "'"); currPath = currPath.replaceAll("\\&quot\\;", "\""); currPath = currPath.replaceAll("\\&lt\\;", "<"); currPath = currPath.replaceAll("\\&gt\\;", ">"); currPath = currPath.replaceAll("\\&amp\\;", "&"); // Also fix any UTF-8 issues File f; try { currPath = new String(currPath.getBytes(), "UTF-8"); f = new File(currPath); if (!f.isAbsolute()) f = new File(theFile.getParentFile(), currPath); f = f.getCanonicalFile(); }catch (IOException e) { if (Sage.DBG) System.out.println("Error getting file path to:" + currPath); return null; } // Find the MediaFile for this playlist element boolean foundFile = false; MediaFile matchMF = Wizard.getInstance().getFileForFilePath(f); if (matchMF != null) { playlistItems.add(matchMF.getContentAiring()); foundFile = true; } if (!foundFile) { if (Sage.DBG) System.out.println("Missing element in playlist, ignoring that element - playlist: " + theFile + " element: " + currPath + " resolvedPath=" + f); missingSegments.add(f.getAbsolutePath()); //return null; } else { if (Sage.DBG) System.out.println("Found file to add to playlist: " + currPath); } } } else { System.out.println("Invalid playlist format: " + theFile); return null; } if (playlistItems.size() == 0) { if (Sage.DBG) System.out.println("Ignoring playlist due to zero size: " + theFile); SeekerSelector.getInstance().addIgnoreFile(theFile); return null; } Playlist rv; if (reimportPlaylists && existingPlaylist != null) { rv = existingPlaylist; rv.clear(); } else rv = Wizard.getInstance().addPlaylist(listName); for (int i = 0; i < playlistItems.size(); i++) { Object obj = playlistItems.get(i); if (obj instanceof Airing) rv.addToPlaylist((Airing) obj); else if (obj instanceof Album) rv.addToPlaylist((Album) obj); else if (obj instanceof Playlist) rv.addToPlaylist((Playlist) obj); } for (int i = 0; i < missingSegments.size(); i++) { 
sage.msg.MsgManager.postMessage(sage.msg.SystemMessage.createPlaylistMissingSegmentMsg(theFile.getAbsolutePath(), missingSegments.get(i).toString())); } return rv; } // This checks to make sure all of the Airing objects inside of the Playlist that should correspond // to MediaFiles actually do. If they don't, then an attempt is made to find the MediaFile that they should match // and re-associate it. This can easily happen by doing a re-import of your music library. public boolean verifyPlaylist() { return verifyPlaylist(new HashSet<Playlist>()); } private boolean verifyPlaylist(Set<Playlist> donePlaylists) { if (!donePlaylists.add(this)) return true; boolean status = false; for (int i = 0; i < segmentTypes.length; i++) { if (segmentTypes[i] == ALBUM_SEGMENT) { // We don't need to check albums since they're referenced by name and not ID status = true; } else if (segmentTypes[i] == TEMPMEDIAFILE_SEGMENT) { status = true; } else if (segmentTypes[i] == AIRING_SEGMENT) { Airing aid = wiz.getAiringForID(((Integer) segments.get(i)).intValue()); MediaFile mf = wiz.getFileForAiring(aid); if (mf == null && (aid.isMusic() || (!aid.isMusic() && aid.getEndTime() < Sage.time()))) { Show testShow = aid.getShow(); // OK, we've got an airing segment that doesn't match an Airing object anymore. First find all of // the Show objects that have matching data for this Airing. 
(just check title & episode name, if multiple // ones match on that then go into more detail and even use the Airing times if required, be sure anything // checked against also has a MediaFile object itself) DBObject[] rawShows = wiz.getRawAccess(Wizard.SHOW_CODE, (byte)0); Airing bestAirMatch = null; for (int j = 0; j < rawShows.length; j++) { Show s = (Show) rawShows[j]; if (s != null && testShow.title == s.title && testShow.getEpisodeName().equals(s.getEpisodeName())) { Airing[] currAirs = wiz.getAirings(s, 0); if (currAirs != null && currAirs.length > 0) { for (int k = 0; k < currAirs.length; k++) { if (wiz.getFileForAiring(currAirs[k]) != null) { if (bestAirMatch == null) bestAirMatch = currAirs[k]; else { // First check on airing start time since file modification times aren't easily changed without retagging and // have almost no chance of false positives if (bestAirMatch.getStartTime() != aid.getStartTime() && currAirs[k].getStartTime() == aid.getStartTime()) { bestAirMatch = currAirs[k]; } else if (bestAirMatch.getStartTime() != aid.getStartTime()) { // Next check is for a matching track number if (bestAirMatch.partsB != aid.partsB && currAirs[k].partsB == aid.partsB) { bestAirMatch = currAirs[k]; } // Otherwise, just keep what we've got. } // Otherwise, just keep what we've got. 
} } } } } } if (bestAirMatch != null) { if (Sage.DBG) System.out.println("Playlist is swapping out old invalid Airing for a new one, old=" + aid + " new=" + bestAirMatch); segments.setElementAt(new Integer(bestAirMatch.id), i); if (id > 0) wiz.logUpdate(this, Wizard.PLAYLIST_CODE); status = true; } } else status = true; } else if (segmentTypes[i] == PLAYLIST_SEGMENT) { Playlist listy = wiz.getPlaylistForID(((Integer) segments.get(i)).intValue()); if (listy != null) { status |= listy.verifyPlaylist(donePlaylists); } } } return status; } public boolean isSingleItemPlaylist() { return segmentTypes.length == 1 && segmentTypes[0] == AIRING_SEGMENT; } private class AlbumData { public AlbumData(Stringer t, Person ar, Stringer g, Stringer y) { title = t; artist = ar; genre = g; year = y; } Stringer title; Person artist; Stringer genre; Stringer year; public String toString() { return Sage.rez("Song_By_Artist", new Object[] { (title == null ? "" : title.name), (artist == null ? "" : artist.name) }); } } String name; byte[] segmentTypes; // Consists of airingIDs, playlistIDs or Albums Vector<Object> segments; // each element is a long[] with even size, pairs representing start/stop times, but this // won't work well until we have more accurate seeking in our demux Vector<long[]> segmentTimes; Properties playlistProps; private Wizard wiz; }
googleapis/google-cloud-java
35,223
java-speech/proto-google-cloud-speech-v1p1beta1/src/main/java/com/google/cloud/speech/v1p1beta1/SpeechProto.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/speech/v1p1beta1/cloud_speech.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.speech.v1p1beta1; public final class SpeechProto { private SpeechProto() {} public static void registerAllExtensions(com.google.protobuf.ExtensionRegistryLite registry) {} public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry registry) { registerAllExtensions((com.google.protobuf.ExtensionRegistryLite) registry); } static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_speech_v1p1beta1_RecognizeRequest_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_speech_v1p1beta1_RecognizeRequest_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_speech_v1p1beta1_LongRunningRecognizeRequest_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_speech_v1p1beta1_LongRunningRecognizeRequest_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_speech_v1p1beta1_TranscriptOutputConfig_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_speech_v1p1beta1_TranscriptOutputConfig_fieldAccessorTable; static final 
com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_speech_v1p1beta1_StreamingRecognizeRequest_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_speech_v1p1beta1_StreamingRecognizeRequest_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_speech_v1p1beta1_StreamingRecognitionConfig_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_speech_v1p1beta1_StreamingRecognitionConfig_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_speech_v1p1beta1_StreamingRecognitionConfig_VoiceActivityTimeout_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_speech_v1p1beta1_StreamingRecognitionConfig_VoiceActivityTimeout_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_speech_v1p1beta1_RecognitionConfig_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_speech_v1p1beta1_RecognitionConfig_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_speech_v1p1beta1_SpeakerDiarizationConfig_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_speech_v1p1beta1_SpeakerDiarizationConfig_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_speech_v1p1beta1_RecognitionMetadata_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_speech_v1p1beta1_RecognitionMetadata_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_speech_v1p1beta1_SpeechContext_descriptor; static final 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_speech_v1p1beta1_SpeechContext_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_speech_v1p1beta1_RecognitionAudio_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_speech_v1p1beta1_RecognitionAudio_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_speech_v1p1beta1_RecognizeResponse_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_speech_v1p1beta1_RecognizeResponse_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_speech_v1p1beta1_LongRunningRecognizeResponse_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_speech_v1p1beta1_LongRunningRecognizeResponse_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_speech_v1p1beta1_LongRunningRecognizeMetadata_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_speech_v1p1beta1_LongRunningRecognizeMetadata_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_speech_v1p1beta1_StreamingRecognizeResponse_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_speech_v1p1beta1_StreamingRecognizeResponse_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_speech_v1p1beta1_StreamingRecognitionResult_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_speech_v1p1beta1_StreamingRecognitionResult_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor 
internal_static_google_cloud_speech_v1p1beta1_SpeechRecognitionResult_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_speech_v1p1beta1_SpeechRecognitionResult_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_speech_v1p1beta1_SpeechRecognitionAlternative_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_speech_v1p1beta1_SpeechRecognitionAlternative_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_speech_v1p1beta1_WordInfo_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_speech_v1p1beta1_WordInfo_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_speech_v1p1beta1_SpeechAdaptationInfo_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_speech_v1p1beta1_SpeechAdaptationInfo_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { "\n" + "0google/cloud/speech/v1p1beta1/cloud_speech.proto\022\035google.cloud.speech.v1p1beta" + "1\032\034google/api/annotations.proto\032\027google/" + "api/client.proto\032\037google/api/field_behav" + "ior.proto\032,google/cloud/speech/v1p1beta1/resource.proto\032#google/longrunning/oper" + "ations.proto\032\036google/protobuf/duration.p" + "roto\032\037google/protobuf/timestamp.proto\032\036g" + "oogle/protobuf/wrappers.proto\032\027google/rpc/status.proto\"\236\001\n" + "\020RecognizeRequest\022E\n" + "\006config\030\001" + " \001(\01320.google.cloud.speech.v1p1beta1.RecognitionConfigB\003\340A\002\022C\n" + "\005audio\030\002 \001(" + 
"\0132/.google.cloud.speech.v1p1beta1.RecognitionAudioB\003\340A\002\"\374\001\n" + "\033LongRunningRecognizeRequest\022E\n" + "\006config\030\001 \001(\01320.google.cloud.s" + "peech.v1p1beta1.RecognitionConfigB\003\340A\002\022C\n" + "\005audio\030\002" + " \001(\0132/.google.cloud.speech.v1p1beta1.RecognitionAudioB\003\340A\002\022Q\n\r" + "output_config\030\004" + " \001(\01325.google.cloud.speech.v1p1beta1.TranscriptOutputConfigB\003\340A\001\":\n" + "\026TranscriptOutputConfig\022\021\n" + "\007gcs_uri\030\001 \001(\tH\000B\r\n" + "\013output_type\"\240\001\n" + "\031StreamingRecognizeRequest\022U\n" + "\020streaming_config\030\001 \001(\01329.google.clou" + "d.speech.v1p1beta1.StreamingRecognitionConfigH\000\022\027\n\r" + "audio_content\030\002 \001(\014H\000B\023\n" + "\021streaming_request\"\265\003\n" + "\032StreamingRecognitionConfig\022E\n" + "\006config\030\001" + " \001(\01320.google.cloud.speech.v1p1beta1.RecognitionConfigB\003\340A\002\022\030\n" + "\020single_utterance\030\002 \001(\010\022\027\n" + "\017interim_results\030\003 \001(\010\022$\n" + "\034enable_voice_activity_events\030\005 \001(\010\022n\n" + "\026voice_activity_timeout\030\006 \001(\0132N.g" + "oogle.cloud.speech.v1p1beta1.StreamingRecognitionConfig.VoiceActivityTimeout\032\206\001\n" + "\024VoiceActivityTimeout\0227\n" + "\024speech_start_timeout\030\001 \001(\0132\031.google.protobuf.Duration\0225\n" + "\022speech_end_timeout\030\002 \001(\0132\031.google.protobuf.Duration\"\315\t\n" + "\021RecognitionConfig\022P\n" + "\010encoding\030\001" + " \001(\0162>.google.cloud.speech.v1p1beta1.RecognitionConfig.AudioEncoding\022\031\n" + "\021sample_rate_hertz\030\002 \001(\005\022\033\n" + "\023audio_channel_count\030\007 \001(\005\022/\n" + "\'enable_separate_recognition_per_channel\030\014 \001(\010\022\032\n\r" + "language_code\030\003 \001(\tB\003\340A\002\022\"\n" + "\032alternative_language_codes\030\022 \003(\t\022\030\n" + "\020max_alternatives\030\004 \001(\005\022\030\n" + 
"\020profanity_filter\030\005 \001(\010\022C\n\n" + "adaptation\030\024 \001(\0132/.google.cloud.speech.v1p1beta1.SpeechAdaptation\022]\n" + "\030transcript_normalization\030\030 \001(" + "\01326.google.cloud.speech.v1p1beta1.TranscriptNormalizationB\003\340A\001\022E\n" + "\017speech_contexts\030\006" + " \003(\0132,.google.cloud.speech.v1p1beta1.SpeechContext\022 \n" + "\030enable_word_time_offsets\030\010 \001(\010\022\036\n" + "\026enable_word_confidence\030\017 \001(\010\022$\n" + "\034enable_automatic_punctuation\030\013 \001(\010\022=\n" + "\031enable_spoken_punctuation\030\026 \001(\0132\032.google.protobuf.BoolValue\0228\n" + "\024enable_spoken_emojis\030\027 \001(\0132\032.google.protobuf.BoolValue\022&\n" + "\032enable_speaker_diarization\030\020 \001(\010B\002\030\001\022%\n" + "\031diarization_speaker_count\030\021 \001(\005B\002\030\001\022S\n" + "\022diarization_config\030\023 \001(\01327.google.cloud" + ".speech.v1p1beta1.SpeakerDiarizationConfig\022D\n" + "\010metadata\030\t" + " \001(\01322.google.cloud.speech.v1p1beta1.RecognitionMetadata\022\r\n" + "\005model\030\r" + " \001(\t\022\024\n" + "\014use_enhanced\030\016 \001(\010\"\255\001\n\r" + "AudioEncoding\022\030\n" + "\024ENCODING_UNSPECIFIED\020\000\022\014\n" + "\010LINEAR16\020\001\022\010\n" + "\004FLAC\020\002\022\t\n" + "\005MULAW\020\003\022\007\n" + "\003AMR\020\004\022\n\n" + "\006AMR_WB\020\005\022\014\n" + "\010OGG_OPUS\020\006\022\032\n" + "\026SPEEX_WITH_HEADER_BYTE\020\007\022\007\n" + "\003MP3\020\010\022\r\n" + "\tWEBM_OPUS\020\t\022\010\n" + "\004ALAW\020\n" + "\"\220\001\n" + "\030SpeakerDiarizationConfig\022\"\n" + "\032enable_speaker_diarization\030\001 \001(\010\022\031\n" + "\021min_speaker_count\030\002 \001(\005\022\031\n" + "\021max_speaker_count\030\003 \001(\005\022\032\n" + "\013speaker_tag\030\005 \001(\005B\005\030\001\340A\003\"\333\010\n" + "\023RecognitionMetadata\022\\\n" + "\020interaction_type\030\001 \001" + 
"(\0162B.google.cloud.speech.v1p1beta1.RecognitionMetadata.InteractionType\022$\n" + "\034industry_naics_code_of_audio\030\003 \001(\r" + "\022b\n" + "\023microphone_distance\030\004 \001(\0162E.google.cloud.speech." + "v1p1beta1.RecognitionMetadata.MicrophoneDistance\022a\n" + "\023original_media_type\030\005 \001(\0162D." + "google.cloud.speech.v1p1beta1.RecognitionMetadata.OriginalMediaType\022e\n" + "\025recording_device_type\030\006 \001(\0162F.google.cloud.speech" + ".v1p1beta1.RecognitionMetadata.RecordingDeviceType\022\035\n" + "\025recording_device_name\030\007 \001(\t\022\032\n" + "\022original_mime_type\030\010 \001(\t\022\031\n\r" + "obfuscated_id\030\t \001(\003B\002\030\001\022\023\n" + "\013audio_topic\030\n" + " \001(\t\"\305\001\n" + "\017InteractionType\022 \n" + "\034INTERACTION_TYPE_UNSPECIFIED\020\000\022\016\n\n" + "DISCUSSION\020\001\022\020\n" + "\014PRESENTATION\020\002\022\016\n\n" + "PHONE_CALL\020\003\022\r\n" + "\tVOICEMAIL\020\004\022\033\n" + "\027PROFESSIONALLY_PRODUCED\020\005\022\020\n" + "\014VOICE_SEARCH\020\006\022\021\n\r" + "VOICE_COMMAND\020\007\022\r\n" + "\tDICTATION\020\010\"d\n" + "\022MicrophoneDistance\022#\n" + "\037MICROPHONE_DISTANCE_UNSPECIFIED\020\000\022\r\n" + "\tNEARFIELD\020\001\022\014\n" + "\010MIDFIELD\020\002\022\014\n" + "\010FARFIELD\020\003\"N\n" + "\021OriginalMediaType\022#\n" + "\037ORIGINAL_MEDIA_TYPE_UNSPECIFIED\020\000\022\t\n" + "\005AUDIO\020\001\022\t\n" + "\005VIDEO\020\002\"\244\001\n" + "\023RecordingDeviceType\022%\n" + "!RECORDING_DEVICE_TYPE_UNSPECIFIED\020\000\022\016\n\n" + "SMARTPHONE\020\001\022\006\n" + "\002PC\020\002\022\016\n\n" + "PHONE_LINE\020\003\022\013\n" + "\007VEHICLE\020\004\022\030\n" + "\024OTHER_OUTDOOR_DEVICE\020\005\022\027\n" + "\023OTHER_INDOOR_DEVICE\020\006:\002\030\001\"/\n\r" + "SpeechContext\022\017\n" + "\007phrases\030\001 \003(\t\022\r\n" + "\005boost\030\004 \001(\002\"D\n" + "\020RecognitionAudio\022\021\n" + "\007content\030\001 \001(\014H\000\022\r" + 
"\n" + "\003uri\030\002 \001(\tH\000B\016\n" + "\014audio_source\"\230\002\n" + "\021RecognizeResponse\022G\n" + "\007results\030\002 \003(\01326.google.cl" + "oud.speech.v1p1beta1.SpeechRecognitionResult\0224\n" + "\021total_billed_time\030\003 \001(\0132\031.google.protobuf.Duration\022S\n" + "\026speech_adaptation_info\030\007" + " \001(\01323.google.cloud.speech.v1p1beta1.SpeechAdaptationInfo\022\022\n\n" + "request_id\030\010 \001(\003\022\033\n" + "\023using_legacy_models\030\t \001(\010\"\376\002\n" + "\034LongRunningRecognizeResponse\022G\n" + "\007results\030\002 \003" + "(\01326.google.cloud.speech.v1p1beta1.SpeechRecognitionResult\0224\n" + "\021total_billed_time\030\003 \001(\0132\031.google.protobuf.Duration\022L\n\r" + "output_config\030\006" + " \001(\01325.google.cloud.speech.v1p1beta1.TranscriptOutputConfig\022(\n" + "\014output_error\030\007 \001(\0132\022.google.rpc.Status\022S\n" + "\026speech_adaptation_info\030\010" + " \001(\01323.google.cloud.speech.v1p1beta1.SpeechAdaptationInfo\022\022\n" + "\n" + "request_id\030\t \001(\003\"\203\002\n" + "\034LongRunningRecognizeMetadata\022\030\n" + "\020progress_percent\030\001 \001(\005\022.\n\n" + "start_time\030\002 \001(\0132\032.google.protobuf.Timestamp\0224\n" + "\020last_update_time\030\003 \001(\0132\032.google.protobuf.Timestamp\022\020\n" + "\003uri\030\004 \001(\tB\003\340A\003\022Q\n\r" + "output_config\030\005 \001(\01325.google.cloud.speec" + "h.v1p1beta1.TranscriptOutputConfigB\003\340A\003\"\346\004\n" + "\032StreamingRecognizeResponse\022!\n" + "\005error\030\001 \001(\0132\022.google.rpc.Status\022J\n" + "\007results\030\002 \003" + "(\01329.google.cloud.speech.v1p1beta1.StreamingRecognitionResult\022d\n" + "\021speech_event_type\030\004 \001(\0162I.google.cloud.speech.v1p1beta1" + ".StreamingRecognizeResponse.SpeechEventType\0224\n" + "\021speech_event_time\030\010 \001(\0132\031.google.protobuf.Duration\0224\n" + "\021total_billed_time\030\005 
\001(\0132\031.google.protobuf.Duration\022S\n" + "\026speech_adaptation_info\030\t" + " \001(\01323.google.cloud.speech.v1p1beta1.SpeechAdaptationInfo\022\022\n\n" + "request_id\030\n" + " \001(\003\"\235\001\n" + "\017SpeechEventType\022\034\n" + "\030SPEECH_EVENT_UNSPECIFIED\020\000\022\033\n" + "\027END_OF_SINGLE_UTTERANCE\020\001\022\031\n" + "\025SPEECH_ACTIVITY_BEGIN\020\002\022\027\n" + "\023SPEECH_ACTIVITY_END\020\003\022\033\n" + "\027SPEECH_ACTIVITY_TIMEOUT\020\004\"\371\001\n" + "\032StreamingRecognitionResult\022Q\n" + "\014alternatives\030\001 \003(\0132;.google.c" + "loud.speech.v1p1beta1.SpeechRecognitionAlternative\022\020\n" + "\010is_final\030\002 \001(\010\022\021\n" + "\tstability\030\003 \001(\002\0222\n" + "\017result_end_time\030\004 \001(\0132\031.google.protobuf.Duration\022\023\n" + "\013channel_tag\030\005 \001(\005\022\032\n\r" + "language_code\030\006 \001(\tB\003\340A\003\"\321\001\n" + "\027SpeechRecognitionResult\022Q\n" + "\014alternatives\030\001 \003(\0132;" + ".google.cloud.speech.v1p1beta1.SpeechRecognitionAlternative\022\023\n" + "\013channel_tag\030\002 \001(\005\0222\n" + "\017result_end_time\030\004 \001(\0132\031.google.protobuf.Duration\022\032\n\r" + "language_code\030\005 \001(\tB\003\340A\003\"~\n" + "\034SpeechRecognitionAlternative\022\022\n\n" + "transcript\030\001 \001(\t\022\022\n\n" + "confidence\030\002 \001(\002\0226\n" + "\005words\030\003 \003(\0132\'.google.cloud.speech.v1p1beta1.WordInfo\"\300\001\n" + "\010WordInfo\022-\n\n" + "start_time\030\001 \001(\0132\031.google.protobuf.Duration\022+\n" + "\010end_time\030\002 \001(\0132\031.google.protobuf.Duration\022\014\n" + "\004word\030\003 \001(\t\022\022\n\n" + "confidence\030\004 \001(\002\022\032\n" + "\013speaker_tag\030\005 \001(\005B\005\030\001\340A\003\022\032\n\r" + "speaker_label\030\006 \001(\tB\003\340A\003\"K\n" + "\024SpeechAdaptationInfo\022\032\n" + "\022adaptation_timeout\030\001 \001(\010\022\027\n" + "\017timeout_message\030\004 
\001(\t2\202\005\n" + "\006Speech\022\245\001\n" + "\tRecognize\022/.google.cloud.speech.v1p1beta1.RecognizeRequest\0320.goo" + "gle.cloud.speech.v1p1beta1.RecognizeResponse\"5\332A\014config,audio\202\323\344\223\002" + " \"\033/v1p1beta1/speech:recognize:\001*\022\362\001\n" + "\024LongRunningRecognize\022:.google.cloud.speech.v1p1beta1.Lon" + "gRunningRecognizeRequest\032\035.google.longrunning.Operation\"\177\312A<\n" + "\034LongRunningRecognizeResponse\022\034LongRunningRecognizeMetadata" + "\332A\014config,audio\202\323\344\223\002+\"&/v1p1beta1/speech:longrunningrecognize:\001*\022\217\001\n" + "\022StreamingRecognize\0228.google.cloud.speech.v1p1beta1." + "StreamingRecognizeRequest\0329.google.cloud.speech.v1p1beta1.StreamingRecognizeResp" + "onse\"\000(\0010\001\032I\312A\025speech.googleapis.com\322A.h" + "ttps://www.googleapis.com/auth/cloud-platformBs\n" + "!com.google.cloud.speech.v1p1beta1B\013SpeechProtoP\001Z9cloud.google.com/go/s" + "peech/apiv1p1beta1/speechpb;speechpb\242\002\003GCSb\006proto3" }; descriptor = com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom( descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { com.google.api.AnnotationsProto.getDescriptor(), com.google.api.ClientProto.getDescriptor(), com.google.api.FieldBehaviorProto.getDescriptor(), com.google.cloud.speech.v1p1beta1.SpeechResourceProto.getDescriptor(), com.google.longrunning.OperationsProto.getDescriptor(), com.google.protobuf.DurationProto.getDescriptor(), com.google.protobuf.TimestampProto.getDescriptor(), com.google.protobuf.WrappersProto.getDescriptor(), com.google.rpc.StatusProto.getDescriptor(), }); internal_static_google_cloud_speech_v1p1beta1_RecognizeRequest_descriptor = getDescriptor().getMessageTypes().get(0); internal_static_google_cloud_speech_v1p1beta1_RecognizeRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( 
internal_static_google_cloud_speech_v1p1beta1_RecognizeRequest_descriptor, new java.lang.String[] { "Config", "Audio", }); internal_static_google_cloud_speech_v1p1beta1_LongRunningRecognizeRequest_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_google_cloud_speech_v1p1beta1_LongRunningRecognizeRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_speech_v1p1beta1_LongRunningRecognizeRequest_descriptor, new java.lang.String[] { "Config", "Audio", "OutputConfig", }); internal_static_google_cloud_speech_v1p1beta1_TranscriptOutputConfig_descriptor = getDescriptor().getMessageTypes().get(2); internal_static_google_cloud_speech_v1p1beta1_TranscriptOutputConfig_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_speech_v1p1beta1_TranscriptOutputConfig_descriptor, new java.lang.String[] { "GcsUri", "OutputType", }); internal_static_google_cloud_speech_v1p1beta1_StreamingRecognizeRequest_descriptor = getDescriptor().getMessageTypes().get(3); internal_static_google_cloud_speech_v1p1beta1_StreamingRecognizeRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_speech_v1p1beta1_StreamingRecognizeRequest_descriptor, new java.lang.String[] { "StreamingConfig", "AudioContent", "StreamingRequest", }); internal_static_google_cloud_speech_v1p1beta1_StreamingRecognitionConfig_descriptor = getDescriptor().getMessageTypes().get(4); internal_static_google_cloud_speech_v1p1beta1_StreamingRecognitionConfig_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_speech_v1p1beta1_StreamingRecognitionConfig_descriptor, new java.lang.String[] { "Config", "SingleUtterance", "InterimResults", "EnableVoiceActivityEvents", "VoiceActivityTimeout", }); 
internal_static_google_cloud_speech_v1p1beta1_StreamingRecognitionConfig_VoiceActivityTimeout_descriptor = internal_static_google_cloud_speech_v1p1beta1_StreamingRecognitionConfig_descriptor .getNestedTypes() .get(0); internal_static_google_cloud_speech_v1p1beta1_StreamingRecognitionConfig_VoiceActivityTimeout_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_speech_v1p1beta1_StreamingRecognitionConfig_VoiceActivityTimeout_descriptor, new java.lang.String[] { "SpeechStartTimeout", "SpeechEndTimeout", }); internal_static_google_cloud_speech_v1p1beta1_RecognitionConfig_descriptor = getDescriptor().getMessageTypes().get(5); internal_static_google_cloud_speech_v1p1beta1_RecognitionConfig_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_speech_v1p1beta1_RecognitionConfig_descriptor, new java.lang.String[] { "Encoding", "SampleRateHertz", "AudioChannelCount", "EnableSeparateRecognitionPerChannel", "LanguageCode", "AlternativeLanguageCodes", "MaxAlternatives", "ProfanityFilter", "Adaptation", "TranscriptNormalization", "SpeechContexts", "EnableWordTimeOffsets", "EnableWordConfidence", "EnableAutomaticPunctuation", "EnableSpokenPunctuation", "EnableSpokenEmojis", "EnableSpeakerDiarization", "DiarizationSpeakerCount", "DiarizationConfig", "Metadata", "Model", "UseEnhanced", }); internal_static_google_cloud_speech_v1p1beta1_SpeakerDiarizationConfig_descriptor = getDescriptor().getMessageTypes().get(6); internal_static_google_cloud_speech_v1p1beta1_SpeakerDiarizationConfig_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_speech_v1p1beta1_SpeakerDiarizationConfig_descriptor, new java.lang.String[] { "EnableSpeakerDiarization", "MinSpeakerCount", "MaxSpeakerCount", "SpeakerTag", }); internal_static_google_cloud_speech_v1p1beta1_RecognitionMetadata_descriptor = 
getDescriptor().getMessageTypes().get(7); internal_static_google_cloud_speech_v1p1beta1_RecognitionMetadata_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_speech_v1p1beta1_RecognitionMetadata_descriptor, new java.lang.String[] { "InteractionType", "IndustryNaicsCodeOfAudio", "MicrophoneDistance", "OriginalMediaType", "RecordingDeviceType", "RecordingDeviceName", "OriginalMimeType", "ObfuscatedId", "AudioTopic", }); internal_static_google_cloud_speech_v1p1beta1_SpeechContext_descriptor = getDescriptor().getMessageTypes().get(8); internal_static_google_cloud_speech_v1p1beta1_SpeechContext_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_speech_v1p1beta1_SpeechContext_descriptor, new java.lang.String[] { "Phrases", "Boost", }); internal_static_google_cloud_speech_v1p1beta1_RecognitionAudio_descriptor = getDescriptor().getMessageTypes().get(9); internal_static_google_cloud_speech_v1p1beta1_RecognitionAudio_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_speech_v1p1beta1_RecognitionAudio_descriptor, new java.lang.String[] { "Content", "Uri", "AudioSource", }); internal_static_google_cloud_speech_v1p1beta1_RecognizeResponse_descriptor = getDescriptor().getMessageTypes().get(10); internal_static_google_cloud_speech_v1p1beta1_RecognizeResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_speech_v1p1beta1_RecognizeResponse_descriptor, new java.lang.String[] { "Results", "TotalBilledTime", "SpeechAdaptationInfo", "RequestId", "UsingLegacyModels", }); internal_static_google_cloud_speech_v1p1beta1_LongRunningRecognizeResponse_descriptor = getDescriptor().getMessageTypes().get(11); internal_static_google_cloud_speech_v1p1beta1_LongRunningRecognizeResponse_fieldAccessorTable = new 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_speech_v1p1beta1_LongRunningRecognizeResponse_descriptor, new java.lang.String[] { "Results", "TotalBilledTime", "OutputConfig", "OutputError", "SpeechAdaptationInfo", "RequestId", }); internal_static_google_cloud_speech_v1p1beta1_LongRunningRecognizeMetadata_descriptor = getDescriptor().getMessageTypes().get(12); internal_static_google_cloud_speech_v1p1beta1_LongRunningRecognizeMetadata_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_speech_v1p1beta1_LongRunningRecognizeMetadata_descriptor, new java.lang.String[] { "ProgressPercent", "StartTime", "LastUpdateTime", "Uri", "OutputConfig", }); internal_static_google_cloud_speech_v1p1beta1_StreamingRecognizeResponse_descriptor = getDescriptor().getMessageTypes().get(13); internal_static_google_cloud_speech_v1p1beta1_StreamingRecognizeResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_speech_v1p1beta1_StreamingRecognizeResponse_descriptor, new java.lang.String[] { "Error", "Results", "SpeechEventType", "SpeechEventTime", "TotalBilledTime", "SpeechAdaptationInfo", "RequestId", }); internal_static_google_cloud_speech_v1p1beta1_StreamingRecognitionResult_descriptor = getDescriptor().getMessageTypes().get(14); internal_static_google_cloud_speech_v1p1beta1_StreamingRecognitionResult_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_speech_v1p1beta1_StreamingRecognitionResult_descriptor, new java.lang.String[] { "Alternatives", "IsFinal", "Stability", "ResultEndTime", "ChannelTag", "LanguageCode", }); internal_static_google_cloud_speech_v1p1beta1_SpeechRecognitionResult_descriptor = getDescriptor().getMessageTypes().get(15); internal_static_google_cloud_speech_v1p1beta1_SpeechRecognitionResult_fieldAccessorTable = new 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_speech_v1p1beta1_SpeechRecognitionResult_descriptor, new java.lang.String[] { "Alternatives", "ChannelTag", "ResultEndTime", "LanguageCode", }); internal_static_google_cloud_speech_v1p1beta1_SpeechRecognitionAlternative_descriptor = getDescriptor().getMessageTypes().get(16); internal_static_google_cloud_speech_v1p1beta1_SpeechRecognitionAlternative_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_speech_v1p1beta1_SpeechRecognitionAlternative_descriptor, new java.lang.String[] { "Transcript", "Confidence", "Words", }); internal_static_google_cloud_speech_v1p1beta1_WordInfo_descriptor = getDescriptor().getMessageTypes().get(17); internal_static_google_cloud_speech_v1p1beta1_WordInfo_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_speech_v1p1beta1_WordInfo_descriptor, new java.lang.String[] { "StartTime", "EndTime", "Word", "Confidence", "SpeakerTag", "SpeakerLabel", }); internal_static_google_cloud_speech_v1p1beta1_SpeechAdaptationInfo_descriptor = getDescriptor().getMessageTypes().get(18); internal_static_google_cloud_speech_v1p1beta1_SpeechAdaptationInfo_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_speech_v1p1beta1_SpeechAdaptationInfo_descriptor, new java.lang.String[] { "AdaptationTimeout", "TimeoutMessage", }); com.google.protobuf.ExtensionRegistry registry = com.google.protobuf.ExtensionRegistry.newInstance(); registry.add(com.google.api.ClientProto.defaultHost); registry.add(com.google.api.FieldBehaviorProto.fieldBehavior); registry.add(com.google.api.AnnotationsProto.http); registry.add(com.google.api.ClientProto.methodSignature); registry.add(com.google.api.ClientProto.oauthScopes); registry.add(com.google.longrunning.OperationsProto.operationInfo); 
com.google.protobuf.Descriptors.FileDescriptor.internalUpdateFileDescriptor( descriptor, registry); com.google.api.AnnotationsProto.getDescriptor(); com.google.api.ClientProto.getDescriptor(); com.google.api.FieldBehaviorProto.getDescriptor(); com.google.cloud.speech.v1p1beta1.SpeechResourceProto.getDescriptor(); com.google.longrunning.OperationsProto.getDescriptor(); com.google.protobuf.DurationProto.getDescriptor(); com.google.protobuf.TimestampProto.getDescriptor(); com.google.protobuf.WrappersProto.getDescriptor(); com.google.rpc.StatusProto.getDescriptor(); } // @@protoc_insertion_point(outer_class_scope) }
apache/phoenix-connectors
36,485
phoenix5-hive4/src/it/java/org/apache/hadoop/hive/ql/QTestUtil.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.ql; import static org.apache.hadoop.hive.metastore.Warehouse.DEFAULT_DATABASE_NAME; import java.io.BufferedOutputStream; import java.io.File; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.FileWriter; import java.io.IOException; import java.io.OutputStream; import java.net.URL; import java.sql.SQLException; import java.util.ArrayList; import java.util.Arrays; import java.util.Deque; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.commons.io.FileUtils; import org.apache.commons.lang3.StringUtils; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.cli.CliDriver; import org.apache.hadoop.hive.cli.CliSessionState; import org.apache.hadoop.hive.cli.control.AbstractCliConfig; import org.apache.hadoop.hive.common.io.CachingPrintStream; import org.apache.hadoop.hive.common.io.SessionStream; import org.apache.hadoop.hive.conf.HiveConf; import 
org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.ql.metadata.HiveMetaStoreClientWithLocalCache; import org.apache.hadoop.hive.metastore.conf.MetastoreConf; import org.apache.hadoop.hive.ql.QTestMiniClusters.FsType; import org.apache.hadoop.hive.ql.cache.results.QueryResultsCache; import org.apache.hadoop.hive.ql.dataset.QTestDatasetHandler; import org.apache.hadoop.hive.ql.exec.FunctionRegistry; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.metadata.HiveMaterializedViewsRegistry; import org.apache.hadoop.hive.ql.metadata.InvalidTableException; import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.ql.metadata.events.NotificationEventPoll; import org.apache.hadoop.hive.ql.parse.ASTNode; import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer; import org.apache.hadoop.hive.ql.parse.ParseDriver; import org.apache.hadoop.hive.ql.parse.ParseException; import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer; import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.ql.plan.mapper.StatsSources; import org.apache.hadoop.hive.ql.processors.CommandProcessor; import org.apache.hadoop.hive.ql.processors.CommandProcessorException; import org.apache.hadoop.hive.ql.processors.CommandProcessorFactory; import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse; import org.apache.hadoop.hive.ql.processors.HiveCommand; import org.apache.hadoop.hive.ql.qoption.QTestAuthorizerHandler; import org.apache.hadoop.hive.ql.qoption.QTestDisabledHandler; //import org.apache.hadoop.hive.ql.qoption.QTestDatabaseHandler; import org.apache.hadoop.hive.ql.qoption.QTestOptionDispatcher; import org.apache.hadoop.hive.ql.qoption.QTestReplaceHandler; import org.apache.hadoop.hive.ql.qoption.QTestSysDbHandler; import org.apache.hadoop.hive.ql.qoption.QTestTimezoneHandler; import 
org.apache.hadoop.hive.ql.qoption.QTestTransactional; import org.apache.hadoop.hive.ql.scheduled.QTestScheduledQueryCleaner; import org.apache.hadoop.hive.ql.scheduled.QTestScheduledQueryServiceProvider; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hive.common.util.ProcessUtils; import org.junit.Assert; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.base.Preconditions; import com.google.common.base.Throwables; /** * QTestUtil. */ public class QTestUtil { private static final Logger LOG = LoggerFactory.getLogger("QTestUtil"); public static final String QTEST_LEAVE_FILES = "QTEST_LEAVE_FILES"; private final String[] testOnlyCommands = new String[]{ "crypto", "erasure" }; public static String DEBUG_HINT = "\nSee ./ql/target/tmp/log/hive.log or ./itests/qtest/target/tmp/log/hive.log, " + "or check ./ql/target/surefire-reports or ./itests/qtest/target/surefire-reports/ for specific test cases logs."; private String testWarehouse; @Deprecated private final String testFiles; private final String outDir; protected final String logDir; private File inputFile; private String inputContent; private final Set<String> srcUDFs; private final FsType fsType; private ParseDriver pd; protected Hive db; protected HiveConf conf; protected HiveConf savedConf; private BaseSemanticAnalyzer sem; private CliDriver cliDriver; private final QTestMiniClusters miniClusters = new QTestMiniClusters(); private final QOutProcessor qOutProcessor; private static QTestResultProcessor qTestResultProcessor = new QTestResultProcessor(); protected QTestDatasetHandler datasetHandler; protected QTestReplaceHandler replaceHandler; private final String initScript; private final String cleanupScript; QTestOptionDispatcher dispatcher = new QTestOptionDispatcher(); private boolean isSessionStateStarted = false; public CliDriver getCliDriver() { if (cliDriver == null) { throw new RuntimeException("no clidriver"); } return cliDriver; } /** * Returns 
the default UDF names which should not be removed when resetting the test database * * @return The list of the UDF names not to remove */ private Set<String> getSrcUDFs() { HashSet<String> srcUDFs = new HashSet<String>(); // FIXME: moved default value to here...for now // i think this features is never really used from the command line String defaultTestSrcUDFs = "qtest_get_java_boolean"; for (String srcUDF : QTestSystemProperties.getSourceUdfs(defaultTestSrcUDFs)) { srcUDF = srcUDF.trim(); if (!srcUDF.isEmpty()) { srcUDFs.add(srcUDF); } } if (srcUDFs.isEmpty()) { throw new RuntimeException("Source UDFs cannot be empty"); } return srcUDFs; } public HiveConf getConf() { return conf; } public void initConf() throws Exception { if (QTestSystemProperties.isVectorizationEnabled()) { conf.setBoolVar(ConfVars.HIVE_VECTORIZATION_ENABLED, true); } // Plug verifying metastore in for testing DirectSQL. conf.setVar(ConfVars.METASTORE_RAW_STORE_IMPL, "org.apache.hadoop.hive.metastore.VerifyingObjectStore"); miniClusters.initConf(conf); } public QTestUtil(QTestArguments testArgs) throws Exception { LOG.info("Setting up QTestUtil with outDir={}, logDir={}, clusterType={}, confDir={}," + " initScript={}, cleanupScript={}, withLlapIo={}, fsType={}", testArgs.getOutDir(), testArgs.getLogDir(), testArgs.getClusterType(), testArgs.getConfDir(), testArgs.getInitScript(), testArgs.getCleanupScript(), testArgs.isWithLlapIo(), testArgs.getFsType()); logClassPath(); Preconditions.checkNotNull(testArgs.getClusterType(), "ClusterType cannot be null"); this.fsType = testArgs.getFsType(); this.outDir = testArgs.getOutDir(); this.logDir = testArgs.getLogDir(); this.srcUDFs = getSrcUDFs(); this.replaceHandler = new QTestReplaceHandler(); this.qOutProcessor = new QOutProcessor(fsType, replaceHandler); // HIVE-14443 move this fall-back logic to CliConfigs if (testArgs.getConfDir() != null && !testArgs.getConfDir().isEmpty()) { HiveConf.setHiveSiteLocation(new URL("file://" + new 
File(testArgs.getConfDir()).toURI().getPath() + "/hive-site.xml")); MetastoreConf.setHiveSiteLocation(HiveConf.getHiveSiteLocation()); System.out.println("Setting hive-site: " + HiveConf.getHiveSiteLocation()); } // For testing configurations set by System.setProperties System.setProperty("hive.query.max.length", "100Mb"); conf = new HiveConf(IDriver.class); setCustomConfs(conf, testArgs.getCustomConfs()); setMetaStoreProperties(); final String scriptsDir = getScriptsDir(conf); this.miniClusters.setup(testArgs, conf, scriptsDir, logDir); initConf(); datasetHandler = new QTestDatasetHandler(conf); testFiles = datasetHandler.getDataDir(conf); conf.set("test.data.dir", datasetHandler.getDataDir(conf)); conf.setVar(ConfVars.HIVE_QUERY_RESULTS_CACHE_DIRECTORY, "/tmp/hive/_resultscache_" + ProcessUtils.getPid()); dispatcher.register("dataset", datasetHandler); dispatcher.register("replace", replaceHandler); dispatcher.register("sysdb", new QTestSysDbHandler()); dispatcher.register("transactional", new QTestTransactional()); dispatcher.register("scheduledqueryservice", new QTestScheduledQueryServiceProvider(conf)); dispatcher.register("scheduledquerycleaner", new QTestScheduledQueryCleaner()); dispatcher.register("timezone", new QTestTimezoneHandler()); dispatcher.register("authorizer", new QTestAuthorizerHandler()); dispatcher.register("disabled", new QTestDisabledHandler()); // dispatcher.register("database", new QTestDatabaseHandler()); this.initScript = scriptsDir + File.separator + testArgs.getInitScript(); this.cleanupScript = scriptsDir + File.separator + testArgs.getCleanupScript(); savedConf = new HiveConf(conf); } private void setCustomConfs(HiveConf conf, Map<ConfVars,String> customConfigValueMap) { customConfigValueMap.entrySet().forEach(item-> conf.set(item.getKey().varname, item.getValue())); } private void logClassPath() { String classpath = System.getProperty("java.class.path"); String[] classpathEntries = classpath.split(File.pathSeparator); 
LOG.info("QTestUtil classpath: " + String.join("\n", Arrays.asList(classpathEntries))); } private void setMetaStoreProperties() { setMetastoreConfPropertyFromSystemProperty(MetastoreConf.ConfVars.CONNECT_URL_KEY); setMetastoreConfPropertyFromSystemProperty(MetastoreConf.ConfVars.CONNECTION_DRIVER); setMetastoreConfPropertyFromSystemProperty(MetastoreConf.ConfVars.CONNECTION_USER_NAME); setMetastoreConfPropertyFromSystemProperty(MetastoreConf.ConfVars.PWD); setMetastoreConfPropertyFromSystemProperty(MetastoreConf.ConfVars.AUTO_CREATE_ALL); } private void setMetastoreConfPropertyFromSystemProperty(MetastoreConf.ConfVars var) { if (System.getProperty(var.getVarname()) != null) { if (var.getDefaultVal().getClass() == Boolean.class) { MetastoreConf.setBoolVar(conf, var, Boolean.getBoolean(System.getProperty(var.getVarname()))); } else { MetastoreConf.setVar(conf, var, System.getProperty(var.getVarname())); } } } public static String getScriptsDir(HiveConf conf) { // Use the current directory if it is not specified String scriptsDir = conf.get("test.data.scripts"); if (scriptsDir == null) { scriptsDir = new File(".").getAbsolutePath() + "/data/scripts"; } return scriptsDir; } public void shutdown() throws Exception { if (System.getenv(QTEST_LEAVE_FILES) == null) { cleanUp(); } miniClusters.shutDown(); Hive.closeCurrent(); } public void setInputFile(String queryFile) throws IOException { setInputFile(new File(queryFile)); } public void setInputFile(File qf) throws IOException { String query = FileUtils.readFileToString(qf); inputFile = qf; inputContent = query; qTestResultProcessor.init(query); qOutProcessor.initMasks(query); } public final File getInputFile() { return inputFile; } /** * Clear out any side effects of running tests */ public void clearPostTestEffects() throws Exception { dispatcher.afterTest(this); miniClusters.postTest(conf); } public void clearKeysCreatedInTests() { if (miniClusters.getHdfsEncryptionShim() == null) { return; } try { for (String keyAlias 
: miniClusters.getHdfsEncryptionShim().getKeys()) { miniClusters.getHdfsEncryptionShim().deleteKey(keyAlias); } } catch (IOException e) { LOG.error("Fail to clean the keys created in test due to the error", e); } } public void clearUDFsCreatedDuringTests() throws Exception { if (System.getenv(QTEST_LEAVE_FILES) != null) { return; } // Delete functions created by the tests // It is enough to remove functions from the default database, other databases are dropped for (String udfName : db.getFunctions(DEFAULT_DATABASE_NAME, ".*")) { if (!srcUDFs.contains(udfName)) { db.dropFunction(DEFAULT_DATABASE_NAME, udfName); } } } /** * Clear out any side effects of running tests */ public void clearTablesCreatedDuringTests() throws Exception { if (System.getenv(QTEST_LEAVE_FILES) != null) { return; } conf.set("hive.metastore.filter.hook", "org.apache.hadoop.hive.metastore.DefaultMetaStoreFilterHookImpl"); db = Hive.get(conf); // First delete any MVs to avoid race conditions for (String dbName : db.getAllDatabases()) { SessionState.get().setCurrentDatabase(dbName); for (String tblName : db.getAllTables()) { Table tblObj = null; try { tblObj = db.getTable(tblName); } catch (InvalidTableException e) { LOG.warn("Trying to drop table " + e.getTableName() + ". But it does not exist."); continue; } // only remove MVs first if (!tblObj.isMaterializedView()) { continue; } db.dropTable(dbName, tblName, true, true, fsType == FsType.ENCRYPTED_HDFS); HiveMaterializedViewsRegistry.get().dropMaterializedView(tblObj.getDbName(), tblObj.getTableName()); } } // Delete any tables other than the source tables // and any databases other than the default database. 
for (String dbName : db.getAllDatabases()) { SessionState.get().setCurrentDatabase(dbName); // FIXME: HIVE-24130 should remove this if (dbName.equalsIgnoreCase("tpch_0_001")) { continue; } for (String tblName : db.getAllTables()) { if (!DEFAULT_DATABASE_NAME.equals(dbName) || !QTestDatasetHandler.isSourceTable(tblName)) { try { db.getTable(tblName); } catch (InvalidTableException e) { LOG.warn("Trying to drop table " + e.getTableName() + ". But it does not exist."); continue; } db.dropTable(dbName, tblName, true, true, miniClusters.fsNeedsPurge(fsType)); } } if (!DEFAULT_DATABASE_NAME.equals(dbName)) { // Drop cascade, functions dropped by cascade db.dropDatabase(dbName, true, true, true); } } // delete remaining directories for external tables (can affect stats for following tests) try { Path p = new Path(testWarehouse); FileSystem fileSystem = p.getFileSystem(conf); if (fileSystem.exists(p)) { for (FileStatus status : fileSystem.listStatus(p)) { if (status.isDirectory() && !QTestDatasetHandler.isSourceTable(status.getPath().getName())) { fileSystem.delete(status.getPath(), true); } } } } catch (IllegalArgumentException e) { // ignore.. 
provides invalid url sometimes intentionally } SessionState.get().setCurrentDatabase(DEFAULT_DATABASE_NAME); List<String> roleNames = db.getAllRoleNames(); for (String roleName : roleNames) { if (!"PUBLIC".equalsIgnoreCase(roleName) && !"ADMIN".equalsIgnoreCase(roleName)) { db.dropRole(roleName); } } } public void newSession() throws Exception { newSession(true); } public void newSession(boolean canReuseSession) throws Exception { // allocate and initialize a new conf since a test can // modify conf by using 'set' commands conf = new HiveConf(savedConf); initConf(); initConfFromSetup(); // renew the metastore since the cluster type is unencrypted db = Hive.get(conf); // propagate new conf to meta store HiveConf.setVar(conf, HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER, "org.apache.hadoop.hive.ql.security.DummyAuthenticator"); CliSessionState ss = new CliSessionState(conf); ss.in = System.in; SessionState oldSs = SessionState.get(); miniClusters.restartSessions(canReuseSession, ss, oldSs); closeSession(oldSs); SessionState.start(ss); cliDriver = new CliDriver(); File outf = new File(logDir, "initialize.log"); setSessionOutputs(ss, outf); } /** * Clear out any side effects of running tests */ public void clearTestSideEffects() throws Exception { if (System.getenv(QTEST_LEAVE_FILES) != null) { return; } // the test might have configured security/etc; open a new session to get rid of that newSession(); // Remove any cached results from the previous test. 
Utilities.clearWorkMap(conf); NotificationEventPoll.shutdown(); QueryResultsCache.cleanupInstance(); clearTablesCreatedDuringTests(); clearUDFsCreatedDuringTests(); clearKeysCreatedInTests(); StatsSources.clearGlobalStats(); dispatcher.afterTest(this); } protected void initConfFromSetup() throws Exception { miniClusters.preTest(conf); } public void cleanUp() throws Exception { if (!isSessionStateStarted) { startSessionState(qTestResultProcessor.canReuseSession()); } if (System.getenv(QTEST_LEAVE_FILES) != null) { return; } conf.setBoolean("hive.test.shutdown.phase", true); clearKeysCreatedInTests(); String metastoreDb = QTestSystemProperties.getMetaStoreDb(); if (metastoreDb == null || "derby".equalsIgnoreCase(metastoreDb)) { // otherwise, the docker container is already destroyed by this time cleanupFromFile(); } // delete any contents in the warehouse dir Path p = new Path(testWarehouse); FileSystem fs = p.getFileSystem(conf); try { FileStatus[] ls = fs.listStatus(p); for (int i = 0; (ls != null) && (i < ls.length); i++) { fs.delete(ls[i].getPath(), true); } } catch (FileNotFoundException e) { // Best effort } // TODO: Clean up all the other paths that are created. FunctionRegistry.unregisterTemporaryUDF("test_udaf"); FunctionRegistry.unregisterTemporaryUDF("test_error"); } private void cleanupFromFile() throws IOException { File cleanupFile = new File(cleanupScript); if (cleanupFile.isFile()) { String cleanupCommands = FileUtils.readFileToString(cleanupFile); LOG.info("Cleanup (" + cleanupScript + "):\n" + cleanupCommands); try { getCliDriver().processLine(cleanupCommands); } catch (CommandProcessorException e) { LOG.error("Failed during cleanup processLine with code={}. Ignoring", e.getResponseCode()); // TODO Convert this to an Assert.fail once HIVE-14682 is fixed } } else { LOG.info("No cleanup script detected. 
Skipping."); } } public void createSources() throws Exception { if (!isSessionStateStarted) { startSessionState(qTestResultProcessor.canReuseSession()); } getCliDriver().processLine("set test.data.dir=" + testFiles + ";"); conf.setBoolean("hive.test.init.phase", true); initFromScript(); conf.setBoolean("hive.test.init.phase", false); } private void initFromScript() throws IOException { File scriptFile = new File(this.initScript); if (!scriptFile.isFile()) { LOG.info("No init script detected. Skipping"); return; } String initCommands = FileUtils.readFileToString(scriptFile); LOG.info("Initial setup (" + initScript + "):\n" + initCommands); try { cliDriver.processLine(initCommands); LOG.info("Result from cliDrriver.processLine in createSources=0"); } catch (CommandProcessorException e) { Assert.fail("Failed during createSources processLine with code=" + e.getResponseCode()); } } public void postInit() throws Exception { miniClusters.postInit(conf); sem = new SemanticAnalyzer(new QueryState.Builder().withHiveConf(conf).build()); testWarehouse = conf.getVar(HiveConf.ConfVars.METASTORE_WAREHOUSE); db = Hive.get(conf); pd = new ParseDriver(); initMaterializedViews(); // Create views registry firstStartSessionState(); // setup metastore client cache if (conf.getBoolVar(ConfVars.MSC_CACHE_ENABLED)) { HiveMetaStoreClientWithLocalCache.init(conf); } } private void initMaterializedViews() { String registryImpl = db.getConf().get("hive.server2.materializedviews.registry.impl"); db.getConf().set("hive.server2.materializedviews.registry.impl", "DUMMY"); HiveMaterializedViewsRegistry.get().init(db); db.getConf().set("hive.server2.materializedviews.registry.impl", registryImpl); } //FIXME: check why mr is needed for starting a session state from conf private void firstStartSessionState() { String execEngine = conf.get("hive.execution.engine"); conf.set("hive.execution.engine", "mr"); SessionState.start(conf); conf.set("hive.execution.engine", execEngine); } public String cliInit() 
throws Exception { File file = Objects.requireNonNull(inputFile); String fileName = inputFile.getName(); dispatcher.process(file); dispatcher.beforeTest(this); if (!qTestResultProcessor.canReuseSession()) { newSession(false); } CliSessionState ss = (CliSessionState) SessionState.get(); String outFileExtension = getOutFileExtension(); String stdoutName = null; if (outDir != null) { // TODO: why is this needed? File qf = new File(outDir, fileName); stdoutName = qf.getName().concat(outFileExtension); } else { stdoutName = fileName + outFileExtension; } File outf = new File(logDir, stdoutName); setSessionOutputs(ss, outf); ss.setIsQtestLogging(true); if (fileName.equals("init_file.q")) { ss.initFiles.add(AbstractCliConfig.HIVE_ROOT + "/data/scripts/test_init_file.sql"); } cliDriver.processInitFiles(ss); return outf.getAbsolutePath(); } private void setSessionOutputs(CliSessionState ss, File outf) throws Exception { OutputStream fo = new BufferedOutputStream(new FileOutputStream(outf)); if (ss.out != null) { ss.out.flush(); } if (ss.err != null) { ss.err.flush(); } qTestResultProcessor.setOutputs(ss, fo); ss.err = new CachingPrintStream(fo, true, "UTF-8"); ss.setIsSilent(true); ss.setIsQtestLogging(true); } public CliSessionState startSessionState(boolean canReuseSession) throws IOException { HiveConf.setVar(conf, HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER, "org.apache.hadoop.hive.ql.security.DummyAuthenticator"); //FIXME: check why mr is needed for starting a session state from conf String execEngine = conf.get("hive.execution.engine"); conf.set("hive.execution.engine", "mr"); CliSessionState ss = new CliSessionState(conf); ss.in = System.in; ss.out = new SessionStream(System.out); ss.err = new SessionStream(System.out); SessionState oldSs = SessionState.get(); miniClusters.restartSessions(canReuseSession, ss, oldSs); closeSession(oldSs); SessionState.start(ss); isSessionStateStarted = true; conf.set("hive.execution.engine", execEngine); return ss; } private void 
closeSession(SessionState oldSs) throws IOException { if (oldSs != null && oldSs.out != null && oldSs.out != System.out) { oldSs.out.close(); } if (oldSs != null) { oldSs.close(); } } public int executeAdHocCommand(String q) throws CommandProcessorException { if (!q.contains(";")) { return -1; } String q1 = q.split(";")[0] + ";"; LOG.debug("Executing " + q1); cliDriver.processLine(q1); return 0; } public CommandProcessorResponse executeClient() throws CommandProcessorException { return executeClientInternal(getCommand()); } private CommandProcessorResponse executeClientInternal(String commands) throws CommandProcessorException { List<String> cmds = CliDriver.splitSemiColon(commands); CommandProcessorResponse response = new CommandProcessorResponse(); StringBuilder command = new StringBuilder(); QTestSyntaxUtil qtsu = new QTestSyntaxUtil(this, conf, pd); qtsu.checkQFileSyntax(cmds); for (String oneCmd : cmds) { if (StringUtils.endsWith(oneCmd, "\\")) { command.append(StringUtils.chop(oneCmd) + "\\;"); continue; } else { if (isHiveCommand(oneCmd)) { command.setLength(0); } command.append(oneCmd); } if (StringUtils.isBlank(command.toString())) { continue; } String strCommand = command.toString(); try { if (isCommandUsedForTesting(strCommand)) { response = executeTestCommand(strCommand); } else { response = cliDriver.processLine(strCommand); } } catch (CommandProcessorException e) { if (!ignoreErrors()) { throw e; } } command.setLength(0); } if (SessionState.get() != null) { SessionState.get().setLastCommand(null); // reset } return response; } /** * This allows a .q file to continue executing after a statement runs into an error which is convenient * if you want to use another hive cmd after the failure to sanity check the state of the system. 
 */
private boolean ignoreErrors() {
  return conf.getBoolVar(HiveConf.ConfVars.CLI_IGNORE_ERRORS);
}

// True when the first word of the command is a recognized Hive CLI command,
// including commands registered as test-only.
boolean isHiveCommand(String command) {
  String[] cmd = command.trim().split("\\s+");
  if (HiveCommand.find(cmd) != null) {
    return true;
  } else if (HiveCommand.find(cmd, HiveCommand.ONLY_FOR_TESTING) != null) {
    return true;
  } else {
    return false;
  }
}

/**
 * Executes a test-only command (e.g. a qtest helper command).
 *
 * Strips a trailing ';', substitutes {@code ${hiveconf:hive.metastore.warehouse.dir}}
 * with the actual warehouse directory, records the command as the session's last
 * command, whitelists test-only commands, and dispatches to the matching
 * {@link CommandProcessor}.
 */
private CommandProcessorResponse executeTestCommand(String command) throws CommandProcessorException {
  String commandName = command.trim().split("\\s+")[0];
  String commandArgs = command.trim().substring(commandName.length());
  if (commandArgs.endsWith(";")) {
    commandArgs = StringUtils.chop(commandArgs);
  }
  // replace ${hiveconf:hive.metastore.warehouse.dir} with actual dir if existed.
  // we only want the absolute path, so remove the header, such as hdfs://localhost:57145
  String wareHouseDir =
      SessionState.get().getConf().getVar(ConfVars.METASTORE_WAREHOUSE).replaceAll("^[a-zA-Z]+://.*?:\\d+", "");
  commandArgs = commandArgs.replaceAll("\\$\\{hiveconf:hive\\.metastore\\.warehouse\\.dir\\}", wareHouseDir);
  if (SessionState.get() != null) {
    SessionState.get().setLastCommand(commandName + " " + commandArgs.trim());
  }
  enableTestOnlyCmd(SessionState.get().getConf());
  try {
    CommandProcessor proc = getTestCommand(commandName);
    if (proc != null) {
      try {
        CommandProcessorResponse response = proc.run(commandArgs.trim());
        return response;
      } catch (CommandProcessorException e) {
        SessionState.getConsole().printError(e.toString(), e.getCause() != null ?
Throwables.getStackTraceAsString(e.getCause()) : "");
        throw e;
      }
    } else {
      throw new RuntimeException("Could not get CommandProcessor for command: " + commandName);
    }
  } catch (Exception e) {
    throw new RuntimeException("Could not execute test command", e);
  }
}

/**
 * Looks up the {@link CommandProcessor} for a test-only command.
 *
 * @return the processor, or null when {@code commandName} is not a test-only command
 */
private CommandProcessor getTestCommand(final String commandName) throws SQLException {
  HiveCommand testCommand = HiveCommand.find(new String[]{ commandName }, HiveCommand.ONLY_FOR_TESTING);
  if (testCommand == null) {
    return null;
  }
  return CommandProcessorFactory.getForHiveCommandInternal(new String[]{ commandName },
      SessionState.get().getConf(), testCommand.isOnlyForTesting());
}

// Appends the registered test-only commands to the security command whitelist
// so they are allowed to run during qtests.
private void enableTestOnlyCmd(HiveConf conf) {
  StringBuilder securityCMDs =
      new StringBuilder(conf.getVar(HiveConf.ConfVars.HIVE_SECURITY_COMMAND_WHITELIST));
  for (String c : testOnlyCommands) {
    securityCMDs.append(",");
    securityCMDs.append(c);
  }
  conf.set(HiveConf.ConfVars.HIVE_SECURITY_COMMAND_WHITELIST.toString(), securityCMDs.toString());
}

// True when the command's first word is registered as a test-only Hive command.
private boolean isCommandUsedForTesting(final String command) {
  String commandName = command.trim().split("\\s+")[0];
  HiveCommand testCommand = HiveCommand.find(new String[]{ commandName }, HiveCommand.ONLY_FOR_TESTING);
  return testCommand != null;
}

/**
 * Returns the .q file's content with all full-line "--" comments stripped out.
 */
private String getCommand() {
  String commands = inputContent;
  StringBuilder newCommands = new StringBuilder(commands.length());
  int lastMatchEnd = 0;
  Matcher commentMatcher = Pattern.compile("^--.*$", Pattern.MULTILINE).matcher(commands);
  // remove the comments
  while (commentMatcher.find()) {
    newCommands.append(commands.substring(lastMatchEnd, commentMatcher.start()));
    lastMatchEnd = commentMatcher.end();
  }
  newCommands.append(commands.substring(lastMatchEnd, commands.length()));
  commands = newCommands.toString();
  return commands;
}

/** Extension used for reference/output files. */
private String getOutFileExtension() {
  return ".out";
}

/**
 * Writes the expected-failure output for a negative test and diffs it against
 * the reference file.
 *
 * @param tname test name (used to locate the reference file)
 * @param e     the exception the test produced; rethrown if it is neither a
 *              ParseException nor a SemanticException
 */
public QTestProcessExecResult checkNegativeResults(String tname, Exception e) throws Exception {
  String outFileExtension = getOutFileExtension();
  File qf = new
File(outDir, tname);
  String expf = outPath(outDir.toString(), tname.concat(outFileExtension));
  File outf = null;
  outf = new File(logDir);
  outf = new File(outf, qf.getName().concat(outFileExtension));
  // NOTE(review): FileWriter uses the platform default charset — presumably fine
  // for qtest logs, but worth confirming against the diff tooling.
  FileWriter outfd = new FileWriter(outf);
  if (e instanceof ParseException) {
    outfd.write("Parse Error: ");
  } else if (e instanceof SemanticException) {
    outfd.write("Semantic Exception: \n");
  } else {
    // Any other exception type means the negative test failed unexpectedly.
    outfd.close();
    throw e;
  }
  outfd.write(e.getMessage());
  outfd.close();
  QTestProcessExecResult result = qTestResultProcessor.executeDiffCommand(outf.getPath(), expf, false);
  if (QTestSystemProperties.shouldOverwriteResults()) {
    qTestResultProcessor.overwriteResults(outf.getPath(), expf);
    return QTestProcessExecResult.createWithoutOutput(0);
  }
  return result;
}

/**
 * Error-variant of {@link #checkNegativeResults(String, Exception)}: appends a
 * "FAILED: ..." line describing the Error to the output file and diffs it
 * against the reference file.
 */
public QTestProcessExecResult checkNegativeResults(String tname, Error e) throws Exception {
  String outFileExtension = getOutFileExtension();
  File qf = new File(outDir, tname);
  String expf = outPath(outDir.toString(), tname.concat(outFileExtension));
  File outf = null;
  outf = new File(logDir);
  outf = new File(outf, qf.getName().concat(outFileExtension));
  // Append mode: the error line is added after whatever the test already wrote.
  FileWriter outfd = new FileWriter(outf, true);
  outfd.write("FAILED: " + e.getClass().getSimpleName() + " " + e.getClass().getName() + ": "
      + e.getMessage() + "\n");
  outfd.close();
  QTestProcessExecResult result = qTestResultProcessor.executeDiffCommand(outf.getPath(), expf, false);
  if (QTestSystemProperties.shouldOverwriteResults()) {
    qTestResultProcessor.overwriteResults(outf.getPath(), expf);
    return QTestProcessExecResult.createWithoutOutput(0);
  }
  return result;
}

/**
 * Given the current configurations (e.g., hadoop version and execution mode), return
 * the correct file name to compare with the current test run output.
 *
 * @param outDir The directory where the reference log files are stored.
 * @param testName The test file name (terminated by ".out").
 * @return The file name appended with the configuration values if it exists.
 */
public String outPath(String outDir, String testName) {
  String ret = (new File(outDir, testName)).getPath();
  // List of configurations. Currently the list consists of hadoop version and execution mode only
  List<String> configs = new ArrayList<String>();
  configs.add(miniClusters.getClusterType().getQOutFileExtensionPostfix());
  // Build candidate names from most specific (all config suffixes) to least
  // specific (bare testName); the stack makes the most specific pop first.
  Deque<String> stack = new LinkedList<String>();
  StringBuilder sb = new StringBuilder();
  sb.append(testName);
  stack.push(sb.toString());
  // example file names are input1.q.out_mr_0.17 or input2.q.out_0.17
  for (String s : configs) {
    sb.append('_');
    sb.append(s);
    stack.push(sb.toString());
  }
  while (stack.size() > 0) {
    String fileName = stack.pop();
    File f = new File(outDir, fileName);
    if (f.exists()) {
      ret = f.getPath();
      break;
    }
  }
  return ret;
}

/**
 * Masks volatile patterns in the test's output log, then either overwrites the
 * reference file (when configured to) or diffs the log against it.
 */
public QTestProcessExecResult checkCliDriverResults() throws Exception {
  String tname = inputFile.getName();
  String outFileExtension = getOutFileExtension();
  String outFileName = outPath(outDir, tname + outFileExtension);
  File f = new File(logDir, tname + outFileExtension);
  qOutProcessor.maskPatterns(f.getPath());
  if (QTestSystemProperties.shouldOverwriteResults()) {
    qTestResultProcessor.overwriteResults(f.getPath(), outFileName);
    return QTestProcessExecResult.createWithoutOutput(0);
  } else {
    return qTestResultProcessor.executeDiffCommand(f.getPath(), outFileName, false);
  }
}

/** Parses the current .q file's content into an AST. */
public ASTNode parseQuery() throws Exception {
  return pd.parse(inputContent).getTree();
}

/**
 * Runs semantic analysis on the given AST and returns the generated root tasks.
 * Descends to the first child with a real token before analyzing.
 */
public List<Task<?>> analyzeAST(ASTNode ast) throws Exception {
  // Do semantic analysis and plan generation
  Context ctx = new Context(conf);
  while ((ast.getToken() == null) && (ast.getChildCount() > 0)) {
    ast = (ASTNode) ast.getChild(0);
  }
  sem.getOutputs().clear();
  sem.getInputs().clear();
  sem.analyze(ast, ctx);
  ctx.clear();
  return sem.getRootTasks();
}

// for negative tests, which is succeeded..
// no need to print the query string
/** Fails the JUnit run: a negative test succeeded when it was expected to fail. */
public void failed(String fname, String debugHint) {
  Assert.fail("Client Execution was expected to fail, but succeeded with error code 0 for fname="
      + fname + (debugHint != null ? (" " + debugHint) : ""));
}

/** Fails the run: the test executed but its output differed from the reference file. */
public void failedDiff(int ecode, String fname, String debugHint) {
  String message = "Client Execution succeeded but contained differences "
      + "(error code = " + ecode + ") after executing " + fname
      + (debugHint != null ? (" " + debugHint) : "");
  LOG.error(message);
  Assert.fail(message);
}

/**
 * Fails the run for a query that returned a non-zero error code, logging the
 * last executed command and the stringified exception for diagnosis.
 */
public void failedQuery(Throwable e, int ecode, String fname, String debugHint) {
  String command = SessionState.get() != null ? SessionState.get().getLastCommand() : null;
  String message = String.format(
      "Client execution failed with error code = %d %nrunning %s %nfname=%s%n%s%n %s",
      ecode, command != null ? command : "", fname, debugHint != null ? debugHint : "",
      e == null ? "" : org.apache.hadoop.util.StringUtils.stringifyException(e));
  LOG.error(message);
  Assert.fail(message);
}

/** Fails the run for an unexpected exception thrown outside normal query execution. */
public void failedWithException(Exception e, String fname, String debugHint) {
  String command = SessionState.get() != null ? SessionState.get().getLastCommand() : null;
  System.err.println("Failed query: " + fname);
  System.err.flush();
  Assert.fail("Unexpected exception " + org.apache.hadoop.util.StringUtils.stringifyException(e)
      + "\n" + (command != null ? " running " + command : "")
      + (debugHint != null ? debugHint : ""));
}

public QOutProcessor getQOutProcessor() {
  return qOutProcessor;
}

/** Initializes metastore event-notification polling for the current session. */
public static void initEventNotificationPoll() throws Exception {
  NotificationEventPoll.initialize(SessionState.get().getConf());
}
}
googleapis/google-cloud-java
36,635
java-gkehub/proto-google-cloud-gkehub-v1alpha/src/main/java/com/google/cloud/gkehub/configmanagement/v1alpha/PolicyControllerState.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/gkehub/v1alpha/configmanagement/configmanagement.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.gkehub.configmanagement.v1alpha; /** * * * <pre> * State for PolicyControllerState. * </pre> * * Protobuf type {@code google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerState} */ public final class PolicyControllerState extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerState) PolicyControllerStateOrBuilder { private static final long serialVersionUID = 0L; // Use PolicyControllerState.newBuilder() to construct. 
private PolicyControllerState(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private PolicyControllerState() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new PolicyControllerState(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.gkehub.configmanagement.v1alpha.ConfigManagementProto .internal_static_google_cloud_gkehub_configmanagement_v1alpha_PolicyControllerState_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.gkehub.configmanagement.v1alpha.ConfigManagementProto .internal_static_google_cloud_gkehub_configmanagement_v1alpha_PolicyControllerState_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerState.class, com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerState.Builder.class); } private int bitField0_; public static final int VERSION_FIELD_NUMBER = 1; private com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerVersion version_; /** * * * <pre> * The version of Gatekeeper Policy Controller deployed. * </pre> * * <code>.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerVersion version = 1;</code> * * @return Whether the version field is set. */ @java.lang.Override public boolean hasVersion() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * The version of Gatekeeper Policy Controller deployed. * </pre> * * <code>.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerVersion version = 1;</code> * * @return The version. */ @java.lang.Override public com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerVersion getVersion() { return version_ == null ? 
com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerVersion .getDefaultInstance() : version_; } /** * * * <pre> * The version of Gatekeeper Policy Controller deployed. * </pre> * * <code>.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerVersion version = 1;</code> */ @java.lang.Override public com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerVersionOrBuilder getVersionOrBuilder() { return version_ == null ? com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerVersion .getDefaultInstance() : version_; } public static final int DEPLOYMENT_STATE_FIELD_NUMBER = 2; private com.google.cloud.gkehub.configmanagement.v1alpha.GatekeeperDeploymentState deploymentState_; /** * * * <pre> * The state about the policy controller installation. * </pre> * * <code> * .google.cloud.gkehub.configmanagement.v1alpha.GatekeeperDeploymentState deployment_state = 2; * </code> * * @return Whether the deploymentState field is set. */ @java.lang.Override public boolean hasDeploymentState() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * The state about the policy controller installation. * </pre> * * <code> * .google.cloud.gkehub.configmanagement.v1alpha.GatekeeperDeploymentState deployment_state = 2; * </code> * * @return The deploymentState. */ @java.lang.Override public com.google.cloud.gkehub.configmanagement.v1alpha.GatekeeperDeploymentState getDeploymentState() { return deploymentState_ == null ? com.google.cloud.gkehub.configmanagement.v1alpha.GatekeeperDeploymentState .getDefaultInstance() : deploymentState_; } /** * * * <pre> * The state about the policy controller installation. * </pre> * * <code> * .google.cloud.gkehub.configmanagement.v1alpha.GatekeeperDeploymentState deployment_state = 2; * </code> */ @java.lang.Override public com.google.cloud.gkehub.configmanagement.v1alpha.GatekeeperDeploymentStateOrBuilder getDeploymentStateOrBuilder() { return deploymentState_ == null ? 
com.google.cloud.gkehub.configmanagement.v1alpha.GatekeeperDeploymentState .getDefaultInstance() : deploymentState_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getVersion()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getDeploymentState()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getVersion()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getDeploymentState()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerState)) { return super.equals(obj); } com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerState other = (com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerState) obj; if (hasVersion() != other.hasVersion()) return false; if (hasVersion()) { if (!getVersion().equals(other.getVersion())) return false; } if (hasDeploymentState() != other.hasDeploymentState()) return false; if (hasDeploymentState()) { if (!getDeploymentState().equals(other.getDeploymentState())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if 
(memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasVersion()) { hash = (37 * hash) + VERSION_FIELD_NUMBER; hash = (53 * hash) + getVersion().hashCode(); } if (hasDeploymentState()) { hash = (37 * hash) + DEPLOYMENT_STATE_FIELD_NUMBER; hash = (53 * hash) + getDeploymentState().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerState parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerState parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerState parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerState parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerState parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerState parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerState parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerState parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerState parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerState parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerState parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerState parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerState prototype) { 
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * State for PolicyControllerState. * </pre> * * Protobuf type {@code google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerState} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerState) com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerStateOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.gkehub.configmanagement.v1alpha.ConfigManagementProto .internal_static_google_cloud_gkehub_configmanagement_v1alpha_PolicyControllerState_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.gkehub.configmanagement.v1alpha.ConfigManagementProto .internal_static_google_cloud_gkehub_configmanagement_v1alpha_PolicyControllerState_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerState.class, com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerState.Builder.class); } // Construct using // com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerState.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if 
(com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getVersionFieldBuilder(); getDeploymentStateFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; version_ = null; if (versionBuilder_ != null) { versionBuilder_.dispose(); versionBuilder_ = null; } deploymentState_ = null; if (deploymentStateBuilder_ != null) { deploymentStateBuilder_.dispose(); deploymentStateBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.gkehub.configmanagement.v1alpha.ConfigManagementProto .internal_static_google_cloud_gkehub_configmanagement_v1alpha_PolicyControllerState_descriptor; } @java.lang.Override public com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerState getDefaultInstanceForType() { return com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerState .getDefaultInstance(); } @java.lang.Override public com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerState build() { com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerState result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerState buildPartial() { com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerState result = new com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerState(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerState result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.version_ = versionBuilder_ == null ? 
version_ : versionBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.deploymentState_ = deploymentStateBuilder_ == null ? deploymentState_ : deploymentStateBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerState) { return mergeFrom( (com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerState) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerState other) { if (other == com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerState .getDefaultInstance()) return this; if (other.hasVersion()) { mergeVersion(other.getVersion()); } if (other.hasDeploymentState()) { mergeDeploymentState(other.getDeploymentState()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final 
boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getVersionFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getDeploymentStateFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerVersion version_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerVersion, com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerVersion.Builder, com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerVersionOrBuilder> versionBuilder_; /** * * * <pre> * The version of Gatekeeper Policy Controller deployed. * </pre> * * <code>.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerVersion version = 1; * </code> * * @return Whether the version field is set. */ public boolean hasVersion() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * The version of Gatekeeper Policy Controller deployed. * </pre> * * <code>.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerVersion version = 1; * </code> * * @return The version. 
*/ public com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerVersion getVersion() { if (versionBuilder_ == null) { return version_ == null ? com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerVersion .getDefaultInstance() : version_; } else { return versionBuilder_.getMessage(); } } /** * * * <pre> * The version of Gatekeeper Policy Controller deployed. * </pre> * * <code>.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerVersion version = 1; * </code> */ public Builder setVersion( com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerVersion value) { if (versionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } version_ = value; } else { versionBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The version of Gatekeeper Policy Controller deployed. * </pre> * * <code>.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerVersion version = 1; * </code> */ public Builder setVersion( com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerVersion.Builder builderForValue) { if (versionBuilder_ == null) { version_ = builderForValue.build(); } else { versionBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The version of Gatekeeper Policy Controller deployed. 
     * </pre>
     *
     * <code>.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerVersion version = 1;
     * </code>
     */
    // NOTE(review): protoc-generated code (see @@protoc_insertion_point markers below).
    // Do not hand-edit logic; changes will be lost on regeneration.
    public Builder mergeVersion(
        com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerVersion value) {
      if (versionBuilder_ == null) {
        // Merge field-by-field only when a non-default message is already present
        // (reference-compare against the default instance is intentional generated-code
        // style); otherwise adopt the incoming message wholesale.
        if (((bitField0_ & 0x00000001) != 0)
            && version_ != null
            && version_
                != com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerVersion
                    .getDefaultInstance()) {
          getVersionBuilder().mergeFrom(value);
        } else {
          version_ = value;
        }
      } else {
        // A nested builder exists; delegate the merge to it.
        versionBuilder_.mergeFrom(value);
      }
      if (version_ != null) {
        bitField0_ |= 0x00000001; // has-bit for `version`
        onChanged();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The version of Gatekeeper Policy Controller deployed.
     * </pre>
     *
     * <code>.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerVersion version = 1;
     * </code>
     */
    public Builder clearVersion() {
      // Clear the has-bit, drop the message, and dispose any live nested builder.
      bitField0_ = (bitField0_ & ~0x00000001);
      version_ = null;
      if (versionBuilder_ != null) {
        versionBuilder_.dispose();
        versionBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The version of Gatekeeper Policy Controller deployed.
     * </pre>
     *
     * <code>.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerVersion version = 1;
     * </code>
     */
    public com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerVersion.Builder
        getVersionBuilder() {
      // Handing out a mutable builder implies the field is (or will be) set.
      bitField0_ |= 0x00000001;
      onChanged();
      return getVersionFieldBuilder().getBuilder();
    }

    /**
     *
     *
     * <pre>
     * The version of Gatekeeper Policy Controller deployed.
     * </pre>
     *
     * <code>.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerVersion version = 1;
     * </code>
     */
    public com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerVersionOrBuilder
        getVersionOrBuilder() {
      // Prefer the live nested builder when present; never return null — fall back to
      // the default instance.
      if (versionBuilder_ != null) {
        return versionBuilder_.getMessageOrBuilder();
      } else {
        return version_ == null
            ? com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerVersion
                .getDefaultInstance()
            : version_;
      }
    }

    /**
     *
     *
     * <pre>
     * The version of Gatekeeper Policy Controller deployed.
     * </pre>
     *
     * <code>.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerVersion version = 1;
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerVersion,
            com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerVersion.Builder,
            com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerVersionOrBuilder>
        getVersionFieldBuilder() {
      // Lazily create the nested field builder; once created it owns the value, so the
      // plain message reference is released.
      if (versionBuilder_ == null) {
        versionBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerVersion,
                com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerVersion.Builder,
                com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerVersionOrBuilder>(
                getVersion(), getParentForChildren(), isClean());
        version_ = null;
      }
      return versionBuilder_;
    }

    // Backing storage for `deployment_state` (field 2): either the plain message or,
    // once requested, a nested SingleFieldBuilderV3 that supersedes it.
    private com.google.cloud.gkehub.configmanagement.v1alpha.GatekeeperDeploymentState
        deploymentState_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.gkehub.configmanagement.v1alpha.GatekeeperDeploymentState,
            com.google.cloud.gkehub.configmanagement.v1alpha.GatekeeperDeploymentState.Builder,
            com.google.cloud.gkehub.configmanagement.v1alpha.GatekeeperDeploymentStateOrBuilder>
        deploymentStateBuilder_;

    /**
     *
     *
     * <pre>
     * The state about the policy controller installation.
     * </pre>
     *
     * <code>
     * .google.cloud.gkehub.configmanagement.v1alpha.GatekeeperDeploymentState deployment_state = 2;
     * </code>
     *
     * @return Whether the deploymentState field is set.
     */
    public boolean hasDeploymentState() {
      // Presence is tracked by bit 0x00000002 of bitField0_.
      return ((bitField0_ & 0x00000002) != 0);
    }

    /**
     *
     *
     * <pre>
     * The state about the policy controller installation.
     * </pre>
     *
     * <code>
     * .google.cloud.gkehub.configmanagement.v1alpha.GatekeeperDeploymentState deployment_state = 2;
     * </code>
     *
     * @return The deploymentState.
     */
    public com.google.cloud.gkehub.configmanagement.v1alpha.GatekeeperDeploymentState
        getDeploymentState() {
      // Never returns null: default instance stands in for an unset field.
      if (deploymentStateBuilder_ == null) {
        return deploymentState_ == null
            ? com.google.cloud.gkehub.configmanagement.v1alpha.GatekeeperDeploymentState
                .getDefaultInstance()
            : deploymentState_;
      } else {
        return deploymentStateBuilder_.getMessage();
      }
    }

    /**
     *
     *
     * <pre>
     * The state about the policy controller installation.
     * </pre>
     *
     * <code>
     * .google.cloud.gkehub.configmanagement.v1alpha.GatekeeperDeploymentState deployment_state = 2;
     * </code>
     */
    public Builder setDeploymentState(
        com.google.cloud.gkehub.configmanagement.v1alpha.GatekeeperDeploymentState value) {
      if (deploymentStateBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        deploymentState_ = value;
      } else {
        deploymentStateBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002; // has-bit for `deployment_state`
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The state about the policy controller installation.
     * </pre>
     *
     * <code>
     * .google.cloud.gkehub.configmanagement.v1alpha.GatekeeperDeploymentState deployment_state = 2;
     * </code>
     */
    public Builder setDeploymentState(
        com.google.cloud.gkehub.configmanagement.v1alpha.GatekeeperDeploymentState.Builder
            builderForValue) {
      // Builder overload: snapshot the builder's current state via build().
      if (deploymentStateBuilder_ == null) {
        deploymentState_ = builderForValue.build();
      } else {
        deploymentStateBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The state about the policy controller installation.
     * </pre>
     *
     * <code>
     * .google.cloud.gkehub.configmanagement.v1alpha.GatekeeperDeploymentState deployment_state = 2;
     * </code>
     */
    public Builder mergeDeploymentState(
        com.google.cloud.gkehub.configmanagement.v1alpha.GatekeeperDeploymentState value) {
      if (deploymentStateBuilder_ == null) {
        // Same merge pattern as mergeVersion: merge into an existing non-default
        // message, otherwise replace outright.
        if (((bitField0_ & 0x00000002) != 0)
            && deploymentState_ != null
            && deploymentState_
                != com.google.cloud.gkehub.configmanagement.v1alpha.GatekeeperDeploymentState
                    .getDefaultInstance()) {
          getDeploymentStateBuilder().mergeFrom(value);
        } else {
          deploymentState_ = value;
        }
      } else {
        deploymentStateBuilder_.mergeFrom(value);
      }
      if (deploymentState_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The state about the policy controller installation.
     * </pre>
     *
     * <code>
     * .google.cloud.gkehub.configmanagement.v1alpha.GatekeeperDeploymentState deployment_state = 2;
     * </code>
     */
    public Builder clearDeploymentState() {
      bitField0_ = (bitField0_ & ~0x00000002);
      deploymentState_ = null;
      if (deploymentStateBuilder_ != null) {
        deploymentStateBuilder_.dispose();
        deploymentStateBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The state about the policy controller installation.
     * </pre>
     *
     * <code>
     * .google.cloud.gkehub.configmanagement.v1alpha.GatekeeperDeploymentState deployment_state = 2;
     * </code>
     */
    public com.google.cloud.gkehub.configmanagement.v1alpha.GatekeeperDeploymentState.Builder
        getDeploymentStateBuilder() {
      bitField0_ |= 0x00000002;
      onChanged();
      return getDeploymentStateFieldBuilder().getBuilder();
    }

    /**
     *
     *
     * <pre>
     * The state about the policy controller installation.
     * </pre>
     *
     * <code>
     * .google.cloud.gkehub.configmanagement.v1alpha.GatekeeperDeploymentState deployment_state = 2;
     * </code>
     */
    public com.google.cloud.gkehub.configmanagement.v1alpha.GatekeeperDeploymentStateOrBuilder
        getDeploymentStateOrBuilder() {
      if (deploymentStateBuilder_ != null) {
        return deploymentStateBuilder_.getMessageOrBuilder();
      } else {
        return deploymentState_ == null
            ? com.google.cloud.gkehub.configmanagement.v1alpha.GatekeeperDeploymentState
                .getDefaultInstance()
            : deploymentState_;
      }
    }

    /**
     *
     *
     * <pre>
     * The state about the policy controller installation.
     * </pre>
     *
     * <code>
     * .google.cloud.gkehub.configmanagement.v1alpha.GatekeeperDeploymentState deployment_state = 2;
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.gkehub.configmanagement.v1alpha.GatekeeperDeploymentState,
            com.google.cloud.gkehub.configmanagement.v1alpha.GatekeeperDeploymentState.Builder,
            com.google.cloud.gkehub.configmanagement.v1alpha.GatekeeperDeploymentStateOrBuilder>
        getDeploymentStateFieldBuilder() {
      // Lazy init, mirroring getVersionFieldBuilder().
      if (deploymentStateBuilder_ == null) {
        deploymentStateBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.gkehub.configmanagement.v1alpha.GatekeeperDeploymentState,
                com.google.cloud.gkehub.configmanagement.v1alpha.GatekeeperDeploymentState.Builder,
                com.google.cloud.gkehub.configmanagement.v1alpha
                    .GatekeeperDeploymentStateOrBuilder>(
                getDeploymentState(), getParentForChildren(), isClean());
        deploymentState_ = null;
      }
      return deploymentStateBuilder_;
    }

    // Unknown-field handling is delegated entirely to the superclass.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerState)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerState)

  // Shared immutable default instance, created eagerly at class load.
  private static final com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerState
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE =
        new com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerState();
  }

  public static com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerState
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Wire-format parser: parses via a fresh Builder and attaches the partially built
  // message to any parse exception so callers can inspect what was read so far.
  private static final com.google.protobuf.Parser<PolicyControllerState> PARSER =
      new com.google.protobuf.AbstractParser<PolicyControllerState>() {
        @java.lang.Override
        public PolicyControllerState parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            // Missing required fields: convert to the checked protobuf exception.
            throw e.asInvalidProtocolBufferException()
                .setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap raw I/O failures so the method's throws-clause stays uniform.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<PolicyControllerState> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<PolicyControllerState> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.gkehub.configmanagement.v1alpha.PolicyControllerState
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}